diff --git a/.cargo/ci-config.toml b/.cargo/ci-config.toml new file mode 100644 index 0000000..6644198 --- /dev/null +++ b/.cargo/ci-config.toml @@ -0,0 +1,15 @@ +# This config is different from config.toml in this directory, as only the latter is recognized by Cargo. +# This file is placed in $HOME/.cargo/config.toml on CI runs. Cargo then merges Zed's .cargo/config.toml with $HOME/.cargo/config.toml +# with preference for settings from Zed's config.toml. +# TL;DR: If a value is set in both ci-config.toml and config.toml, the config.toml value takes precedence. +# Arrays are merged together though. See: https://doc.rust-lang.org/cargo/reference/config.html#hierarchical-structure +# The intent for this file is to configure the CI build process where it diverges from the Zed developer experience; for example, in this config file +# we use `-D warnings` for rustflags (which makes compilation fail in the presence of warnings during the build process). Placing that in developers' `config.toml` +# would be inconvenient. +# We *could* override things like RUSTFLAGS manually by setting them as environment variables, but that is less DRY; worse yet, if you forget to set the proper environment variables +# in one spot, that's going to trigger a rebuild of all of the artifacts. Using ci-config.toml we can define these overrides for CI in one spot and not worry about it. +[build] +rustflags = ["-D", "warnings"] + +[alias] +xtask = "run --package xtask --" diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 0000000..d73dead --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,6 @@ +[build] +# v0 mangling scheme provides more detailed backtraces around closures +rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"] + +[alias] +xtask = "run --package xtask --" diff --git a/.cloudflare/README.md b/.cloudflare/README.md new file mode 100644 index 0000000..d010c26 --- /dev/null +++ b/.cloudflare/README.md @@ -0,0 +1,15 @@ +We have two Cloudflare Workers that let us serve some assets of this repo +from Cloudflare. + +* `open-source-website-assets` is used for `install.sh` +* `docs-proxy` is used for `https://zed.dev/docs` + +On push to `main`, both of these (and the files they depend on) are uploaded to Cloudflare. + +### Deployment + +These workers are deployed on push to `main` by the deploy_cloudflare.yml workflow. Worker Rules in Cloudflare intercept requests to zed.dev and proxy them to the appropriate workers. + +### Testing + +You can use [wrangler](https://developers.cloudflare.com/workers/cli-wrangler/install-update) to test these workers locally, or to deploy custom versions.
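For local testing, a minimal sketch (assuming Wrangler v3 is available via `npx`, and that commands are run from a worker's own directory so its `wrangler.toml` is picked up) might look like:

```sh
# Run the docs proxy locally on a dev server (reads .cloudflare/docs-proxy/wrangler.toml).
cd .cloudflare/docs-proxy
npx wrangler dev

# Deploy a custom version of the worker from the same directory.
npx wrangler deploy
```

The same flow applies to `open-source-website-assets` from its own directory.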
diff --git a/.cloudflare/docs-proxy/src/worker.js b/.cloudflare/docs-proxy/src/worker.js new file mode 100644 index 0000000..f9f4418 --- /dev/null +++ b/.cloudflare/docs-proxy/src/worker.js @@ -0,0 +1,14 @@ +export default { + async fetch(request, _env, _ctx) { + const url = new URL(request.url); + url.hostname = "docs-anw.pages.dev"; + + let res = await fetch(url, request); + + if (res.status === 404) { + res = await fetch("https://zed.dev/404"); + } + + return res; + }, +}; diff --git a/.cloudflare/docs-proxy/wrangler.toml b/.cloudflare/docs-proxy/wrangler.toml new file mode 100644 index 0000000..b5262cc --- /dev/null +++ b/.cloudflare/docs-proxy/wrangler.toml @@ -0,0 +1,8 @@ +name = "docs-proxy" +main = "src/worker.js" +compatibility_date = "2024-05-03" +workers_dev = true + +[[routes]] +pattern = "zed.dev/docs*" +zone_name = "zed.dev" diff --git a/.cloudflare/open-source-website-assets/src/worker.js b/.cloudflare/open-source-website-assets/src/worker.js new file mode 100644 index 0000000..be34f8d --- /dev/null +++ b/.cloudflare/open-source-website-assets/src/worker.js @@ -0,0 +1,19 @@ +export default { + async fetch(request, env) { + const url = new URL(request.url); + const key = url.pathname.slice(1); + + const object = await env.OPEN_SOURCE_WEBSITE_ASSETS_BUCKET.get(key); + if (!object) { + return await fetch("https://zed.dev/404"); + } + + const headers = new Headers(); + object.writeHttpMetadata(headers); + headers.set("etag", object.httpEtag); + + return new Response(object.body, { + headers, + }); + }, +}; diff --git a/.cloudflare/open-source-website-assets/wrangler.toml b/.cloudflare/open-source-website-assets/wrangler.toml new file mode 100644 index 0000000..b4947fa --- /dev/null +++ b/.cloudflare/open-source-website-assets/wrangler.toml @@ -0,0 +1,8 @@ +name = "open-source-website-assets" +main = "src/worker.js" +compatibility_date = "2024-05-15" +workers_dev = true + +[[r2_buckets]] +binding = 'OPEN_SOURCE_WEBSITE_ASSETS_BUCKET' +bucket_name = 'zed-open-source-website-assets' diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 0000000..b05d689 --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,6 @@ +[test-groups] +sequential-db-tests = { max-threads = 1 } + +[[profile.default.overrides]] +filter = 'package(db)' +test-group = 'sequential-db-tests' diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..a6e6c35 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +**/target +zed.xcworkspace +.DS_Store +plugins/bin +script/node_modules +styles/node_modules +crates/collab/static/styles.css +vendor/bin +assets/themes/ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..9973cfb --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Prevent GitHub from displaying comments within JSON files as errors. +*.json linguist-language=JSON-with-Comments diff --git a/.github/ISSUE_TEMPLATE/0_feature_request.yml b/.github/ISSUE_TEMPLATE/0_feature_request.yml new file mode 100644 index 0000000..c5e1fa9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/0_feature_request.yml @@ -0,0 +1,24 @@ +name: Feature Request +description: "Tip: open this issue template from within Zed with the `request feature` command palette action" +labels: ["admin read", "triage", "enhancement"] +body: + - type: checkboxes + attributes: + label: Check for existing issues + description: Check the backlog of issues to reduce the chances of creating duplicates; if an issue already exists, place a `+1` (👍) on it. 
+ options: + - label: Completed + required: true + - type: textarea + attributes: + label: Describe the feature + description: A clear and concise description of what you want to happen. + validations: + required: true + - type: textarea + attributes: + label: | + If applicable, add mockups / screenshots to help present your vision of the feature + description: Drag images into the text input below + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yml b/.github/ISSUE_TEMPLATE/1_bug_report.yml new file mode 100644 index 0000000..ccdd084 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1_bug_report.yml @@ -0,0 +1,40 @@ +name: Bug Report +description: | + Use this template for **non-crash-related** bug reports. + Tip: open this issue template from within Zed with the `file bug report` command palette action. +labels: ["admin read", "triage", "defect"] +body: + - type: checkboxes + attributes: + label: Check for existing issues + description: Check the backlog of issues to reduce the chances of creating duplicates; if an issue already exists, place a `+1` (👍) on it. + options: + - label: Completed + required: true + - type: textarea + attributes: + label: Describe the bug / provide steps to reproduce it + description: A clear and concise description of what the bug is. + validations: + required: true + - type: textarea + id: environment + attributes: + label: Environment + description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below. + validations: + required: true + - type: textarea + attributes: + label: If applicable, add mockups / screenshots to help explain the problem + description: Drag images into the text input below + validations: + required: false + - type: textarea + attributes: + label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue. + description: | + Drag Zed.log into the text input below. + If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/2_crash_report.yml b/.github/ISSUE_TEMPLATE/2_crash_report.yml new file mode 100644 index 0000000..c877ff1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/2_crash_report.yml @@ -0,0 +1,33 @@ +name: Crash Report +description: | + Use this template for crash reports. +labels: ["admin read", "triage", "defect", "panic / crash"] +body: + - type: checkboxes + attributes: + label: Check for existing issues + description: Check the backlog of issues to reduce the chances of creating duplicates; if an issue already exists, place a `+1` (👍) on it. + options: + - label: Completed + required: true + - type: textarea + attributes: + label: Describe the bug / provide steps to reproduce it + description: A clear and concise description of what the bug is. + validations: + required: true + - type: textarea + id: environment + attributes: + label: Environment + description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below. + validations: + required: true + - type: textarea + attributes: + label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue. + description: | + Drag Zed.log into the text input below. + If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
+ validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..9351115 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,17 @@ +blank_issues_enabled: false +contact_links: + - name: Language Request + url: https://github.com/zed-industries/extensions/issues/new?assignees=&labels=language&projects=&template=1_language_request.yml&title=%3Cname_of_language%3E + about: Request a language in the extensions repository + - name: Theme Request + url: https://github.com/zed-industries/extensions/issues/new?assignees=&labels=theme&projects=&template=0_theme_request.yml&title=%3Cname_of_theme%3E+theme + about: Request a theme in the extensions repository + - name: Top-Ranking Issues + url: https://github.com/zed-industries/zed/issues/5393 + about: See an overview of the most popular Zed issues + - name: Platform Support + url: https://github.com/zed-industries/zed/issues/5391 + about: A quick note on platform support + - name: Positive Feedback + url: https://github.com/zed-industries/zed/discussions/5397 + about: A central location for kind words about Zed diff --git a/.github/actions/check_style/action.yml b/.github/actions/check_style/action.yml new file mode 100644 index 0000000..7f41e71 --- /dev/null +++ b/.github/actions/check_style/action.yml @@ -0,0 +1,15 @@ +name: "Check formatting" +description: "Checks code formatting use cargo fmt" + +runs: + using: "composite" + steps: + - name: cargo fmt + shell: bash -euxo pipefail {0} + run: cargo fmt --all -- --check + + - name: Find modified migrations + shell: bash -euxo pipefail {0} + run: | + export SQUAWK_GITHUB_TOKEN=${{ github.token }} + . ./script/squawk diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml new file mode 100644 index 0000000..334867d --- /dev/null +++ b/.github/actions/run_tests/action.yml @@ -0,0 +1,23 @@ +name: "Run tests" +description: "Runs the tests" + +runs: + using: "composite" + steps: + - name: Install Rust + shell: bash -euxo pipefail {0} + run: | + cargo install cargo-nextest + + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Limit target directory size + shell: bash -euxo pipefail {0} + run: script/clear-target-dir-if-larger-than 100 + + - name: Run tests + shell: bash -euxo pipefail {0} + run: cargo nextest run --workspace --no-fail-fast diff --git a/.github/cherry-pick-bot.yml b/.github/cherry-pick-bot.yml new file mode 100644 index 0000000..1f62315 --- /dev/null +++ b/.github/cherry-pick-bot.yml @@ -0,0 +1,2 @@ +enabled: true +preservePullRequestTitle: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..b4f6090 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,13 @@ + + +Release Notes: + +- Added/Fixed/Improved ... ([#](https://github.com/zed-industries/zed/issues/)). + +Optionally, include screenshots / media showcasing your addition that can be included in the release notes. + +### Or... + +Release Notes: + +- N/A diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml new file mode 100644 index 0000000..a56051e --- /dev/null +++ b/.github/workflows/bump_patch_version.yml @@ -0,0 +1,49 @@ +name: bump_patch_version + +on: + workflow_dispatch: + inputs: + branch: + description: "Branch name to run on" + required: true + +concurrency: + # Allow only one workflow per any non-`main` branch. 
+ group: ${{ github.workflow }}-${{ inputs.branch }} + cancel-in-progress: true + +jobs: + bump_patch_version: + runs-on: + - self-hosted + - test + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + ref: ${{ github.event.inputs.branch }} + ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }} + + - name: Bump Patch Version + run: | + set -eux + + channel=$(cat crates/zed/RELEASE_CHANNEL) + + tag_suffix="" + case $channel in + stable) + ;; + preview) + tag_suffix="-pre" + ;; + *) + echo "this must be run on either of stable|preview release branches" >&2 + exit 1 + ;; + esac + which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl + output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //') + git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot " + git tag v${output}${tag_suffix} + git push origin HEAD v${output}${tag_suffix} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..35d764a --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,324 @@ +name: CI + +on: + push: + branches: + - main + - "v[0-9]+.[0-9]+.x" + tags: + - "v*" + pull_request: + branches: + - "**" + +concurrency: + # Allow only one workflow per any non-`main` branch. + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: 0 + RUST_BACKTRACE: 1 + +jobs: + style: + name: Check formatting and spelling + runs-on: + - self-hosted + - test + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + fetch-depth: 0 + + - name: Remove untracked files + run: git clean -df + + - name: Set up default .cargo/config.toml + run: cp ./.cargo/ci-config.toml ~/.cargo/config.toml + + - name: Check spelling + run: | + if ! 
which typos > /dev/null; then + cargo install typos-cli + fi + typos + + - name: Run style checks + uses: ./.github/actions/check_style + + - name: Check unused dependencies + uses: bnjbvr/cargo-machete@main + + - name: Check license generation + run: script/generate-licenses /tmp/zed_licenses_output + + - name: Ensure fresh merge + shell: bash -euxo pipefail {0} + run: | + if [ -z "$GITHUB_BASE_REF" ]; + then + echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> $GITHUB_ENV + else + git checkout -B temp + git merge -q origin/$GITHUB_BASE_REF -m "merge main into temp" + echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> $GITHUB_ENV + fi + + - uses: bufbuild/buf-setup-action@v1 + with: + version: v1.29.0 + - uses: bufbuild/buf-breaking-action@v1 + with: + input: "crates/rpc/proto/" + against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/rpc/proto/" + + macos_tests: + name: (macOS) Run Clippy and tests + runs-on: + - self-hosted + - test + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: cargo clippy + run: cargo xtask clippy + + - name: Run tests + uses: ./.github/actions/run_tests + + - name: Build collab + run: cargo build -p collab + + - name: Build other binaries and features + run: cargo build --workspace --bins --all-features; cargo check -p gpui --features "macos-blade" + + # todo(linux): Actually run the tests + linux_tests: + name: (Linux) Run Clippy and tests + runs-on: + - self-hosted + - deploy + steps: + - name: Add Rust to the PATH + run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: cargo clippy + run: cargo xtask clippy + + - name: Build Zed + run: cargo build -p zed + + # todo(windows): Actually run the tests + windows_tests: + name: (Windows) Run Clippy and tests + runs-on: hosted-windows-1 + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Cache dependencies + uses: swatinem/rust-cache@v2 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + + - name: cargo clippy + run: cargo xtask clippy + + - name: Build Zed + run: cargo build -p zed + + bundle-mac: + name: Create a macOS bundle + runs-on: + - self-hosted + - bundle + if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + needs: [macos_tests] + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} + APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} + DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} + steps: + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Checkout repo + uses: actions/checkout@v4 + with: + # We need to fetch more than one commit so that `script/draft-release-notes` + # is able to diff between the current and previous tag. + # + # 25 was chosen arbitrarily. 
fetch-depth: 25 + clean: false + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 100 + + - name: Determine version and release channel + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + run: | + set -eu + + version=$(script/get-crate-version zed) + channel=$(cat crates/zed/RELEASE_CHANNEL) + echo "Publishing version: ${version} on release channel ${channel}" + echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV + + expected_tag_name="" + case ${channel} in + stable) + expected_tag_name="v${version}";; + preview) + expected_tag_name="v${version}-pre";; + nightly) + expected_tag_name="v${version}-nightly";; + *) + echo "can't publish a release on channel ${channel}" + exit 1;; + esac + if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then + echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" + exit 1 + fi + mkdir -p target/ + # Ignore any errors that occur while drafting release notes to not fail the build. + script/draft-release-notes "$version" "$channel" > target/release-notes.md || true + + - name: Generate license file + run: script/generate-licenses + + - name: Create macOS app bundle + run: script/bundle-mac + + - name: Rename single-architecture binaries + if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + run: | + mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg + mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg + + - name: Upload app bundle (universal) to workflow run if main branch or specific label + uses: actions/upload-artifact@v4 + if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg + path: target/release/Zed.dmg + - name: Upload app bundle (aarch64) to workflow run if main branch or specific label + uses: actions/upload-artifact@v4 + if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg + path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg + + - name: Upload app bundle (x86_64) to workflow run if main branch or specific label + uses: actions/upload-artifact@v4 + if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg + path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg + + - uses: softprops/action-gh-release@v1 + name: Upload app bundle to release + if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }} + with: + draft: true + prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} + files: | + target/aarch64-apple-darwin/release/Zed-aarch64.dmg + target/x86_64-apple-darwin/release/Zed-x86_64.dmg + target/release/Zed.dmg + body_file: target/release-notes.md + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + bundle-linux: + name: Create a Linux bundle + runs-on: + - self-hosted + - deploy + if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + needs: [linux_tests] + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + steps: + - name: Add Rust to the PATH + run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + +
- name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 100 + + - name: Determine version and release channel + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + run: | + set -eu + + version=$(script/get-crate-version zed) + channel=$(cat crates/zed/RELEASE_CHANNEL) + echo "Publishing version: ${version} on release channel ${channel}" + echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV + + expected_tag_name="" + case ${channel} in + stable) + expected_tag_name="v${version}";; + preview) + expected_tag_name="v${version}-pre";; + nightly) + expected_tag_name="v${version}-nightly";; + *) + echo "can't publish a release on channel ${channel}" + exit 1;; + esac + if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then + echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" + exit 1 + fi + + - name: Generate license file + run: script/generate-licenses + + - name: Create and upload Linux .tar.gz bundle + run: script/bundle-linux + + - name: Upload Linux bundle to workflow run if main branch or specific label + uses: actions/upload-artifact@v4 + if: ${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} + with: + name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + path: zed-*.tar.gz + + - name: Upload app bundle to release + uses: softprops/action-gh-release@v1 + if: ${{ env.RELEASE_CHANNEL == 'preview' }} + with: + draft: true + prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} + files: target/release/zed-linux-x86_64.tar.gz + body: "" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml new file mode 100644 index 0000000..a6044dc --- /dev/null +++ b/.github/workflows/danger.yml @@ -0,0 +1,41 @@ +name: Danger + +on: + pull_request: + branches: [main] + types: + - opened + - synchronize + - reopened + - edited + +jobs: + danger: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - uses: pnpm/action-setup@v3 + with: + version: 8 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "pnpm" + cache-dependency-path: "script/danger/pnpm-lock.yaml" + + - run: pnpm install --dir script/danger + + - name: Run Danger + run: pnpm run --dir script/danger danger ci + env: + # This GitHub token is not used, but the value needs to be here to prevent + # Danger from throwing an error. + GITHUB_TOKEN: "not_a_real_token" + # All requests are instead proxied through an instance of + # https://github.com/maxdeviant/danger-proxy that allows Danger to securely + # authenticate with GitHub while still being able to run on PRs from forks.
+ DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github" diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml new file mode 100644 index 0000000..dc8aa40 --- /dev/null +++ b/.github/workflows/deploy_cloudflare.yml @@ -0,0 +1,56 @@ +name: Deploy Docs + +on: + push: + branches: + - main + +jobs: + deploy-docs: + name: Deploy Docs + runs-on: ubuntu-latest + + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Setup mdBook + uses: peaceiris/actions-mdbook@v2 + with: + mdbook-version: "0.4.37" + + - name: Build book + run: | + set -euo pipefail + mkdir -p target/deploy + mdbook build ./docs --dest-dir=../target/deploy/docs/ + + - name: Deploy Docs + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: pages deploy target/deploy --project-name=docs + + - name: Deploy Install + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh + + - name: Deploy Docs Workers + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: deploy .cloudflare/docs-proxy/src/worker.js + + - name: Deploy Install Workers + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: deploy .cloudflare/open-source-website-assets/src/worker.js diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml new file mode 100644 index 0000000..e6f741b --- /dev/null +++ b/.github/workflows/deploy_collab.yml @@ -0,0 +1,133 @@ +name: Publish Collab Server Image + +on: + push: + tags: + - collab-production + - collab-staging + +env: + DOCKER_BUILDKIT: 1 + DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} + +jobs: + style: + name: Check formatting and Clippy lints + runs-on: + - self-hosted + - test + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + fetch-depth: 0 + + - name: Run style checks + uses: ./.github/actions/check_style + + - name: Run clippy + run: cargo xtask clippy + + tests: + name: Run tests + runs-on: + - self-hosted + - test + needs: style + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + fetch-depth: 0 + + - name: Install cargo nextest + shell: bash -euxo pipefail {0} + run: | + cargo install cargo-nextest + + - name: Limit target directory size + shell: bash -euxo pipefail {0} + run: script/clear-target-dir-if-larger-than 100 + + - name: Run tests + shell: bash -euxo pipefail {0} + run: cargo nextest run --package collab --no-fail-fast + + publish: + name: Publish collab server image + needs: + - style + - tests + runs-on: + - self-hosted + - deploy + steps: + - name: Add Rust to the PATH + run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + + - name: Sign into DigitalOcean docker registry + run: doctl registry login + + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Build docker image + run: docker build .
--build-arg GITHUB_SHA=$GITHUB_SHA --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA + + - name: Publish docker image + run: docker push registry.digitalocean.com/zed/collab:${GITHUB_SHA} + + - name: Prune Docker system + run: docker system prune --filter 'until=72h' -f + + deploy: + name: Deploy new server image + needs: + - publish + runs-on: + - self-hosted + - deploy + + steps: + - name: Sign into Kubernetes + run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} + + - name: Start rollout + run: | + set -eu + if [[ $GITHUB_REF_NAME = "collab-production" ]]; then + export ZED_KUBE_NAMESPACE=production + export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=10 + export ZED_API_LOAD_BALANCER_SIZE_UNIT=2 + elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then + export ZED_KUBE_NAMESPACE=staging + export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=1 + export ZED_API_LOAD_BALANCER_SIZE_UNIT=1 + else + echo "cowardly refusing to deploy from an unknown branch" + exit 1 + fi + + echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE" + + source script/lib/deploy-helpers.sh + export_vars_for_environment $ZED_KUBE_NAMESPACE + + export ZED_DO_CERTIFICATE_ID=$(doctl compute certificate list --format ID --no-header) + export ZED_IMAGE_ID="registry.digitalocean.com/zed/collab:${GITHUB_SHA}" + + export ZED_SERVICE_NAME=collab + export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT + envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" + + export ZED_SERVICE_NAME=api + export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT + envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml new file mode 100644 index 0000000..7dd75e9 --- /dev/null +++ b/.github/workflows/publish_extension_cli.yml @@ -0,0 +1,39 @@ +name: Publish zed-extension CLI + +on: + push: + tags: + - extension-cli + +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: 0 + +jobs: + publish: + name: Publish zed-extension CLI + runs-on: + - ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Cache dependencies + uses: swatinem/rust-cache@v2 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + + - name: Configure linux + shell: bash -euxo pipefail {0} + run: script/linux + + - name: Build extension CLI + run: cargo build --release --package extension_cli + + - name: Upload binary + env: + DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} + DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} + run: script/upload-extension-cli ${{ github.sha }} diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml new file mode 100644 index 0000000..54d7945 --- /dev/null +++ b/.github/workflows/randomized_tests.yml @@ -0,0 +1,36 @@ +name: Randomized Tests + +concurrency: randomized-tests + +on: + push: + branches: + - randomized-tests-runner + # schedule: + # - cron: '0 * * * *' + +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: 0 + RUST_BACKTRACE: 1 + ZED_SERVER_URL: https://zed.dev + +jobs: + tests: + name: Run 
randomized tests + runs-on: + - self-hosted + - randomized-tests + steps: + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Run randomized tests + run: script/randomized-test-ci diff --git a/.github/workflows/release_actions.yml b/.github/workflows/release_actions.yml new file mode 100644 index 0000000..0908501 --- /dev/null +++ b/.github/workflows/release_actions.yml @@ -0,0 +1,32 @@ +on: + release: + types: [published] + +jobs: + discord_release: + runs-on: ubuntu-latest + steps: + - name: Get release URL + id: get-release-url + run: | + if [ "${{ github.event.release.prerelease }}" == "true" ]; then + URL="https://zed.dev/releases/preview/latest" + else + URL="https://zed.dev/releases/stable/latest" + fi + echo "::set-output name=URL::$URL" + - name: Get content + uses: 2428392/gh-truncate-string-action@v1.3.0 + id: get-content + with: + stringToTruncate: | + 📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released! + + ${{ github.event.release.body }} + maxLength: 2000 + truncationSymbol: "..." + - name: Discord Webhook Action + uses: tsickert/discord-webhook@v5.3.0 + with: + webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }} + content: ${{ steps.get-content.outputs.string }} diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml new file mode 100644 index 0000000..cd31885 --- /dev/null +++ b/.github/workflows/release_nightly.yml @@ -0,0 +1,127 @@ +name: Release Nightly + +on: + schedule: + # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) + - cron: "0 7 * * *" + push: + tags: + - "nightly" + +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: 0 + RUST_BACKTRACE: 1 + +jobs: + style: + name: Check formatting and Clippy lints + if: github.repository_owner == 'zed-industries' + runs-on: + - self-hosted + - test + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + fetch-depth: 0 + + - name: Run style checks + uses: ./.github/actions/check_style + + - name: Run clippy + run: cargo xtask clippy + tests: + name: Run tests + if: github.repository_owner == 'zed-industries' + runs-on: + - self-hosted + - test + needs: style + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Run tests + uses: ./.github/actions/run_tests + + bundle-mac: + name: Create a macOS bundle + if: github.repository_owner == 'zed-industries' + runs-on: + - self-hosted + - bundle + needs: tests + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} + APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} + DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + steps: + - name: Install Node + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Set release channel to nightly + run: | + set -eu + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + + - name: Generate license file + run: 
script/generate-licenses + + - name: Create macOS app bundle + run: script/bundle-mac + + - name: Upload Zed Nightly + run: script/upload-nightly macos + + bundle-deb: + name: Create a Linux *.tar.gz bundle + if: github.repository_owner == 'zed-industries' + runs-on: + - self-hosted + - deploy + needs: tests + env: + DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} + DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + clean: false + + - name: Add Rust to the PATH + run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + + - name: Set release channel to nightly + run: | + set -euo pipefail + version=$(git rev-parse --short HEAD) + echo "Publishing version: ${version} on release channel nightly" + echo "nightly" > crates/zed/RELEASE_CHANNEL + + - name: Generate license file + run: script/generate-licenses + + - name: Create Linux .tar.gz bundle + run: script/bundle-linux + + - name: Upload Zed Nightly + run: script/upload-nightly linux-targz diff --git a/.github/workflows/update_all_top_ranking_issues.yml b/.github/workflows/update_all_top_ranking_issues.yml new file mode 100644 index 0000000..259932c --- /dev/null +++ b/.github/workflows/update_all_top_ranking_issues.yml @@ -0,0 +1,18 @@ +on: + schedule: + - cron: "0 */12 * * *" + workflow_dispatch: + +jobs: + update_top_ranking_issues: + runs-on: ubuntu-latest + if: github.repository_owner == 'zed-industries' + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + architecture: "x64" + cache: "pip" + - run: pip install -r script/update_top_ranking_issues/requirements.txt + - run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393 diff --git a/.github/workflows/update_weekly_top_ranking_issues.yml b/.github/workflows/update_weekly_top_ranking_issues.yml new file mode 100644 index 0000000..38ead65 --- /dev/null +++ b/.github/workflows/update_weekly_top_ranking_issues.yml @@ -0,0 +1,18 @@ +on: + schedule: + - cron: "0 15 * * *" + workflow_dispatch: + +jobs: + update_top_ranking_issues: + runs-on: ubuntu-latest + if: github.repository_owner == 'zed-industries' + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + architecture: "x64" + cache: "pip" + - run: pip install -r script/update_top_ranking_issues/requirements.txt + - run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..48e329d --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +.idea +**/target +**/cargo-target +/zed.xcworkspace +.DS_Store +/plugins/bin +/script/node_modules +/crates/theme/schemas/theme.json +/crates/collab/seed.json +/assets/*licenses.md +**/venv +.build +*.wasm +Packages +*.xcodeproj +xcuserdata/ +DerivedData/ +.swiftpm/config/registries.json +.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata +.netrc +.swiftpm +**/*.db +.pytest_cache +.venv +.blob_store +.vscode +.wrangler diff --git a/.mailmap b/.mailmap new file mode 100644 index 0000000..ef4cd1c --- /dev/null +++ b/.mailmap @@ -0,0 +1,59 @@ +# Canonical author names and emails. 
+# +# Use this to provide a canonical name and email for an author when their +# name is not always written the same way and/or they have commits authored +# under different email addresses. +# +# Reference: https://git-scm.com/docs/gitmailmap + +# Keep these entries sorted alphabetically. +# In Zed: `editor: sort lines case sensitive` + +Antonio Scandurra +Antonio Scandurra +Christian Bergschneider +Christian Bergschneider +Conrad Irwin +Conrad Irwin +Fernando Tagawa +Fernando Tagawa +Greg Morenz +Greg Morenz +Ivan Žužak +Ivan Žužak +Joseph T. Lyons +Joseph T. Lyons +Julia +Julia <30666851+ForLoveOfCats@users.noreply.github.com> +Kaylee Simmons +Kaylee Simmons +Kaylee Simmons +Kaylee Simmons +Kirill Bulatov +Kirill Bulatov +Kyle Caverly +Kyle Caverly +LoganDark +LoganDark +LoganDark +Marshall Bowers +Marshall Bowers +Max Brunsfeld +Max Brunsfeld +Mikayla Maki +Mikayla Maki +Mikayla Maki +Nate Butler +Nate Butler +Nathan Sobo +Nathan Sobo +Nathan Sobo +Petros Amoiridis +Petros Amoiridis +Piotr Osiewicz +Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> +Robert Clover +Robert Clover +Thorsten Ball +Thorsten Ball +Thorsten Ball diff --git a/.zed/settings.json b/.zed/settings.json new file mode 100644 index 0000000..eedf2f3 --- /dev/null +++ b/.zed/settings.json @@ -0,0 +1,27 @@ +{ + "languages": { + "Markdown": { + "tab_size": 2, + "formatter": "prettier" + }, + "TOML": { + "formatter": "prettier", + "format_on_save": "off" + }, + "YAML": { + "tab_size": 2, + "formatter": "prettier" + }, + "JSON": { + "tab_size": 2, + "formatter": "prettier" + }, + "JavaScript": { + "tab_size": 2, + "formatter": "prettier" + } + }, + "formatter": "auto", + "remove_trailing_whitespace_on_save": true, + "ensure_final_newline_on_save": true +} diff --git a/.zed/tasks.json b/.zed/tasks.json new file mode 100644 index 0000000..8046596 --- /dev/null +++ b/.zed/tasks.json @@ -0,0 +1,7 @@ +[ + { + "label": "clippy", + "command": "cargo", + "args": ["xtask", "clippy"] + } +] diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..57e3cc7 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Code of Conduct + +The Code of Conduct for this repository can be found online at [zed.dev/docs/code-of-conduct](https://zed.dev/docs/code-of-conduct). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..b9719e9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,54 @@ +# Contributing to Zed + +Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor! + +All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged. + +## Contribution ideas + +If you're looking for ideas about what to work on, check out: + +- Our [public roadmap](https://zed.dev/roadmap) contains a rough outline of our near-term priorities for Zed. +- Our [top-ranking issues](https://github.com/zed-industries/zed/issues/5393) based on votes by the community. + +For adding themes or support for a new language to Zed, check out our [extension docs](https://github.com/zed-industries/extensions/blob/main/AUTHORING_EXTENSIONS.md). + +## Proposing changes + +The best way to propose a change is to [start a discussion on our GitHub repository](https://github.com/zed-industries/zed/discussions). 
+ +First, write a short **problem statement**, which _clearly_ and _briefly_ describes the problem you want to solve independently from any specific solution. It doesn't need to be long or formal, but it's difficult to consider a solution in absence of a clear understanding of the problem. + +Next, write a short **solution proposal**. How can the problem (or set of problems) you have stated above be addressed? What are the pros and cons of your approach? Again, keep it brief and informal. This isn't a specification, but rather a starting point for a conversation. + +By effectively engaging with the Zed team and community early in your process, we're better positioned to give you feedback and understand your pull request once you open it. If the first thing we see from you is a big changeset, we're much less likely to respond to it in a timely manner. + +## Pair programming + +We plan to set aside time each week to pair program with contributors on promising pull requests in Zed. This will be an experiment. We tend to prefer pairing over async code review on our team, and we'd like to see how well it works in an open source setting. If we're finding it difficult to get on the same page with async review, we may ask you to pair with us if you're open to it. The closer a contribution is to the goals outlined in our roadmap, the more likely we'll be to spend time pairing on it. + +## Tips to improve the chances of your PR getting reviewed and merged + +- Discuss your plans ahead of time with the team +- Small, focused, incremental pull requests are much easier to review +- Spend time explaining your changes in the pull request body +- Add test coverage and documentation +- Choose tasks that align with our roadmap +- Pair with us and watch us code to learn the codebase +- Low effort PRs, such as those that just re-arrange syntax, won't be merged without a compelling justification + +## Bird's-eye view of Zed + +Zed is made up of several smaller crates - let's go over those you're most likely to interact with: + +- [`gpui`](/crates/gpui) is a GPU-accelerated UI framework which provides all of the building blocks for Zed. **We recommend familiarizing yourself with the root level GPUI documentation** +- [`editor`](/crates/editor) contains the core `Editor` type that drives both the code editor and all various input fields within Zed. It also handles a display layer for LSP features such as Inlay Hints or code completions. +- [`project`](/crates/project) manages files and navigation within the filetree. It is also Zed's side of communication with LSP. +- [`workspace`](/crates/workspace) handles local state serialization and groups projects together. +- [`vim`](/crates/vim) is a thin implementation of Vim workflow over `editor`. +- [`lsp`](/crates/lsp) handles communication with external LSP server. +- [`language`](/crates/language) drives `editor`'s understanding of language - from providing a list of symbols to the syntax map. +- [`collab`](/crates/collab) is the collaboration server itself, driving the collaboration features such as project sharing. +- [`rpc`](/crates/rpc) defines messages to be exchanged with collaboration server. +- [`theme`](/crates/theme) defines the theme system and provides a default theme. +- [`ui`](/crates/ui) is a collection of UI components and common patterns used throughout Zed. diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..85991f8 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,13404 @@ +# This file is automatically @generated by Cargo. 
+# It is not intended for manual editing. +version = 3 + +[[package]] +name = "activity_indicator" +version = "0.1.0" +dependencies = [ + "anyhow", + "auto_update", + "editor", + "extension", + "futures 0.3.28", + "gpui", + "language", + "project", + "smallvec", + "ui", + "workspace", +] + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher 0.4.4", + "cpufeatures", + "zeroize", +] + +[[package]] +name = "ahash" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "version_check", +] + +[[package]] +name = "ahash" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff" +dependencies = [ + "cfg-if", + "const-random", + "getrandom 0.2.10", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" +dependencies = [ + "memchr", +] + +[[package]] +name = "alacritty_terminal" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6d1ea4484c8676f295307a4892d478c70ac8da1dbd8c7c10830a504b7f1022f" +dependencies = [ + "base64 0.22.0", + "bitflags 2.4.2", + "home", + "libc", + "log", + "miow", + "parking_lot", + "piper", + "polling 3.3.2", + "regex-automata 0.4.5", + "rustix-openpty", + "serde", + "signal-hook", + "unicode-width", + "vte", + "windows-sys 0.48.0", +] + +[[package]] +name = "aliasable" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" + +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "alsa" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2562ad8dcf0f789f65c6fdaad8a8a9708ed6b488e649da28c01656ad66b8b47" +dependencies = [ + "alsa-sys", + "bitflags 1.3.2", + "libc", + "nix 0.24.3", +] + +[[package]] +name = "alsa-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db8fee663d06c4e303404ef5f40488a53e062f89ba8bfed81f42325aafad1527" +dependencies = [ + "libc", + "pkg-config", +] + +[[package]] +name = "ambient-authority" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e9d4ee0d472d1cd2e28c97dfa124b3d8d992e10eb0a035f33f5d12e3a177ba3b" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstream" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84bf0a05bbb2a83e5eb6fa36bb6e87baa08193c35ff52bbf6b38d8af2890e46" + +[[package]] +name = "anstyle-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd" +dependencies = [ + "anstyle", + "windows-sys 0.48.0", +] + +[[package]] +name = "anthropic" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.28", + "http 0.1.0", + "isahc", + "schemars", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "any_vec" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78f17bacc1bc7b91fef7b1885c10772eb2b9e4e989356f6f0f6a972240f97cd" + +[[package]] +name = "anyhow" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" + +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + +[[package]] +name = "arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" + +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "as-raw-xcb-connection" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"175571dd1d178ced59193a6fc02dde1b972eb0bc56c892cde9beeceac5bf0f6b" + +[[package]] +name = "ascii" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" + +[[package]] +name = "ash" +version = "0.38.0+1.3.281" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb44936d800fea8f016d7f2311c6a4f97aebd5dc86f09906139ec848cf3a46f" +dependencies = [ + "libloading 0.8.0", +] + +[[package]] +name = "ash-window" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52bca67b61cb81e5553babde81b8211f713cb6db79766f80168f3e5f40ea6c82" +dependencies = [ + "ash", + "raw-window-handle 0.6.0", + "raw-window-metal", +] + +[[package]] +name = "ashpd" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd884d7c72877a94102c3715f3b1cd09ff4fac28221add3e57cfbe25c236d093" +dependencies = [ + "async-fs 2.1.1", + "async-net 2.0.0", + "enumflags2", + "futures-channel", + "futures-util", + "rand 0.8.5", + "serde", + "serde_repr", + "url", + "zbus", +] + +[[package]] +name = "assets" +version = "0.1.0" +dependencies = [ + "anyhow", + "gpui", + "rust-embed", +] + +[[package]] +name = "assistant" +version = "0.1.0" +dependencies = [ + "anthropic", + "anyhow", + "cargo_toml", + "chrono", + "client", + "collections", + "command_palette_hooks", + "ctor", + "editor", + "env_logger", + "file_icons", + "fs", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "indoc", + "language", + "log", + "menu", + "multi_buffer", + "open_ai", + "ordered-float 2.10.0", + "parking_lot", + "project", + "rand 0.8.5", + "regex", + "rope", + "schemars", + "search", + "serde", + "serde_json", + "settings", + "smol", + "telemetry_events", + "theme", + "tiktoken-rs", + "toml 0.8.10", + "ui", + "unindent", + "util", + "uuid", + "workspace", +] + +[[package]] +name = "assistant2" +version = "0.1.0" +dependencies = [ + "anyhow", + "assets", + "assistant_tooling", + "chrono", + "client", + "collections", + "editor", + "env_logger", + "feature_flags", + "file_icons", + "fs", + "futures 0.3.28", + "fuzzy", + "gpui", + "http 0.1.0", + "language", + "languages", + "log", + "markdown", + "node_runtime", + "open_ai", + "picker", + "project", + "rand 0.8.5", + "regex", + "release_channel", + "schemars", + "semantic_index", + "serde", + "serde_json", + "settings", + "story", + "theme", + "ui", + "unindent", + "util", + "workspace", +] + +[[package]] +name = "assistant_tooling" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "futures 0.3.28", + "gpui", + "log", + "project", + "repair_json", + "schemars", + "serde", + "serde_json", + "settings", + "sum_tree", + "ui", + "unindent", + "util", +] + +[[package]] +name = "async-broadcast" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "258b52a1aa741b9f09783b2d86cf0aeeb617bbf847f6933340a39644227acbdb" +dependencies = [ + "event-listener 5.1.0", + "event-listener-strategy 0.5.0", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +dependencies = [ + "concurrent-queue", + "event-listener 5.1.0", + "event-listener-strategy 0.5.0", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-compat" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b48b4ff0c2026db683dea961cd8ea874737f56cffca86fa84415eaddc51c00d" +dependencies = [ + "futures-core", + "futures-io", + "once_cell", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-compression" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" +dependencies = [ + "deflate64", + "flate2", + "futures-core", + "futures-io", + "memchr", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fa3dc5f2a8564f07759c008b9109dc0d39de92a88d5588b8a5036d286383afb" +dependencies = [ + "async-lock 2.8.0", + "async-task", + "concurrent-queue", + "fastrand 1.9.0", + "futures-lite 1.13.0", + "slab", +] + +[[package]] +name = "async-fs" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-fs" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc19683171f287921f2405677dd2ed2549c3b3bda697a563ebc3a121ace2aba1" +dependencies = [ + "async-lock 3.3.0", + "blocking", + "futures-lite 2.2.0", +] + +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel 1.9.0", + "async-executor", + "async-io 1.13.0", + "async-lock 2.8.0", + "blocking", + "futures-lite 1.13.0", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock 2.8.0", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite 1.13.0", + "log", + "parking", + "polling 2.8.0", + "rustix 0.37.23", + "slab", + "socket2 0.4.9", + "waker-fn", +] + +[[package]] +name = "async-io" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65" +dependencies = [ + "async-lock 3.3.0", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite 2.2.0", + "parking", + "polling 3.3.2", + "rustix 0.38.32", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener 2.5.3", +] + +[[package]] +name = "async-lock" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +dependencies = [ + "event-listener 4.0.3", + "event-listener-strategy 0.4.0", + "pin-project-lite", +] + +[[package]] +name = 
"async-native-tls" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9e7a929bd34c68a82d58a4de7f86fffdaf97fb2af850162a7bb19dd7269b33" +dependencies = [ + "async-std", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-native-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9343dc5acf07e79ff82d0c37899f079db3534d99f189a1837c8e549c99405bec" +dependencies = [ + "futures-util", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-net" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4051e67316bc7eff608fe723df5d32ed639946adcd69e07df41fd42a7b411f1f" +dependencies = [ + "async-io 1.13.0", + "autocfg", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "async-net" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7" +dependencies = [ + "async-io 2.3.1", + "blocking", + "futures-lite 2.2.0", +] + +[[package]] +name = "async-pipe" +version = "0.1.3" +source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553" +dependencies = [ + "futures 0.3.28", + "log", +] + +[[package]] +name = "async-process" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9" +dependencies = [ + "async-io 1.13.0", + "async-lock 2.8.0", + "autocfg", + "blocking", + "cfg-if", + "event-listener 2.5.3", + "futures-lite 1.13.0", + "rustix 0.37.23", + "signal-hook", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-process" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "451e3cf68011bd56771c79db04a9e333095ab6349f7e47592b788e9b98720cc8" +dependencies = [ + "async-channel 2.2.0", + "async-io 2.3.1", + "async-lock 3.3.0", + "async-signal", + "blocking", + "cfg-if", + "event-listener 5.1.0", + "futures-lite 2.2.0", + "rustix 0.38.32", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-recursion" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7d78656ba01f1b93024b7c3a0467f1608e4be67d725749fdcd7d2c7678fd7a2" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-recursion" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "async-signal" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +dependencies = [ + "async-io 2.3.1", + "async-lock 2.8.0", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix 0.38.32", + "signal-hook-registry", + "slab", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io 1.13.0", + "async-lock 2.8.0", + "async-process 1.7.0", + "crossbeam-utils", + "futures-channel", + 
"futures-core", + "futures-io", + "futures-lite 1.13.0", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "async-tar" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c49359998a76e32ef6e870dbc079ebad8f1e53e8441c5dd39d27b44493fe331" +dependencies = [ + "async-std", + "filetime", + "libc", + "pin-project", + "redox_syscall 0.2.16", + "xattr", +] + +[[package]] +name = "async-task" +version = "4.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" + +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "async-tungstenite" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5682ea0913e5c20780fe5785abacb85a411e7437bf52a1bedb93ddb3972cb8dd" +dependencies = [ + "async-native-tls 0.3.3", + "async-std", + "futures-io", + "futures-util", + "log", + "pin-project-lite", + "tungstenite 0.16.0", +] + +[[package]] +name = "async_zip" +version = "0.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52" +dependencies = [ + "async-compression", + "crc32fast", + "futures-lite 2.2.0", + "pin-project", + "thiserror", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" + +[[package]] +name = "atomic-waker" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "audio" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "derive_more", + "gpui", + "parking_lot", + "rodio", + "util", +] + +[[package]] +name = "auto_update" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "db", + "editor", + "gpui", + "http 0.1.0", + "isahc", + "log", + "markdown_preview", + "menu", + "release_channel", + "schemars", + "serde", + "serde_derive", + "serde_json", + 
"settings", + "smol", + "tempfile", + "util", + "workspace", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "aws-config" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7af266887e24cd5f6d2ea7433cacd25dcd4773b7f70e488701968a7cdf51df57" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes 1.5.0", + "fastrand 2.0.0", + "hex", + "http 0.2.9", + "hyper", + "ring", + "time", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d56f287a9e65e4914bfedb5b22c056b65e4c232fca512d5509a9df36386759f" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-runtime" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d6a29eca8ea8982028a4df81883e7001e250a21d323b86418884b5345950a4b" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes 1.5.0", + "fastrand 2.0.0", + "http 0.2.9", + "http-body", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c977e92277652aefb9a76a0fca652b26757d6845dce0d7bf4426da80f13d85b0" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes 1.5.0", + "http 0.2.9", + "http-body", + "once_cell", + "percent-encoding", + "regex-lite", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d7f527c7b28af1a641f7d89f9e6a4863e8ec00f39d2b731b056fc5ec5ce829" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes 1.5.0", + "http 0.2.9", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d0be3224cd574ee8ab5fd7c32087876f25c134c27ac603fcb38669ed8d346b0" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes 1.5.0", + "http 0.2.9", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b3167c60d82a13bbaef569da06041644ff41e85c6377e5dad53fa2526ccfe9d" +dependencies 
= [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.9", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b1cbe0eee57a213039088dbdeca7be9352f24e0d72332d961e8a1cb388f82d" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes 1.5.0", + "crypto-bigint 0.5.5", + "form_urlencoded", + "hex", + "hmac 0.12.1", + "http 0.2.9", + "http 1.0.0", + "once_cell", + "p256", + "percent-encoding", + "ring", + "sha2 0.10.7", + "subtle", + "time", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-async" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "426a5bc369ca7c8d3686439e46edc727f397a47ab3696b13f3ae8c81b3b36132" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.60.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ee554133eca2611b66d23548e48f9b44713befdb025ab76bc00185b878397a1" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes 1.5.0", + "crc32c", + "crc32fast", + "hex", + "http 0.2.9", + "http-body", + "md-5", + "pin-project-lite", + "sha1", + "sha2 0.10.7", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858" +dependencies = [ + "aws-smithy-types", + "bytes 1.5.0", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version = "0.60.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85d6a0619f7b67183067fa3b558f94f90753da2df8c04aeb7336d673f804b0b8" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes 1.5.0", + "bytes-utils", + "futures-core", + "http 0.2.9", + "http-body", + "once_cell", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.60.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1c1b5186b6f5c579bf0de1bcca9dd3d946d6d51361ea1d18131f6a0b64e13ae" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c0a2ce65882e788d2cf83ff28b9b16918de0460c47bf66c5da4f6c17b4c9694" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4cb6b3afa5fc9825a75675975dcc3e21764b5476bc91dbc63df4ea3d30a576e" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes 1.5.0", + "fastrand 2.0.0", + "h2", + "http 0.2.9", + "http-body", + "hyper", + "hyper-rustls", + "once_cell", + "pin-project-lite", + "pin-utils", + "rustls", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"23165433e80c04e8c09cee66d171292ae7234bae05fa9d5636e33095eae416b2" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes 1.5.0", + "http 0.2.9", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94a5bec34850b92c9a054dad57b95c1d47f25125f55973e19f6ad788f0381ff" +dependencies = [ + "base64-simd", + "bytes 1.5.0", + "bytes-utils", + "futures-core", + "http 0.2.9", + "http-body", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d16f94c9673412b7a72e3c3efec8de89081c320bf59ea12eed34c417a62ad600" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ff7e122ee50ca962e9de91f5850cc37e2184b1219611eef6d44aa85929b54f6" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "http 0.2.9", + "rustc_version", + "tracing", +] + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "base64 0.21.7", + "bitflags 1.3.2", + "bytes 1.5.0", + "futures-util", + "headers", + "http 0.2.9", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper", + "tokio", + "tokio-tungstenite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes 1.5.0", + "futures-util", + "http 0.2.9", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-extra" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a320103719de37b7b4da4c8eb629d4573f6bcfd3dfe80d3208806895ccf81d" +dependencies = [ + "axum", + "bytes 1.5.0", + "futures-util", + "http 0.2.9", + "mime", + "pin-project-lite", + "serde", + "serde_json", + "tokio", + "tower", + "tower-http 0.3.5", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide 0.7.1", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bigdecimal" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.64.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 1.0.109", +] + +[[package]] +name = "bindgen" +version = "0.65.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.59", + "which 4.4.2", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +dependencies = [ + "serde", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blade-graphics" +version = "0.4.0" +source = "git+https://github.com/kvark/blade?rev=e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c#e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c" +dependencies = [ + "ash", + "ash-window", + "bitflags 2.4.2", + "block", + "bytemuck", + "codespan-reporting", + 
"core-graphics-types", + "glow", + "gpu-alloc", + "gpu-alloc-ash", + "hidden-trait", + "js-sys", + "khronos-egl", + "libloading 0.8.0", + "log", + "metal", + "mint", + "naga", + "objc", + "raw-window-handle 0.6.0", + "slab", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "blade-macros" +version = "0.2.1" +source = "git+https://github.com/kvark/blade?rev=e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c#e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "block" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" +dependencies = [ + "async-channel 2.2.0", + "async-lock 3.3.0", + "async-task", + "fastrand 2.0.0", + "futures-io", + "futures-lite 2.2.0", + "piper", + "tracing", +] + +[[package]] +name = "borsh" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" +dependencies = [ + "borsh-derive", + "hashbrown 0.13.2", +] + +[[package]] +name = "borsh-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" +dependencies = [ + "borsh-derive-internal", + "borsh-schema-derive-internal", + "proc-macro-crate 0.1.5", + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "borsh-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "borsh-schema-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "breadcrumbs" +version = "0.1.0" +dependencies = [ + "editor", + "gpui", + "itertools 0.11.0", + "outline", + "theme", + "ui", + "workspace", +] + +[[package]] +name = "bstr" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c2f7349907b712260e64b0afe2f84692af14a454be26187d9df565c7f69266a" +dependencies = [ + "memchr", + "regex-automata 0.3.8", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "bytecheck" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "bytemuck" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374d28ec25809ee0e23827c2ab573d729e293f281dfe393500e7ad618baa61c6" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965ab7eb5f8f97d2a083c799f3a1b994fc397b2fe2da5d1da1626ce15a39f2b1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" +dependencies = [ + "byteorder", + "iovec", +] + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes 1.5.0", + "either", +] + +[[package]] +name = "call" +version = "0.1.0" +dependencies = [ + "anyhow", + "audio", + "client", + "collections", + "fs", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "language", + "live_kit_client", + "log", + "postage", + "project", + "schemars", + "serde", + "serde_derive", + "settings", + "util", +] + +[[package]] +name = "calloop" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fba7adb4dd5aa98e5553510223000e7148f621165ec5f9acd7113f6ca4995298" +dependencies = [ + "bitflags 2.4.2", + "log", + "polling 3.3.2", + "rustix 0.38.32", + "slab", + "thiserror", +] + +[[package]] +name = "calloop-wayland-source" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f0ea9b9476c7fad82841a8dbb380e2eae480c21910feba80725b46931ed8f02" +dependencies = [ + "calloop", + "rustix 0.38.32", + "wayland-backend", + "wayland-client", +] + +[[package]] +name = "cap-fs-ext" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "769f8cd02eb04d57f14e2e371ebb533f96817f9b2525d73a5c72b61ca7973747" +dependencies = [ + "cap-primitives", + "cap-std", + "io-lifetimes 2.0.3", + "windows-sys 0.52.0", +] + +[[package]] +name = "cap-net-ext" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ff6d3fb274292a9af283417e383afe6ded1fe66f6472d2c781216d3d80c218" +dependencies = [ + "cap-primitives", + "cap-std", + "rustix 0.38.32", + "smallvec", +] + +[[package]] +name 
= "cap-primitives" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90a0b44fc796b1a84535a63753d50ba3972c4db55c7255c186f79140e63d56d0" +dependencies = [ + "ambient-authority", + "fs-set-times", + "io-extras", + "io-lifetimes 2.0.3", + "ipnet", + "maybe-owned", + "rustix 0.38.32", + "windows-sys 0.52.0", + "winx", +] + +[[package]] +name = "cap-rand" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4327f08daac33a99bb03c54ae18c8f32c3ba31c728a33ddf683c6c6a5043de68" +dependencies = [ + "ambient-authority", + "rand 0.8.5", +] + +[[package]] +name = "cap-std" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "266626ce180cf9709f317d0bf9754e3a5006359d87f4bf792f06c9c5f1b63c0f" +dependencies = [ + "cap-primitives", + "io-extras", + "io-lifetimes 2.0.3", + "rustix 0.38.32", +] + +[[package]] +name = "cap-time-ext" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1353421ba83c19da60726e35db0a89abef984b3be183ff6f58c5b8084fcd0c5" +dependencies = [ + "ambient-authority", + "cap-primitives", + "iana-time-zone", + "once_cell", + "rustix 0.38.32", + "winx", +] + +[[package]] +name = "cargo_toml" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8cb1d556b8b8f36e5ca74938008be3ac102f5dcb5b68a0477e4249ae2291cd3" +dependencies = [ + "serde", + "toml 0.8.10", +] + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "castaway" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher 0.4.4", +] + +[[package]] +name = "cbindgen" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da6bc11b07529f16944307272d5bd9b22530bc7d05751717c9d416586cedab49" +dependencies = [ + "clap 3.2.25", + "heck 0.4.1", + "indexmap 1.9.3", + "log", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 1.0.109", + "tempfile", + "toml 0.5.11", +] + +[[package]] +name = "cc" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + +[[package]] +name = "channel" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "clock", + "collections", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "language", + "log", + "rand 0.8.5", + "release_channel", + "rpc", + "settings", + "sum_tree", + "text", + "time", + "util", +] + +[[package]] +name = "chrono" +version = "0.4.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.48.5", +] + +[[package]] +name = "chunked_transfer" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cca491388666e04d7248af3f60f0c40cfb0991c72205595d7c396e3510207d1a" + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "cipher" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" +dependencies = [ + "generic-array", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", + "zeroize", +] + +[[package]] +name = "clang-sys" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" +dependencies = [ + "glob", + "libc", + "libloading 0.7.4", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_lex 0.2.4", + "indexmap 1.9.3", + "strsim", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap" +version = "4.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1d7b8d5ec32af0fadc644bf1fd509a688c2103b185644bb1e29d164e0703136" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5179bb514e4d7c2051749d8fcefa2ed6d06a9f4e6d69faf3805f5d80b8cf8d56" +dependencies = [ + "anstream", + "anstyle", + "clap_lex 0.5.1", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = 
"clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "clap_lex" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" + +[[package]] +name = "cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.4", + "core-foundation", + "core-services", + "exec", + "fork", + "ipc-channel", + "libc", + "once_cell", + "plist", + "release_channel", + "serde", + "util", +] + +[[package]] +name = "clickhouse" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0875e527e299fc5f4faba42870bf199a39ab0bb2dbba1b8aef0a2151451130f" +dependencies = [ + "bstr", + "bytes 1.5.0", + "clickhouse-derive", + "clickhouse-rs-cityhash-sys", + "futures 0.3.28", + "hyper", + "hyper-tls", + "lz4", + "sealed", + "serde", + "static_assertions", + "thiserror", + "tokio", + "url", +] + +[[package]] +name = "clickhouse-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18af5425854858c507eec70f7deb4d5d8cec4216fcb086283a78872387281ea5" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 1.0.109", +] + +[[package]] +name = "clickhouse-rs-cityhash-sys" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4baf9d4700a28d6cb600e17ed6ae2b43298a5245f1f76b4eab63027ebfd592b9" +dependencies = [ + "cc", +] + +[[package]] +name = "client" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-native-tls 0.5.0", + "async-recursion 0.3.2", + "async-tungstenite", + "chrono", + "clock", + "collections", + "feature_flags", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "lazy_static", + "log", + "once_cell", + "parking_lot", + "postage", + "rand 0.8.5", + "release_channel", + "rpc", + "schemars", + "serde", + "serde_json", + "settings", + "sha2 0.10.7", + "smol", + "sysinfo", + "telemetry_events", + "tempfile", + "text", + "thiserror", + "time", + "tiny_http", + "url", + "util", +] + +[[package]] +name = "clipboard-win" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fdf5e01086b6be750428ba4a40619f847eb2e95756eee84b18e06e5f0b50342" +dependencies = [ + "lazy-bytes-cast", + "winapi", +] + +[[package]] +name = "clock" +version = "0.1.0" +dependencies = [ + "chrono", + "parking_lot", + "smallvec", +] + +[[package]] +name = "cmake" +version = "0.1.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" +dependencies = [ + "cc", +] + +[[package]] +name = "cocoa" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6140449f97a6e97f9511815c5632d84c8aacf8ac271ad77c559218161a1373c" +dependencies = [ + "bitflags 1.3.2", + "block", + "cocoa-foundation", + "core-foundation", + "core-graphics", + "foreign-types 0.5.0", + "libc", + "objc", +] + +[[package]] +name = "cocoa-foundation" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c6234cbb2e4c785b456c0644748b1ac416dd045799740356f8363dfe00c93f7" +dependencies = [ + "bitflags 1.3.2", + "block", + "core-foundation", + "core-graphics-types", + "libc", + "objc", +] + +[[package]] +name = "codespan-reporting" 
+version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "collab" +version = "0.44.0" +dependencies = [ + "anthropic", + "anyhow", + "async-trait", + "async-tungstenite", + "audio", + "aws-config", + "aws-sdk-s3", + "axum", + "axum-extra", + "base64 0.13.1", + "call", + "channel", + "chrono", + "clickhouse", + "client", + "clock", + "collab_ui", + "collections", + "ctor", + "dashmap", + "dev_server_projects", + "editor", + "env_logger", + "envy", + "file_finder", + "fs", + "futures 0.3.28", + "git", + "git_hosting_providers", + "google_ai", + "gpui", + "headless", + "hex", + "http 0.1.0", + "indoc", + "language", + "live_kit_client", + "live_kit_server", + "log", + "lsp", + "menu", + "multi_buffer", + "nanoid", + "node_runtime", + "notifications", + "open_ai", + "parking_lot", + "pretty_assertions", + "project", + "prometheus", + "prost", + "rand 0.8.5", + "release_channel", + "reqwest", + "rpc", + "rustc-demangle", + "scrypt", + "sea-orm", + "semantic_version", + "semver", + "serde", + "serde_derive", + "serde_json", + "settings", + "sha2 0.10.7", + "sqlx", + "subtle", + "supermaven_api", + "telemetry_events", + "text", + "theme", + "time", + "tokio", + "toml 0.8.10", + "tower", + "tower-http 0.4.4", + "tracing", + "tracing-subscriber", + "unindent", + "util", + "uuid", + "workspace", +] + +[[package]] +name = "collab_ui" +version = "0.1.0" +dependencies = [ + "anyhow", + "auto_update", + "call", + "channel", + "client", + "collections", + "db", + "dev_server_projects", + "editor", + "emojis", + "extensions_ui", + "futures 0.3.28", + "fuzzy", + "gpui", + "http 0.1.0", + "language", + "lazy_static", + "menu", + "notifications", + "parking_lot", + "picker", + "pretty_assertions", + "project", + "recent_projects", + "release_channel", + "rich_text", + "rpc", + "schemars", + "serde", + "serde_derive", + "serde_json", + "settings", + "smallvec", + "story", + "theme", + "theme_selector", + "time", + "time_format", + "tree-sitter-markdown", + "ui", + "util", + "vcs_menu", + "workspace", + "zed_actions", +] + +[[package]] +name = "collections" +version = "0.1.0" +dependencies = [ + "rustc-hash", +] + +[[package]] +name = "color" +version = "0.1.0" +dependencies = [ + "palette", +] + +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes 1.5.0", + "memchr", +] + +[[package]] +name = "command_palette" +version = "0.1.0" +dependencies = [ + "client", + "collections", + "command_palette_hooks", + "ctor", + "editor", + "env_logger", + "fuzzy", + "go_to_line", + "gpui", + "language", + "menu", + "picker", + "postage", + "project", + "serde", + "serde_json", + "settings", + "theme", + "ui", + "util", + "workspace", + "zed_actions", +] + +[[package]] +name = "command_palette_hooks" +version = "0.1.0" +dependencies = [ + "collections", + "derive_more", + "gpui", +] 
+ +[[package]] +name = "concurrent-queue" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.45.0", +] + +[[package]] +name = "const-cstr" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3d0b5ff30645a68f35ece8cea4556ca14ef8a1651455f789a099a0513532a6" + +[[package]] +name = "const-oid" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "copilot" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-compression", + "async-std", + "async-tar", + "client", + "clock", + "collections", + "command_palette_hooks", + "editor", + "fs", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "indoc", + "language", + "lsp", + "menu", + "node_runtime", + "parking_lot", + "project", + "rpc", + "serde", + "serde_json", + "settings", + "smol", + "theme", + "ui", + "util", + "workspace", +] + +[[package]] +name = "copypasta" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deb85422867ca93da58b7f95fb5c0c10f6183ed6e1ef8841568968a896d3a858" +dependencies = [ + "clipboard-win", + "objc", + "objc-foundation", + "objc_id", + "smithay-clipboard", + "x11-clipboard", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys 0.8.6", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "core-graphics" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"970a29baf4110c26fedbc7f82107d42c23f7e88e404c4577ed73fe99ff85a212" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-graphics-types", + "foreign-types 0.5.0", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45390e6114f68f718cc7a830514a96f903cccd70d02a8f6d9f643ac4ba45afaf" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "libc", +] + +[[package]] +name = "core-services" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92567e81db522550ebaf742c5d875624ec7820c2c7ee5f8c60e4ce7c2ae3c0fd" +dependencies = [ + "core-foundation", +] + +[[package]] +name = "core-text" +version = "20.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9d2790b5c08465d49f8dc05c8bcae9fea467855947db39b0f8145c091aaced5" +dependencies = [ + "core-foundation", + "core-graphics", + "foreign-types 0.5.0", + "libc", +] + +[[package]] +name = "coreaudio-rs" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb17e2d1795b1996419648915df94bc7103c28f7b48062d7acf4652fc371b2ff" +dependencies = [ + "bitflags 1.3.2", + "core-foundation-sys 0.6.2", + "coreaudio-sys", +] + +[[package]] +name = "coreaudio-sys" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f034b2258e6c4ade2f73bf87b21047567fb913ee9550837c2316d139b0262b24" +dependencies = [ + "bindgen 0.64.0", +] + +[[package]] +name = "cosmic-text" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c578f2b9abb4d5f3fbb12aba4008084d435dc6a8425c195cfe0b3594bfea0c25" +dependencies = [ + "bitflags 2.4.2", + "fontdb", + "libm", + "log", + "rangemap", + "rustc-hash", + "rustybuzz", + "self_cell", + "swash", + "sys-locale", + "ttf-parser", + "unicode-bidi", + "unicode-linebreak", + "unicode-script", + "unicode-segmentation", +] + +[[package]] +name = "cpal" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d959d90e938c5493000514b446987c07aed46c668faaa7d34d6c7a67b1a578c" +dependencies = [ + "alsa", + "core-foundation-sys 0.8.6", + "coreaudio-rs", + "dasp_sample", + "jni 0.19.0", + "js-sys", + "libc", + "mach2", + "ndk", + "ndk-context", + "oboe", + "once_cell", + "parking_lot", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows 0.46.0", +] + +[[package]] +name = "cpp_demangle" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8227005286ec39567949b33df9896bcadfa6051bccca2488129f108ca23119" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "cranelift-bforest" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a535eb1cf5a6003197dc569320c40c1cb2d2f97ef5d5348eebf067f20957381" +dependencies = [ + "cranelift-entity", +] + +[[package]] +name = "cranelift-codegen" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11b5066db32cec1492573827183af2142d2d88fe85a83cfc9e73f0f63d3788d4" +dependencies = [ + "bumpalo", + "cranelift-bforest", + "cranelift-codegen-meta", + "cranelift-codegen-shared", + 
"cranelift-control", + "cranelift-entity", + "cranelift-isle", + "gimli", + "hashbrown 0.14.0", + "log", + "regalloc2", + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cranelift-codegen-meta" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64942e5774308e835fbad4dd25f253105412c90324631910e1ec27963147bddb" +dependencies = [ + "cranelift-codegen-shared", +] + +[[package]] +name = "cranelift-codegen-shared" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39c33db9a86dd6d8d04166a10c53deb477aeea3500eaaefca682e4eda9bb986" + +[[package]] +name = "cranelift-control" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b7fc4937613aea3156a0538800a17bf56f345a5da2e79ae3df58488c93d867f" +dependencies = [ + "arbitrary", +] + +[[package]] +name = "cranelift-entity" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f85575e79a153ce1ddbfb7fe1813519b4bfe1eb200cc9c8353b45ad123ae4d36" +dependencies = [ + "serde", + "serde_derive", +] + +[[package]] +name = "cranelift-frontend" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbc31d6c0ab2249fe0c21e988256b42f5f401ab2673b4fc40076c82a698bdfb9" +dependencies = [ + "cranelift-codegen", + "log", + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cranelift-isle" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc14f37e3314c0e4c53779c2f46753bf242efff76ee9473757a1fff3b495ad37" + +[[package]] +name = "cranelift-native" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ea5375f76ab31f9800a23fb2b440810286a6f669a3eb467cdd7ff255ea64268" +dependencies = [ + "cranelift-codegen", + "libc", + "target-lexicon", +] + +[[package]] +name = "cranelift-wasm" +version = "0.106.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79851dba01b1fa83fad95134aa27beca88dc4b027121d92ab19788582389dc5f" +dependencies = [ + "cranelift-codegen", + "cranelift-entity", + "cranelift-frontend", + "itertools 0.12.1", + "log", + "smallvec", + "wasmparser", + "wasmtime-types", +] + +[[package]] +name = "crc" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" + +[[package]] +name = "crc32c" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast", + "ciborium", + "clap 3.2.25", + "criterion-plot", + "itertools 0.10.5", + "lazy_static", + 
"num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "ctor" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d2b3721e861707777e3195b0158f950ae6dc4a27e4d02ff9f67e3eb3de199e" +dependencies = [ + "quote", + "syn 2.0.59", +] + +[[package]] +name = "ctrlc" +version = "3.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" +dependencies = [ + "nix 0.28.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "curl" +version = 
"0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22" +dependencies = [ + "curl-sys", + "libc", + "openssl-probe", + "openssl-sys", + "schannel", + "socket2 0.4.9", + "winapi", +] + +[[package]] +name = "curl-sys" +version = "0.4.67+curl-8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cc35d066510b197a0f72de863736641539957628c8a42e70e27c66849e77c34" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", + "windows-sys 0.48.0", +] + +[[package]] +name = "cursor-icon" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96a6ac251f4a2aca6b3f91340350eab87ae57c3f127ffeb585e92bd336717991" + +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.0", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "dasp_sample" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f" + +[[package]] +name = "data-encoding" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" + +[[package]] +name = "data-url" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" + +[[package]] +name = "db" +version = "0.1.0" +dependencies = [ + "anyhow", + "gpui", + "indoc", + "lazy_static", + "log", + "release_channel", + "smol", + "sqlez", + "sqlez_macros", + "tempfile", + "util", +] + +[[package]] +name = "deflate" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73770f8e1fe7d64df17ca66ad28994a0a623ea497fa69486e14984e715c5d174" +dependencies = [ + "adler32", + "byteorder", +] + +[[package]] +name = "deflate64" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83ace6c86376be0b6cdcf3fb41882e81d94b31587573d1cfa9d01cd06bba210d" + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +dependencies = [ + "serde", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + +[[package]] +name = "derive_refineable" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "dev_server_projects" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "gpui", + "rpc", + "serde", + "serde_json", +] + +[[package]] +name = "diagnostics" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "collections", + "ctor", + "editor", + "env_logger", + "futures 0.3.28", + "gpui", + "language", + "log", + "lsp", + "pretty_assertions", + "project", + "rand 0.8.5", + "schemars", + "serde", + "serde_json", + "settings", + "theme", + "ui", + "unindent", + "util", + "workspace", +] + +[[package]] +name = "dialoguer" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" +dependencies = [ + "console", + "fuzzy-matcher", + "shell-words", + "tempfile", + "thiserror", + "zeroize", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dlib" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" +dependencies = [ + "libloading 0.8.0", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] 
+name = "downcast-rs" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" + +[[package]] +name = "doxygen-rs" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "415b6ec780d34dcf624666747194393603d0373b7141eef01d12ee58881507d9" +dependencies = [ + "phf", +] + +[[package]] +name = "dwrote" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439a1c2ba5611ad3ed731280541d36d2e9c4ac5e7fb818a27b604bdc5a6aa65b" +dependencies = [ + "lazy_static", + "libc", + "winapi", + "wio", +] + +[[package]] +name = "dyn-clone" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d2f3407d9a573d666de4b5bdf10569d73ca9478087346697dcbae6244bfbcd" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der 0.6.1", + "elliptic-curve", + "rfc6979", + "signature 1.6.4", +] + +[[package]] +name = "editor" +version = "0.1.0" +dependencies = [ + "aho-corasick", + "anyhow", + "client", + "clock", + "collections", + "convert_case 0.6.0", + "ctor", + "db", + "emojis", + "env_logger", + "futures 0.3.28", + "fuzzy", + "git", + "gpui", + "http 0.1.0", + "indoc", + "itertools 0.11.0", + "language", + "lazy_static", + "linkify", + "log", + "lsp", + "multi_buffer", + "ordered-float 2.10.0", + "parking_lot", + "project", + "rand 0.8.5", + "release_channel", + "rpc", + "schemars", + "serde", + "serde_json", + "settings", + "smallvec", + "smol", + "snippet", + "sum_tree", + "task", + "text", + "theme", + "time", + "time_format", + "tree-sitter-html", + "tree-sitter-rust", + "tree-sitter-typescript", + "ui", + "unindent", + "url", + "util", + "workspace", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint 0.4.9", + "der 0.6.1", + "digest 0.10.7", + "ff", + "generic-array", + "group", + "pkcs8 0.9.0", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "embed-resource" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6985554d0688b687c5cb73898a34fbe3ad6c24c58c238a4d91d5e840670ee9d" +dependencies = [ + "cc", + "memchr", + "rustc_version", + "toml 0.8.10", + "vswhom", + "winreg 0.52.0", +] + +[[package]] +name = "emojis" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee61eb945bff65ee7d19d157d39c67c33290ff0742907413fd5eefd29edc979" +dependencies = [ + "phf", +] + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + 
"cfg-if", +] + +[[package]] +name = "endi" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3d8a32ae18130a3c84dd492d4215c3d913c3b07c6b63c2eb3eb7ff1101ab7bf" + +[[package]] +name = "enumflags2" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3278c9d5fb675e0a51dabcf4c0d355f692b064171535ba72361be1528a9d8e8d" +dependencies = [ + "enumflags2_derive", + "serde", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "envy" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965" +dependencies = [ + "serde", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "erased-serde" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "etagere" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf22f748754352918e082e0039335ee92454a5d62bcaf69b5e8daf5907d9644" +dependencies = [ + "euclid", + "svg_fmt", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "euclid" +version = "0.22.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f253bc5c813ca05792837a0ff4b3a580336b224512d48f7eda1d7dd9210787" +dependencies = [ + "num-traits", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener" +version = "5.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7ad6fd685ce13acd6d9541a30f6db6567a7a24c9ffd4ba2955d29e3f22c8b27" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" +dependencies = [ + "event-listener 4.0.3", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291" +dependencies = [ + "event-listener 5.1.0", + "pin-project-lite", +] + +[[package]] +name = "exec" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "886b70328cba8871bfc025858e1de4be16b1d5088f2ba50b57816f4210672615" +dependencies = [ + "errno 0.2.8", + "libc", +] + +[[package]] +name = "extension" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-compression", + "async-tar", + "async-trait", + "cap-std", + "client", + "collections", + "ctor", + "env_logger", + "fs", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "isahc", + "language", + "log", + "lsp", + "node_runtime", + "parking_lot", + "project", + "schemars", + "semantic_version", + "serde", + "serde_json", + "serde_json_lenient", + "settings", + "task", + "theme", + "toml 0.8.10", + "url", + "util", + "wasm-encoder", + "wasmparser", + "wasmtime", + "wasmtime-wasi", + "wit-component", +] + +[[package]] +name = "extension_cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.4", + "env_logger", + "extension", + "fs", + "language", + "log", + "rpc", + "serde", + "serde_json", + "theme", + "tokio", + "toml 0.8.10", + "tree-sitter", + "wasmtime", +] + +[[package]] +name = "extensions_ui" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "db", + "editor", + "extension", + "fs", + "fuzzy", + "gpui", + "language", + "picker", + "project", + "semantic_version", + "serde", + "settings", + "smallvec", + "theme", + "theme_selector", + "ui", + "util", + "workspace", +] + +[[package]] +name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + +[[package]] +name = "fancy-regex" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7493d4c459da9f84325ad297371a6b2b8a162800873a22e3b6b6512e61d18c05" +dependencies = [ + "bit-set", + "regex", +] + +[[package]] +name = "fast-srgb8" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd2e7510819d6fbf51a5545c8f922716ecfb14df168a3242f7d33e0239efe6a1" + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] +name = "fd-lock" 
+version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e5768da2206272c81ef0b5e951a41862938a6070da63bcea197899942d3b947" +dependencies = [ + "cfg-if", + "rustix 0.38.32", + "windows-sys 0.52.0", +] + +[[package]] +name = "fdeflate" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f9bfee30e4dedf0ab8b422f03af778d9612b63f502710fc500a334ebe2de645" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "feature_flags" +version = "0.1.0" +dependencies = [ + "gpui", +] + +[[package]] +name = "feedback" +version = "0.1.0" +dependencies = [ + "anyhow", + "bitflags 2.4.2", + "client", + "db", + "editor", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "human_bytes", + "isahc", + "language", + "log", + "menu", + "project", + "regex", + "release_channel", + "serde", + "serde_derive", + "serde_json", + "smol", + "sysinfo", + "ui", + "urlencoding", + "util", + "workspace", +] + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "file_finder" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "ctor", + "editor", + "env_logger", + "futures 0.3.28", + "fuzzy", + "gpui", + "itertools 0.11.0", + "language", + "menu", + "picker", + "project", + "serde_json", + "settings", + "text", + "theme", + "ui", + "util", + "workspace", +] + +[[package]] +name = "file_icons" +version = "0.1.0" +dependencies = [ + "collections", + "gpui", + "serde", + "serde_derive", + "serde_json", + "util", +] + +[[package]] +name = "filedescriptor" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7199d965852c3bac31f779ef99cbb4537f80e952e2d6aa0ffeb30cce00f4f46e" +dependencies = [ + "libc", + "thiserror", + "winapi", +] + +[[package]] +name = "filetime" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.3.5", + "windows-sys 0.48.0", +] + +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flate2" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" +dependencies = [ + "crc32fast", + "miniz_oxide 0.7.1", +] + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" + +[[package]] +name = "float-ord" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bad48618fdb549078c333a7a8528acb57af271d0433bdecd523eb620628364e" + +[[package]] +name = "flume" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +dependencies = [ + 
"futures-core", + "futures-sink", + "nanorand", + "spin 0.9.8", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "font-kit" +version = "0.11.0" +source = "git+https://github.com/zed-industries/font-kit?rev=5a5c4d4#5a5c4d4ca395c74eb0abde38508e170ce0fd761a" +dependencies = [ + "bitflags 1.3.2", + "byteorder", + "core-foundation", + "core-graphics", + "core-text", + "dirs-next", + "dwrote", + "float-ord", + "freetype", + "lazy_static", + "libc", + "log", + "pathfinder_geometry", + "pathfinder_simd", + "walkdir", + "winapi", + "yeslogic-fontconfig-sys", +] + +[[package]] +name = "font-types" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bd7f3ea17572640b606b35df42cfb6ecdf003704b062580e59918692190b73d" + +[[package]] +name = "fontconfig-parser" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a595cb550439a117696039dfc69830492058211b771a2a165379f2a1a53d84d" +dependencies = [ + "roxmltree", +] + +[[package]] +name = "fontdb" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0299020c3ef3f60f526a4f64ab4a3d4ce116b1acbf24cdd22da0068e5d81dc3" +dependencies = [ + "fontconfig-parser", + "log", + "memmap2 0.9.4", + "slotmap", + "tinyvec", + "ttf-parser", +] + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared 0.1.1", +] + +[[package]] +name = "foreign-types" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" +dependencies = [ + "foreign-types-macros", + "foreign-types-shared 0.3.1", +] + +[[package]] +name = "foreign-types-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "foreign-types-shared" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" + +[[package]] +name = "fork" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60e74d3423998a57e9d906e49252fb79eb4a04d5cdfe188fb1b7ff9fc076a8ed" +dependencies = [ + "libc", +] + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "freetype" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee38378a9e3db1cc693b4f88d166ae375338a0ff75cb8263e1c601d51f35dc6" +dependencies = [ + "freetype-sys", + "libc", +] + +[[package]] +name = "freetype-sys" +version = "0.13.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a37d4011c0cc628dfa766fcc195454f4b068d7afdc2adfd28861191d866e731a" +dependencies = [ + "cmake", + "libc", + "pkg-config", +] + +[[package]] +name = "fs" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-tar", + "async-trait", + "cocoa", + "collections", + "fsevent", + "futures 0.3.28", + "git", + "git2", + "gpui", + "lazy_static", + "libc", + "notify", + "objc", + "parking_lot", + "rope", + "serde", + "serde_json", + "smol", + "tempfile", + "text", + "time", + "util", + "windows 0.56.0", +] + +[[package]] +name = "fs-set-times" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "033b337d725b97690d86893f9de22b67b80dcc4e9ad815f348254c38119db8fb" +dependencies = [ + "io-lifetimes 2.0.3", + "rustix 0.38.32", + "windows-sys 0.52.0", +] + +[[package]] +name = "fsevent" +version = "0.1.0" +dependencies = [ + "bitflags 2.4.2", + "core-foundation", + "fsevent-sys 3.1.0", + "parking_lot", + "tempfile", +] + +[[package]] +name = "fsevent-sys" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca6f5e6817058771c10f0eb0f05ddf1e35844266f972004fe8e4b21fda295bd5" +dependencies = [ + "libc", +] + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-batch" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f444c45a1cb86f2a7e301469fd50a82084a60dadc25d94529a8312276ecb71a" +dependencies = [ + "futures 0.3.28", + "futures-timer", + "pin-utils", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = 
"futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-lite" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" +dependencies = [ + "fastrand 2.0.0", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures 0.1.31", + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", + "tokio-io", +] + +[[package]] +name = "fuzzy" +version = "0.1.0" +dependencies = [ + "gpui", + "util", +] + +[[package]] +name = "fuzzy-matcher" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94" +dependencies = [ + "thread_local", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "gethostname" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0176e0459c2e4a1fe232f984bca6890e681076abb9934f6cea7c326f3fc47818" +dependencies = [ + "libc", + "windows-targets 0.48.5", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + 
+[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "gif" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "gimli" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +dependencies = [ + "fallible-iterator", + "indexmap 2.0.0", + "stable_deref_trait", +] + +[[package]] +name = "git" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "clock", + "collections", + "derive_more", + "git2", + "gpui", + "http 0.1.0", + "lazy_static", + "log", + "parking_lot", + "pretty_assertions", + "rope", + "serde", + "serde_json", + "smol", + "sum_tree", + "text", + "time", + "unindent", + "url", + "util", + "windows 0.56.0", +] + +[[package]] +name = "git2" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" +dependencies = [ + "bitflags 2.4.2", + "libc", + "libgit2-sys", + "log", + "url", +] + +[[package]] +name = "git_hosting_providers" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "futures 0.3.28", + "git", + "gpui", + "http 0.1.0", + "isahc", + "pretty_assertions", + "regex", + "serde", + "serde_json", + "unindent", + "url", +] + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "globset" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata 0.4.5", + "regex-syntax 0.8.2", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "glow" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd348e04c43b32574f2de31c8bb397d96c9fcfa1371bd4ca6d8bdc464ab121b1" +dependencies = [ + "js-sys", + "slotmap", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "go_to_line" +version = "0.1.0" +dependencies = [ + "anyhow", + "editor", + "gpui", + "indoc", + "language", + "menu", + "project", + "rope", + "schemars", + "serde", + "serde_json", + "settings", + "text", + "theme", + "tree-sitter-rust", + "tree-sitter-typescript", + "ui", + "util", + "workspace", +] + +[[package]] +name = "google_ai" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.28", + "http 0.1.0", + "serde", + "serde_json", +] + +[[package]] +name = "gpu-alloc" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbcd2dba93594b227a1f57ee09b8b9da8892c34d55aa332e034a228d0fe6a171" +dependencies = [ + "bitflags 2.4.2", + 
"gpu-alloc-types", +] + +[[package]] +name = "gpu-alloc-ash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbda7a18a29bc98c2e0de0435c347df935bf59489935d0cbd0b73f1679b6f79a" +dependencies = [ + "ash", + "gpu-alloc-types", + "tinyvec", +] + +[[package]] +name = "gpu-alloc-types" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98ff03b468aa837d70984d55f5d3f846f6ec31fe34bbb97c4f85219caeee1ca4" +dependencies = [ + "bitflags 2.4.2", +] + +[[package]] +name = "gpui" +version = "0.1.0" +dependencies = [ + "anyhow", + "as-raw-xcb-connection", + "ashpd", + "async-task", + "backtrace", + "bindgen 0.65.1", + "blade-graphics", + "blade-macros", + "block", + "bytemuck", + "calloop", + "calloop-wayland-source", + "cbindgen", + "cocoa", + "collections", + "copypasta", + "core-foundation", + "core-graphics", + "core-text", + "cosmic-text", + "ctor", + "derive_more", + "embed-resource", + "env_logger", + "etagere", + "filedescriptor", + "flume", + "font-kit", + "foreign-types 0.5.0", + "futures 0.3.28", + "gpui_macros", + "http 0.1.0", + "image", + "itertools 0.11.0", + "lazy_static", + "linkme", + "log", + "media", + "metal", + "num_cpus", + "objc", + "oo7", + "open", + "parking", + "parking_lot", + "pathfinder_geometry", + "postage", + "profiling", + "rand 0.8.5", + "raw-window-handle 0.6.0", + "refineable", + "resvg", + "schemars", + "seahash", + "semantic_version", + "serde", + "serde_derive", + "serde_json", + "slotmap", + "smallvec", + "smol", + "sum_tree", + "taffy", + "thiserror", + "time", + "usvg", + "util", + "uuid", + "waker-fn", + "wayland-backend", + "wayland-client", + "wayland-cursor", + "wayland-protocols", + "wayland-protocols-plasma", + "windows 0.56.0", + "windows-core 0.56.0", + "x11rb", + "xim", + "xkbcommon", +] + +[[package]] +name = "gpui_macros" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "grid" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d196ffc1627db18a531359249b2bf8416178d84b729f3cebeb278f285fb9b58c" + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +dependencies = [ + "bytes 1.5.0", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.9", + "indexmap 1.9.3", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.8", +] + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.8", +] + +[[package]] +name = "hashbrown" 
+version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +dependencies = [ + "ahash 0.8.8", + "allocator-api2", +] + +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.0", +] + +[[package]] +name = "headers" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" +dependencies = [ + "base64 0.21.7", + "bytes 1.5.0", + "headers-core", + "http 0.2.9", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http 0.2.9", +] + +[[package]] +name = "headless" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "fs", + "futures 0.3.28", + "gpui", + "language", + "log", + "node_runtime", + "postage", + "project", + "rpc", + "settings", + "shellexpand", + "signal-hook", + "util", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heed" +version = "0.20.0-alpha.9" +source = "git+https://github.com/meilisearch/heed?rev=036ac23f73a021894974b9adc815bc95b3e0482a#036ac23f73a021894974b9adc815bc95b3e0482a" +dependencies = [ + "bitflags 2.4.2", + "byteorder", + "heed-traits", + "heed-types", + "libc", + "lmdb-master-sys", + "once_cell", + "page_size", + "serde", + "synchronoise", + "url", +] + +[[package]] +name = "heed-traits" +version = "0.20.0-alpha.9" +source = "git+https://github.com/meilisearch/heed?rev=036ac23f73a021894974b9adc815bc95b3e0482a#036ac23f73a021894974b9adc815bc95b3e0482a" + +[[package]] +name = "heed-types" +version = "0.20.0-alpha.9" +source = "git+https://github.com/meilisearch/heed?rev=036ac23f73a021894974b9adc815bc95b3e0482a#036ac23f73a021894974b9adc815bc95b3e0482a" +dependencies = [ + "bincode", + "byteorder", + "heed-traits", + "serde", + "serde_json", +] + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hexf-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df" + +[[package]] +name = "hidden-trait" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "68ed9e850438ac849bec07e7d09fbe9309cbd396a5988c30b010580ce08860df" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "hkdf" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +dependencies = [ + "hmac 0.12.1", +] + +[[package]] +name = "hmac" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "hound" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d13cdbd5dbb29f9c88095bbdc2590c9cba0d0a1269b983fef6b2cdd7e9f4db1" + +[[package]] +name = "http" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.28", + "futures-lite 1.13.0", + "isahc", + "log", + "serde", + "serde_json", + "url", +] + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes 1.5.0", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" +dependencies = [ + "bytes 1.5.0", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes 1.5.0", + "http 0.2.9", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "human_bytes" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes 1.5.0", + 
"futures-channel", + "futures-core", + "futures-util", + "h2", + "http 0.2.9", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.4.9", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.9", + "hyper", + "log", + "rustls", + "rustls-native-certs", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes 1.5.0", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +dependencies = [ + "android_system_properties", + "core-foundation-sys 0.8.6", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows 0.48.0", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "id-arena" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "ignore" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +dependencies = [ + "crossbeam-deque", + "globset", + "log", + "memchr", + "regex-automata 0.4.5", + "same-file", + "walkdir", + "winapi-util", +] + +[[package]] +name = "image" +version = "0.23.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24ffcb7e7244a9bf19d35bf2883b9c080c4ced3c07a9895572178cdb8f13f6a1" +dependencies = [ + "bytemuck", + "byteorder", + "color_quant", + "gif", + "jpeg-decoder", + "num-iter", + "num-rational 0.3.2", + "num-traits", + "png 0.16.8", + "scoped_threadpool", + "tiff", +] + +[[package]] +name = "image_viewer" +version = "0.1.0" +dependencies = [ + "anyhow", + "db", + "gpui", + "project", + "ui", + "util", + "workspace", +] + +[[package]] +name = "imagesize" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "029d73f573d8e8d63e6d5020011d3255b28c3ba85d6cf870a07184ed23de9284" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", + "serde", 
+] + +[[package]] +name = "indoc" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" + +[[package]] +name = "inherent" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce243b1bfa62ffc028f1cc3b6034ec63d649f3031bc8a4fbbb004e1ac17d1f68" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "inline_completion_button" +version = "0.1.0" +dependencies = [ + "anyhow", + "copilot", + "editor", + "fs", + "futures 0.3.28", + "gpui", + "indoc", + "language", + "lsp", + "project", + "serde_json", + "settings", + "supermaven", + "theme", + "ui", + "util", + "workspace", + "zed_actions", +] + +[[package]] +name = "inotify" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "install_cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "gpui", + "smol", + "util", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-extras" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c301e73fb90e8a29e600a9f402d095765f74310d582916a952f618836a1bd1ed" +dependencies = [ + "io-lifetimes 2.0.3", + "windows-sys 0.52.0", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.3", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "io-lifetimes" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a611371471e98973dbcab4e0ec66c31a10bc356eeb4d54a0e05eac8158fe38c" + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +dependencies = [ + "libc", +] + +[[package]] +name = "ipc-channel" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ab3a34c91b7e84a72643bd75d1bac3afd241f78f9859fe0b5e5b2a6a75732c2" +dependencies = [ + "bincode", + "crossbeam-channel", + "fnv", + "lazy_static", + "libc", + "mio", + "rand 0.8.5", + "serde", + "tempfile", + "uuid", + "windows 0.48.0", +] + +[[package]] +name = "ipnet" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" + +[[package]] +name = "is-docker" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3" +dependencies = [ + "once_cell", +] + +[[package]] +name = "is-wsl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5" +dependencies = [ + "is-docker", + "once_cell", +] + +[[package]] +name = "isahc" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" +dependencies = [ + "async-channel 1.9.0", + "castaway", + "crossbeam-utils", + "curl", + "curl-sys", + "encoding_rs", + "event-listener 2.5.3", + "futures-lite 1.13.0", + "http 0.2.9", + "log", + "mime", + "once_cell", + "polling 2.8.0", + "slab", + "sluice", + "tracing", + "tracing-futures", + "url", + "waker-fn", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "jni" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "039022cdf4d7b1cf548d31f60ae783138e5fd42013f6271049d7df7afadef96c" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jobserver" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" +dependencies = [ + "libc", +] + +[[package]] +name = "journal" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "editor", + "gpui", + "log", + "schemars", + "serde", + "settings", + "shellexpand", + "workspace", +] + +[[package]] +name = "jpeg-decoder" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229d53d58899083193af11e15917b5640cd40b29ff475a1fe4ef725deb02d0f2" +dependencies = [ + "rayon", +] + +[[package]] +name = "js-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jwt" +version = "0.16.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6204285f77fe7d9784db3fdc449ecce1a0114927a51d5a41c4c7a292011c015f" +dependencies = [ + "base64 0.13.1", + "crypto-common", + "digest 0.10.7", + "hmac 0.12.1", + "serde", + "serde_json", + "sha2 0.10.7", +] + +[[package]] +name = "khronos-egl" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1382b16c04aeb821453d6215a3c80ba78f24c6595c5aa85653378aabe0c83e3" +dependencies = [ + "libc", + "libloading 0.8.0", +] + +[[package]] +name = "kqueue" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "kurbo" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e5aa9f0f96a938266bdb12928a67169e8d22c6a786fda8ed984b85e6ba93c3c" +dependencies = [ + "arrayvec", + "smallvec", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "language" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "clock", + "collections", + "ctor", + "env_logger", + "futures 0.3.28", + "fuzzy", + "git", + "globset", + "gpui", + "http 0.1.0", + "indoc", + "itertools 0.11.0", + "lazy_static", + "log", + "lsp", + "parking_lot", + "postage", + "pulldown-cmark", + "rand 0.8.5", + "regex", + "rpc", + "schemars", + "serde", + "serde_json", + "settings", + "similar", + "smallvec", + "smol", + "sum_tree", + "task", + "text", + "theme", + "tree-sitter", + "tree-sitter-elixir", + "tree-sitter-embedded-template", + "tree-sitter-heex", + "tree-sitter-html", + "tree-sitter-json 0.20.0", + "tree-sitter-markdown", + "tree-sitter-ruby", + "tree-sitter-rust", + "tree-sitter-typescript", + "unicase", + "unindent", + "util", +] + +[[package]] +name = "language_selector" +version = "0.1.0" +dependencies = [ + "anyhow", + "editor", + "fuzzy", + "gpui", + "language", + "picker", + "project", + "ui", + "util", + "workspace", +] + +[[package]] +name = "language_tools" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "collections", + "copilot", + "editor", + "env_logger", + "futures 0.3.28", + "gpui", + "language", + "lsp", + "project", + "release_channel", + "serde_json", + "settings", + "theme", + "tree-sitter", + "ui", + "util", + "workspace", +] + +[[package]] +name = "languages" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-compression", + "async-tar", + "async-trait", + "collections", + "feature_flags", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "language", + "lazy_static", + "log", + "lsp", + "node_runtime", + "project", + "regex", + "rope", + "rust-embed", + "serde", + "serde_json", + "settings", + "smol", + "task", + "text", + "theme", + "toml 0.8.10", + "tree-sitter", + "tree-sitter-bash", + "tree-sitter-c", + "tree-sitter-cpp", + "tree-sitter-css", + "tree-sitter-go", + "tree-sitter-gomod", + "tree-sitter-gowork", + "tree-sitter-jsdoc", + "tree-sitter-json 0.20.0", + "tree-sitter-markdown", + "tree-sitter-proto", 
+ "tree-sitter-python", + "tree-sitter-regex", + "tree-sitter-rust", + "tree-sitter-typescript", + "tree-sitter-yaml", + "unindent", + "util", + "workspace", +] + +[[package]] +name = "lazy-bytes-cast" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10257499f089cd156ad82d0a9cd57d9501fa2c989068992a97eb3c27836f206b" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +dependencies = [ + "spin 0.5.2", +] + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "libgit2-sys" +version = "0.16.2+1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "libloading" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d580318f95776505201b28cf98eb1fa5e4be3b689633ba6a3e6cd880ff22d8cb" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81eb4061c0582dedea1cbc7aff2240300dd6982e0239d1c99e65c1dbf4a30ba7" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "line-wrap" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" +dependencies = [ + "safemem", +] + +[[package]] +name = "linkify" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1dfa36d52c581e9ec783a7ce2a5e0143da6237be5811a0b3153fedfdbe9f780" +dependencies = [ + "memchr", +] + +[[package]] +name = "linkme" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"91ed2ee9464ff9707af8e9ad834cffa4802f072caad90639c583dd3c62e6e608" +dependencies = [ + "linkme-impl", +] + +[[package]] +name = "linkme-impl" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba125974b109d512fccbc6c0244e7580143e460895dfd6ea7f8bbb692fd94396" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" + +[[package]] +name = "live_kit_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-broadcast", + "async-trait", + "collections", + "core-foundation", + "futures 0.3.28", + "gpui", + "live_kit_server", + "log", + "media", + "nanoid", + "parking_lot", + "postage", + "serde", + "serde_json", + "sha2 0.10.7", + "simplelog", +] + +[[package]] +name = "live_kit_server" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "hmac 0.12.1", + "jwt", + "log", + "prost", + "prost-build", + "prost-types", + "reqwest", + "serde", + "sha2 0.10.7", +] + +[[package]] +name = "lmdb-master-sys" +version = "0.1.0" +source = "git+https://github.com/meilisearch/heed?rev=036ac23f73a021894974b9adc815bc95b3e0482a#036ac23f73a021894974b9adc815bc95b3e0482a" +dependencies = [ + "cc", + "doxygen-rs", + "libc", +] + +[[package]] +name = "lock_api" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +dependencies = [ + "serde", + "value-bag", +] + +[[package]] +name = "lsp" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-pipe", + "collections", + "ctor", + "env_logger", + "futures 0.3.28", + "gpui", + "log", + "lsp-types", + "parking_lot", + "postage", + "release_channel", + "serde", + "serde_json", + "smol", + "util", + "windows 0.56.0", +] + +[[package]] +name = "lsp-types" +version = "0.94.1" +source = "git+https://github.com/zed-industries/lsp-types?branch=updated-completion-list-item-defaults#90a040a1d195687bd19e1df47463320a44e93d7a" +dependencies = [ + "bitflags 1.3.2", + "serde", + "serde_json", + "serde_repr", + "url", +] + +[[package]] +name = "lz4" +version = "1.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9e2dd86df36ce760a60f6ff6ad526f7ba1f14ba0356f8254fb6905e6494df1" +dependencies = [ + "libc", + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] + +[[package]] +name = "mach2" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8" +dependencies = [ + "libc", +] + +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + +[[package]] +name = "markdown" +version = "0.1.0" +dependencies = [ + "anyhow", + "assets", + "env_logger", + "futures 0.3.28", + "gpui", + "language", + "languages", + "linkify", + "log", + "node_runtime", + "pulldown-cmark", + "settings", + "theme", + "ui", + "util", +] + +[[package]] +name = "markdown_preview" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-recursion 1.0.5", + "collections", + "editor", + "gpui", + "language", + "linkify", + "log", + "pretty_assertions", + "pulldown-cmark", + "theme", + "ui", + "workspace", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "maybe-owned" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4" + +[[package]] +name = "md-5" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "media" +version = "0.1.0" +dependencies = [ + "anyhow", + "bindgen 0.65.1", + "core-foundation", + "foreign-types 0.5.0", + "metal", + "objc", +] + +[[package]] +name = "memchr" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" + +[[package]] +name = "memfd" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" +dependencies = [ + "rustix 0.38.32", +] + +[[package]] +name = "memmap2" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a5a03cefb0d953ec0be133036f14e109412fa594edc2f77227249db66cc3ed" +dependencies = [ + "libc", +] + +[[package]] +name = "memmap2" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" +dependencies = [ + "libc", +] + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "menu" +version = "0.1.0" +dependencies = [ + "gpui", + "serde", +] + +[[package]] +name = "metal" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"550b24b0cd4cf923f36bae78eca457b3a10d8a6a14a9c84cb2687b527e6a84af" +dependencies = [ + "bitflags 1.3.2", + "block", + "core-graphics-types", + "foreign-types 0.5.0", + "log", + "objc", + "paste", +] + +[[package]] +name = "mimalloc" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f41a2280ded0da56c8cf898babb86e8f10651a34adcfff190ae9a1159c6908d" +dependencies = [ + "libmimalloc-sys", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791daaae1ed6889560f8c4359194f56648355540573244a5448a83ba1ecc7435" +dependencies = [ + "adler32", +] + +[[package]] +name = "miniz_oxide" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", + "simd-adler32", +] + +[[package]] +name = "mint" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e53debba6bda7a793e5f99b8dacf19e626084f525f7829104ba9898f367d85ff" + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.48.0", +] + +[[package]] +name = "miow" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "multi_buffer" +version = "0.1.0" +dependencies = [ + "anyhow", + "clock", + "collections", + "ctor", + "env_logger", + "futures 0.3.28", + "git", + "gpui", + "itertools 0.11.0", + "language", + "log", + "parking_lot", + "rand 0.8.5", + "serde", + "settings", + "smallvec", + "sum_tree", + "text", + "theme", + "util", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "naga" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae585df4b6514cf8842ac0f1ab4992edc975892704835b549cf818dc0191249e" +dependencies = [ + "bit-set", + "bitflags 2.4.2", + "codespan-reporting", + "hexf-parse", + "indexmap 2.0.0", + "log", + "num-traits", + "rustc-hash", + "spirv", + "termcolor", + "thiserror", + "unicode-xid", +] + +[[package]] +name = "nanoid" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8" +dependencies = [ + "rand 0.8.5", +] + +[[package]] +name = "nanorand" +version = "0.7.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3" +dependencies = [ + "getrandom 0.2.10", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "ndk" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "451422b7e4718271c8b5b3aadf5adedba43dc76312454b387e98fae0fc951aa0" +dependencies = [ + "bitflags 1.3.2", + "jni-sys", + "ndk-sys", + "num_enum", + "raw-window-handle 0.5.2", + "thiserror", +] + +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "ndk-sys" +version = "0.4.1+23.1.7779620" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cf2aae958bd232cac5069850591667ad422d263686d75b52a065f9badeee5a3" +dependencies = [ + "jni-sys", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" + +[[package]] +name = "nix" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", +] + +[[package]] +name = "nix" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "libc", + "memoffset", +] + +[[package]] +name = "nix" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "cfg_aliases", + "libc", +] + +[[package]] +name = "node_runtime" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-compression", + "async-std", + "async-tar", + "async-trait", + "async_zip", + "futures 0.3.28", + "http 0.1.0", + "log", + "semver", + "serde", + "serde_json", + "smol", + "tempfile", + "util", + "walkdir", + "windows 0.56.0", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "notifications" +version = "0.1.0" +dependencies = [ + "anyhow", + "channel", + "client", + "collections", + "db", + "gpui", + "rpc", + "settings", + "sum_tree", + "time", + "util", +] + +[[package]] +name = "notify" +version = "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" +dependencies = [ + "bitflags 2.4.2", + "crossbeam-channel", + "filetime", + "fsevent-sys 4.1.0", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "walkdir", + "windows-sys 0.48.0", +] + +[[package]] +name = "ntapi" +version = 
"0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational 0.4.1", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9bc3e36fd683e004fd59c64a425e0e991616f5a8b617c3b9a933a93c168facc" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "serde", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-complex" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg", + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +dependencies = [ + "autocfg", 
+ "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.3", + "libc", +] + +[[package]] +name = "num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "nvim-rs" +version = "0.6.0-pre" +source = "git+https://github.com/KillTheMule/nvim-rs?branch=master#0d2b1c884f3c39a76b5b7aac0b429f4624843954" +dependencies = [ + "async-trait", + "futures 0.3.28", + "log", + "parity-tokio-ipc", + "rmp", + "rmpv", + "tokio", + "tokio-util", +] + +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", + "objc_exception", +] + +[[package]] +name = "objc-foundation" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" +dependencies = [ + "block", + "objc", + "objc_id", +] + +[[package]] +name = "objc_exception" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad970fb455818ad6cba4c122ad012fae53ae8b4795f86378bce65e4f6bab2ca4" +dependencies = [ + "cc", +] + +[[package]] +name = "objc_id" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" +dependencies = [ + "objc", +] + +[[package]] +name = "object" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +dependencies = [ + "crc32fast", + "hashbrown 0.14.0", + "indexmap 2.0.0", + "memchr", +] + +[[package]] +name = "oboe" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8868cc237ee02e2d9618539a23a8d228b9bb3fc2e7a5b11eed3831de77c395d0" +dependencies = [ + "jni 0.20.0", + "ndk", + "ndk-context", + "num-derive", + "num-traits", + "oboe-sys", +] + +[[package]] +name = "oboe-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f44155e7fb718d3cfddcf70690b2b51ac4412f347cd9e4fbe511abe9cd7b5f2" +dependencies = [ + "cc", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "oo7" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37558cac1af63a81fd2ff7f3469c02a4da06b163c5671791553b8dac10f07c82" +dependencies = [ + "aes", + "async-fs 2.1.1", + "async-io 2.3.1", + "async-lock 3.3.0", + "blocking", + "cbc", + "cipher 0.4.4", + "digest 0.10.7", + "futures-lite 2.2.0", + "futures-util", + "hkdf", + "hmac 0.12.1", + "num", + "num-bigint-dig 0.8.4", + "pbkdf2 
0.12.2", + "rand 0.8.5", + "serde", + "sha2 0.10.7", + "zbus", + "zeroize", + "zvariant", +] + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "open" +version = "5.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449f0ff855d85ddbf1edd5b646d65249ead3f5e422aaa86b7d2d0b049b103e32" +dependencies = [ + "is-wsl", + "libc", + "pathdiff", +] + +[[package]] +name = "open_ai" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.28", + "http 0.1.0", + "isahc", + "schemars", + "serde", + "serde_json", +] + +[[package]] +name = "openssl" +version = "0.10.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "foreign-types 0.3.2", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "300.2.3+3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "ordered-float" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ordered-float" +version = "3.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a54938017eacd63036332b4ae5c8a49fc8c0c1d6d629893057e4f13609edd06" +dependencies = [ + "num-traits", +] + +[[package]] +name = "ordered-stream" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "os_str_bytes" +version = "6.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" + +[[package]] +name = "ouroboros" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2ba07320d39dfea882faa70554b4bd342a5f273ed59ba7c1c6b4c840492c954" +dependencies = [ + "aliasable", + "ouroboros_macro", + "static_assertions", +] + +[[package]] +name = "ouroboros_macro" +version = "0.17.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec4c6225c69b4ca778c0aea097321a64c421cf4577b331c61b229267edabb6f8" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "outline" +version = "0.1.0" +dependencies = [ + "editor", + "fuzzy", + "gpui", + "indoc", + "language", + "menu", + "ordered-float 2.10.0", + "picker", + "project", + "rope", + "serde_json", + "settings", + "smol", + "theme", + "tree-sitter-rust", + "tree-sitter-typescript", + "ui", + "util", + "workspace", +] + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p256" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" +dependencies = [ + "ecdsa", + "elliptic-curve", + "sha2 0.10.7", +] + +[[package]] +name = "page_size" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "palette" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebfc23a4b76642983d57e4ad00bb4504eb30a8ce3c70f4aee1f725610e36d97a" +dependencies = [ + "approx", + "fast-srgb8", + "palette_derive", +] + +[[package]] +name = "palette_derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8890702dbec0bad9116041ae586f84805b13eecd1d8b1df27c29998a9969d6d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "parity-tokio-ipc" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9981e32fb75e004cc148f5fb70342f393830e0a4aa62e3cc93b50976218d42b6" +dependencies = [ + "futures 0.3.28", + "libc", + "log", + "rand 0.7.3", + "tokio", + "winapi", +] + +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.3.5", + "smallvec", + "windows-targets 0.48.5", +] + +[[package]] +name = "password-hash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1a5d4e9c205d2c1ae73b84aab6240e98218c0e72e63b50422cfb2d1ca952282" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + 
+[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + +[[package]] +name = "pathfinder_geometry" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b7e7b4ea703700ce73ebf128e1450eb69c3a8329199ffbfb9b2a0418e5ad3" +dependencies = [ + "log", + "pathfinder_simd", +] + +[[package]] +name = "pathfinder_simd" +version = "0.5.3" +source = "git+https://github.com/servo/pathfinder.git?rev=30419d07660dc11a21e42ef4a7fa329600cff152#30419d07660dc11a21e42ef4a7fa329600cff152" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "pbkdf2" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95f5254224e617595d2cc3cc73ff0a5eaf2637519e25f03388154e9378b6ffa" +dependencies = [ + "crypto-mac", +] + +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest 0.10.7", + "hmac 0.12.1", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" +dependencies = [ + "base64 0.13.1", + "once_cell", + "regex", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "petgraph" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +dependencies = [ + "fixedbitset", + "indexmap 2.0.0", +] + +[[package]] +name = "phf" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +dependencies = [ + "phf_macros", + "phf_shared", +] + +[[package]] +name = "phf_generator" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +dependencies = [ + "phf_shared", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "phf_shared" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "picker" +version = "0.1.0" +dependencies = [ + "anyhow", + "ctor", + "editor", + "env_logger", + "gpui", + "menu", + "serde", + 
"serde_json", + "ui", + "workspace", +] + +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + +[[package]] +name = "pin-project" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.0", + "futures-io", +] + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der 0.7.8", + "pkcs8 0.10.2", + "spki 0.7.2", +] + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der 0.6.1", + "spki 0.6.0", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der 0.7.8", + "spki 0.7.2", +] + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "plist" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a4a0cfc5fb21a09dc6af4bf834cf10d4a32fccd9e2ea468c4b1751a097487aa" +dependencies = [ + "base64 0.21.7", + "indexmap 1.9.3", + "line-wrap", + "quick-xml 0.30.0", + "serde", + "time", +] + +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "png" +version = "0.16.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3287920cb847dee3de33d301c463fba14dda99db24214ddf93f83d3021f4c6" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "deflate", + "miniz_oxide 0.3.7", +] + +[[package]] +name = "png" +version = "0.17.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06e4b0d3d1312775e782c86c91a111aa1f910cbb65e1337f9975b5f9a554b5e1" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide 0.7.1", +] + +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite", + "windows-sys 0.48.0", +] + +[[package]] +name = "polling" +version = "3.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "545c980a3880efd47b2e262f6a4bb6daad6555cf3367aa9c4e52895f69537a41" +dependencies = [ + "cfg-if", + "concurrent-queue", + "pin-project-lite", + "rustix 0.38.32", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "pollster" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" + +[[package]] +name = "postage" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1" +dependencies = [ + "atomic", + "crossbeam-queue", + "futures 0.3.28", + "log", + "parking_lot", + "pin-project", + "pollster", + "static_assertions", + "thiserror", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "prettier" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "fs", + "gpui", + "language", + "log", + "lsp", + "node_runtime", + "parking_lot", + "serde", + "serde_json", + "util", +] + +[[package]] +name = "pretty_assertions" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "prettyplease" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" +dependencies = [ + "proc-macro2", + "syn 2.0.59", +] + +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml 0.5.11", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +dependencies = [ + "toml_edit 0.21.1", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "profiling" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d84d1d7a6ac92673717f9f6d1518374ef257669c24ebc5ac25d5033828be58" +dependencies = [ + "profiling-procmacros", +] + +[[package]] +name = "profiling-procmacros" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd" +dependencies = [ + "quote", + "syn 2.0.59", +] + +[[package]] +name = "project" +version = "0.1.0" +dependencies = [ + "aho-corasick", + "anyhow", + "async-trait", + "client", + "clock", + "collections", + "dev_server_projects", + "env_logger", + "fs", + "futures 0.3.28", + "fuzzy", + "git", + "git2", + "globset", + "gpui", + "http 0.1.0", + "itertools 0.11.0", + "language", + "log", + "lsp", + "node_runtime", + "parking_lot", + "postage", + "prettier", + "pretty_assertions", + "rand 0.8.5", + "regex", + "release_channel", + "rpc", + "schemars", + "serde", + "serde_json", + "settings", + "sha2 0.10.7", + "similar", + "smol", + "task", + "terminal", + "text", + "unindent", + "util", + "which 6.0.0", + "worktree", +] + +[[package]] +name = "project_panel" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "collections", + "db", + "editor", + "file_icons", + "git", + "gpui", + "language", + "menu", + "pretty_assertions", + "project", + "schemars", + "search", + "serde", + "serde_derive", + "serde_json", + "settings", + "theme", + "ui", + "unicase", + "util", + "workspace", +] + +[[package]] +name = "project_symbols" +version = "0.1.0" +dependencies = [ + "anyhow", + "editor", + "futures 0.3.28", + "fuzzy", + "gpui", + "language", + "lsp", + "ordered-float 2.10.0", + "picker", + "project", + "release_channel", + "serde_json", + "settings", + "theme", + "util", + "workspace", +] + +[[package]] +name = "prometheus" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes 1.5.0", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes 1.5.0", + "heck 0.3.3", + "itertools 0.10.5", + "lazy_static", + "log", + "multimap", + "petgraph", + "prost", + 
"prost-types", + "regex", + "tempfile", + "which 4.4.2", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools 0.10.5", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "prost-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes 1.5.0", + "prost", +] + +[[package]] +name = "protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" + +[[package]] +name = "psm" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +dependencies = [ + "cc", +] + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pulldown-cmark" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce76ce678ffc8e5675b22aa1405de0b7037e2fdf8913fea40d1926c6fe1e6e7" +dependencies = [ + "bitflags 2.4.2", + "memchr", + "unicase", +] + +[[package]] +name = "quick-xml" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eff6510e86862b57b210fd8cbe8ed3f0d7d600b9c2863cd4549a2e033c66e956" +dependencies = [ + "memchr", +] + +[[package]] +name = "quick-xml" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +dependencies = [ + "memchr", +] + +[[package]] +name = "quick_action_bar" +version = "0.1.0" +dependencies = [ + "assistant", + "editor", + "gpui", + "search", + "settings", + "ui", + "workspace", +] + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rangemap" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "977b1e897f9d764566891689e642653e5ed90c6895106acd005eb4c1d0203991" + +[[package]] +name = "raw-window-handle" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9" + +[[package]] +name = "raw-window-handle" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42a9830a0e1b9fb145ebb365b8bc4ccd75f290f98c0247deafbbe2c75cefb544" + +[[package]] +name = "raw-window-metal" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76e8caa82e31bb98fee12fa8f051c94a6aa36b07cddb03f0d4fc558988360ff1" +dependencies = [ + "cocoa", + "core-graphics", + "objc", + "raw-window-handle 0.6.0", +] + +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "read-fonts" +version = "0.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1362980db95801b70031dd592dc052a44b1810ca9da8fbcf7b25983f3174ed0" +dependencies = [ + "font-types", +] + +[[package]] +name = "recent_projects" +version = "0.1.0" +dependencies = [ + "anyhow", + "dev_server_projects", + "editor", + "feature_flags", + "fuzzy", + "gpui", + "language", + "markdown", + "menu", + "ordered-float 2.10.0", + "picker", + "project", + "rpc", + "serde", + "serde_json", + "smol", + "ui", + "ui_text_field", + "util", + "workspace", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom 0.2.10", + "redox_syscall 0.2.16", + "thiserror", +] + +[[package]] +name = "refineable" +version = "0.1.0" +dependencies = [ + "derive_refineable", +] + +[[package]] +name = "regalloc2" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" +dependencies = [ + "hashbrown 0.13.2", + "log", + "rustc-hash", + "slice-group-by", + "smallvec", +] + +[[package]] +name = "regex" +version = "1.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.5", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795" + +[[package]] +name = "regex-automata" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-lite" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "release_channel" +version = "0.1.0" +dependencies = [ + "gpui", + "once_cell", +] + +[[package]] +name = "rend" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "repair_json" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee191e184125fe72cb59b74160e25584e3908f2aaa84cbda1e161347102aa15" +dependencies = [ + "thiserror", +] + +[[package]] +name = "reqwest" +version = "0.11.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" +dependencies = [ + "base64 0.21.7", + "bytes 1.5.0", + "encoding_rs", + "futures-core", + 
"futures-util", + "h2", + "http 0.2.9", + "http-body", + "hyper", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg 0.50.0", +] + +[[package]] +name = "resvg" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2327ced609dadeed3e9702fec3e6b2ddd208758a9268d13e06566c6101ba533" +dependencies = [ + "log", + "pico-args", + "rgb", + "svgtypes", + "tiny-skia", + "usvg", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac 0.12.1", + "zeroize", +] + +[[package]] +name = "rgb" +version = "0.8.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20ec2d3e3fc7a92ced357df9cebd5a10b6fb2aa1ee797bf7e9ce2f17dffc8f59" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "rich_text" +version = "0.1.0" +dependencies = [ + "futures 0.3.28", + "gpui", + "language", + "linkify", + "pulldown-cmark", + "theme", + "ui", + "util", +] + +[[package]] +name = "ring" +version = "0.17.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" +dependencies = [ + "cc", + "getrandom 0.2.10", + "libc", + "spin 0.9.8", + "untrusted", + "windows-sys 0.48.0", +] + +[[package]] +name = "rkyv" +version = "0.7.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58" +dependencies = [ + "bitvec", + "bytecheck", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rmp" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmpv" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e0e0214a4a2b444ecce41a4025792fc31f77c7bb89c46d253953ea8c65701ec" +dependencies = [ + "num-traits", + "rmp", +] + +[[package]] +name = "rodio" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdf1d4dea18dff2e9eb6dca123724f8b60ef44ad74a9ad283cdfe025df7e73fa" +dependencies = [ + "cpal", + "hound", +] + +[[package]] +name = "rope" +version = "0.1.0" +dependencies = [ + "arrayvec", + "criterion", + "gpui", + "log", + "rand 0.8.5", + "smallvec", + "sum_tree", + "unicode-segmentation", + "util", +] + +[[package]] +name = "roxmltree" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cd14fd5e3b777a7422cca79358c57a8f6e3a703d9ac187448d0daf220c2407f" + +[[package]] +name = "rpc" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-tungstenite", + "base64 0.13.1", + "chrono", + 
"collections", + "env_logger", + "futures 0.3.28", + "gpui", + "parking_lot", + "prost", + "prost-build", + "rand 0.8.5", + "rsa 0.4.0", + "serde", + "serde_json", + "strum", + "tracing", + "util", + "zstd", +] + +[[package]] +name = "rsa" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68ef841a26fc5d040ced0417c6c6a64ee851f42489df11cdf0218e545b6f8d28" +dependencies = [ + "byteorder", + "digest 0.9.0", + "lazy_static", + "num-bigint-dig 0.7.1", + "num-integer", + "num-iter", + "num-traits", + "pem", + "rand 0.8.5", + "simple_asn1", + "subtle", + "zeroize", +] + +[[package]] +name = "rsa" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" +dependencies = [ + "byteorder", + "const-oid", + "digest 0.10.7", + "num-bigint-dig 0.8.4", + "num-integer", + "num-iter", + "num-traits", + "pkcs1", + "pkcs8 0.10.2", + "rand_core 0.6.4", + "signature 2.1.0", + "spki 0.7.2", + "subtle", + "zeroize", +] + +[[package]] +name = "rust-embed" +version = "8.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19549741604902eb99a7ed0ee177a0663ee1eda51a29f71401f166e47e77806a" +dependencies = [ + "rust-embed-impl", + "rust-embed-utils", + "walkdir", +] + +[[package]] +name = "rust-embed-impl" +version = "8.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb9f96e283ec64401f30d3df8ee2aaeb2561f34c824381efa24a35f79bf40ee4" +dependencies = [ + "proc-macro2", + "quote", + "rust-embed-utils", + "syn 2.0.59", + "walkdir", +] + +[[package]] +name = "rust-embed-utils" +version = "8.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38c74a686185620830701348de757fd36bef4aa9680fd23c49fc539ddcc1af32" +dependencies = [ + "globset", + "sha2 0.10.7", + "walkdir", +] + +[[package]] +name = "rust_decimal" +version = "1.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd" +dependencies = [ + "arrayvec", + "borsh", + "bytes 1.5.0", + "num-traits", + "rand 0.8.5", + "rkyv", + "serde", + "serde_json", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.37.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +dependencies = [ + "bitflags 1.3.2", + "errno 0.3.8", + "io-lifetimes 1.0.11", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +dependencies = [ + "bitflags 2.4.2", + "errno 0.3.8", + "itoa", + "libc", + "linux-raw-sys 0.4.12", + "once_cell", + 
"windows-sys 0.52.0", +] + +[[package]] +name = "rustix-openpty" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a25c3aad9fc1424eb82c88087789a7d938e1829724f3e4043163baf0d13cfc12" +dependencies = [ + "errno 0.3.8", + "libc", + "rustix 0.38.32", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "rustybuzz" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0ae5692c5beaad6a9e22830deeed7874eae8a4e3ba4076fb48e12c56856222c" +dependencies = [ + "bitflags 2.4.2", + "bytemuck", + "libm", + "smallvec", + "ttf-parser", + "unicode-bidi-mirroring", + "unicode-ccc", + "unicode-properties", + "unicode-script", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "safemem" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" + +[[package]] +name = "salsa20" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecbd2eb639fd7cab5804a0837fe373cc2172d15437e804c054a9fb885cb923b0" +dependencies = [ + "cipher 0.3.0", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "schemars" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c" +dependencies = [ + "dyn-clone", + "indexmap 1.9.3", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 1.0.109", +] + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scoped_threadpool" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d51f5df5af43ab3f1360b429fa5e0152ac5ce8c0bd6485cae490332e96846a8" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scrypt" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879588d8f90906e73302547e20fffefdd240eb3e0e744e142321f5d49dea0518" +dependencies = [ + "base64ct", + "hmac 0.11.0", + "password-hash", + "pbkdf2 0.8.0", + "salsa20", + "sha2 0.9.9", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "sea-bae" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bd3534a9978d0aa7edd2808dc1f8f31c4d0ecd31ddf71d997b3c98e9f3c9114" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "sea-orm" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da5b2d70c255bc5cbe1d49f69c3c8eadae0fbbaeb18ee978edbf2f75775cb94d" +dependencies = [ + "async-stream", + "async-trait", + "bigdecimal", + "chrono", + "futures 0.3.28", + "log", + "ouroboros", + "rust_decimal", + "sea-orm-macros", + "sea-query", + "sea-query-binder", + "serde", + "serde_json", + "sqlx", + "strum", + "thiserror", + "time", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "sea-orm-macros" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7c8d455fad40194fb9774fdc4810c0f2700ff0dc0e93bd5ce9d641cc3f5dd75" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "sea-bae", + "syn 2.0.59", + "unicode-ident", +] + +[[package]] +name = "sea-query" +version = "0.30.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb3e6bba153bb198646c8762c48414942a38db27d142e44735a133cabddcc820" +dependencies = [ + "bigdecimal", + "chrono", + "derivative", + "inherent", + "ordered-float 3.9.1", + "rust_decimal", + "serde_json", + "time", + "uuid", +] + +[[package]] +name = "sea-query-binder" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36bbb68df92e820e4d5aeb17b4acd5cc8b5d18b2c36a4dd6f4626aabfa7ab1b9" +dependencies = [ + "bigdecimal", + "chrono", + "rust_decimal", + "sea-query", + "serde_json", + "sqlx", + "time", + "uuid", +] + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "sealed" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b5e421024b5e5edfbaa8e60ecf90bda9dbffc602dbb230e6028763f85f0c68c" +dependencies = [ + 
"heck 0.3.3", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "search" +version = "0.1.0" +dependencies = [ + "any_vec", + "anyhow", + "bitflags 2.4.2", + "client", + "collections", + "editor", + "futures 0.3.28", + "gpui", + "language", + "menu", + "project", + "serde", + "serde_json", + "settings", + "smol", + "theme", + "ui", + "unindent", + "util", + "workspace", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der 0.6.1", + "generic-array", + "pkcs8 0.9.0", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys 0.8.6", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys 0.8.6", + "libc", +] + +[[package]] +name = "self_cell" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58bf37232d3bb9a2c4e641ca2a11d83b5062066f88df7fed36c28772046d65ba" + +[[package]] +name = "semantic_index" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "clock", + "collections", + "env_logger", + "fs", + "futures 0.3.28", + "futures-batch", + "gpui", + "heed", + "http 0.1.0", + "language", + "languages", + "log", + "open_ai", + "parking_lot", + "project", + "serde", + "serde_json", + "settings", + "sha2 0.10.7", + "smol", + "tempfile", + "theme", + "tree-sitter", + "ui", + "unindent", + "util", + "workspace", + "worktree", +] + +[[package]] +name = "semantic_version" +version = "0.1.0" +dependencies = [ + "anyhow", + "serde", +] + +[[package]] +name = "semver" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" + +[[package]] +name = "serde" +version = "1.0.196" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.196" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "serde_derive_internals" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "serde_fmt" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d4ddca14104cd60529e8c7f7ba71a2c8acd8f7f5cfcdc2faf97eeb7c3010a4" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_json" +version = "1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +dependencies = [ + "indexmap 2.0.0", + 
"itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_json_lenient" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26386958a1344003f2b2bcff51a23fbe70461a478ef29247c6c6ab2c1656f53e" +dependencies = [ + "indexmap 2.0.0", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd154a240de39fdebcf5775d2675c204d7c13cf39a4c697be6493c8e734337c" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "serde_spanned" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "settings" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "fs", + "futures 0.3.28", + "gpui", + "indoc", + "lazy_static", + "pretty_assertions", + "release_channel", + "rust-embed", + "schemars", + "serde", + "serde_derive", + "serde_json", + "serde_json_lenient", + "smallvec", + "tree-sitter", + "tree-sitter-json 0.19.0", + "unindent", + "util", +] + +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha1_smol" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shell-words" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" + +[[package]] +name = "shellexpand" 
+version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4" +dependencies = [ + "dirs 4.0.0", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "signature" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "simdutf8" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" + +[[package]] +name = "similar" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad1d488a557b235fc46dae55512ffbfc429d2482b08b4d9435ab07384ca8aec" + +[[package]] +name = "simple_asn1" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eb4ea60fb301dc81dfc113df680571045d375ab7345d171c5dc7d7e13107a80" +dependencies = [ + "chrono", + "num-bigint", + "num-traits", + "thiserror", +] + +[[package]] +name = "simplecss" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a11be7c62927d9427e9f40f3444d5499d868648e2edbc4e2116de69e7ec0e89d" +dependencies = [ + "log", +] + +[[package]] +name = "simplelog" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bc0ffd69814a9b251d43afcabf96dad1b29f5028378056257be9e3fecc9f720" +dependencies = [ + "chrono", + "log", + "termcolor", +] + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "slice-group-by" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" + +[[package]] +name = "slotmap" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1e08e261d0e8f5c43123b7adf3e4ca1690d655377ac93a03b2c9d3e98de1342" +dependencies = [ + "version_check", +] + +[[package]] +name = "sluice" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" +dependencies = [ + "async-channel 1.9.0", + "futures-core", + "futures-io", +] + +[[package]] +name = "smallvec" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" + +[[package]] +name = "smithay-client-toolkit" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "922fd3eeab3bd820d76537ce8f582b1cf951eceb5475c28500c7457d9d17f53a" +dependencies = [ + "bitflags 2.4.2", + "calloop", + "calloop-wayland-source", + "cursor-icon", + "libc", + "log", + "memmap2 0.9.4", + "rustix 0.38.32", + "thiserror", + "wayland-backend", + "wayland-client", + "wayland-csd-frame", + "wayland-cursor", + "wayland-protocols", + "wayland-protocols-wlr", + "wayland-scanner", + "xkeysym", +] + +[[package]] +name = "smithay-clipboard" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c091e7354ea8059d6ad99eace06dd13ddeedbb0ac72d40a9a6e7ff790525882d" +dependencies = [ + "libc", + "smithay-client-toolkit", + "wayland-backend", +] + +[[package]] +name = "smol" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" +dependencies = [ + "async-channel 1.9.0", + "async-executor", + "async-fs 1.6.0", + "async-io 1.13.0", + "async-lock 2.8.0", + "async-net 1.7.0", + "async-process 1.7.0", + "blocking", + "futures-lite 1.13.0", +] + +[[package]] +name = "snippet" +version = "0.1.0" +dependencies = [ + "anyhow", + "smallvec", +] + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "spdx" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29ef1a0fa1e39ac22972c8db23ff89aea700ab96aa87114e1fb55937a631a0c9" +dependencies = [ + "smallvec", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spirv" +version = "0.2.0+1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "246bfa38fe3db3f1dfc8ca5a2cdeb7348c78be2112740cc0ec8ef18b6d94f830" +dependencies = [ + "bitflags 1.3.2", + "num-traits", +] + +[[package]] +name = 
"spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der 0.6.1", +] + +[[package]] +name = "spki" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +dependencies = [ + "base64ct", + "der 0.7.8", +] + +[[package]] +name = "sptr" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" + +[[package]] +name = "sqlez" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "futures 0.3.28", + "indoc", + "lazy_static", + "libsqlite3-sys", + "parking_lot", + "smol", + "thread_local", + "util", + "uuid", +] + +[[package]] +name = "sqlez_macros" +version = "0.1.0" +dependencies = [ + "lazy_static", + "sqlez", + "sqlformat", + "syn 1.0.109", +] + +[[package]] +name = "sqlformat" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" +dependencies = [ + "itertools 0.11.0", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d" +dependencies = [ + "ahash 0.8.8", + "atoi", + "bigdecimal", + "byteorder", + "bytes 1.5.0", + "chrono", + "crc", + "crossbeam-queue", + "dotenvy", + "either", + "event-listener 2.5.3", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashlink", + "hex", + "indexmap 2.0.0", + "log", + "memchr", + "once_cell", + "paste", + "percent-encoding", + "rust_decimal", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "sha2 0.10.7", + "smallvec", + "sqlformat", + "thiserror", + "time", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", + "webpki-roots", +] + +[[package]] +name = "sqlx-macros" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 1.0.109", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc" +dependencies = [ + "dotenvy", + "either", + "heck 0.4.1", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2 0.10.7", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 1.0.109", + "tempfile", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" +dependencies = [ + "atoi", + "base64 0.21.7", + "bigdecimal", + "bitflags 2.4.2", + "byteorder", + "bytes 1.5.0", + "chrono", + "crc", + 
"digest 0.10.7", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac 0.12.1", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa 0.9.2", + "rust_decimal", + "serde", + "sha1", + "sha2 0.10.7", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "time", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" +dependencies = [ + "atoi", + "base64 0.21.7", + "bigdecimal", + "bitflags 2.4.2", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "hex", + "hkdf", + "hmac 0.12.1", + "home", + "itoa", + "log", + "md-5", + "memchr", + "num-bigint", + "once_cell", + "rand 0.8.5", + "rust_decimal", + "serde", + "serde_json", + "sha1", + "sha2 0.10.7", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "time", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f" +dependencies = [ + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "sqlx-core", + "time", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "story" +version = "0.1.0" +dependencies = [ + "gpui", + "itertools 0.10.5", + "smallvec", +] + +[[package]] +name = "storybook" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.4", + "collab_ui", + "ctrlc", + "dialoguer", + "editor", + "fuzzy", + "gpui", + "indoc", + "language", + "log", + "menu", + "picker", + "project", + "rust-embed", + "settings", + "simplelog", + "story", + "strum", + "theme", + "ui", +] + +[[package]] +name = "strict-num" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731" +dependencies = [ + "float-cmp", +] + +[[package]] +name = "stringprep" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +dependencies = [ + "finl_unicode", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.25.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.59", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "sum_tree" +version = "0.1.0" +dependencies = [ + "arrayvec", + "ctor", + "env_logger", + "log", + "rand 0.8.5", + "rayon", +] + +[[package]] +name = "supermaven" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "collections", + "editor", + "env_logger", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "language", + "log", + "postage", + "project", + "serde", + "serde_json", + "settings", + "smol", + "supermaven_api", + "theme", + "ui", + "util", +] + +[[package]] +name = "supermaven_api" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.28", + "http 0.1.0", + "serde", + "serde_json", + "smol", + "util", +] + +[[package]] +name = "sval" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05d11eec9fbe2bc8bc71e7349f0e7534db9a96d961fb9f302574275b7880ad06" + +[[package]] +name = "sval_buffer" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b7451f69a93c5baf2653d5aa8bb4178934337f16c22830a50b06b386f72d761" +dependencies = [ + "sval", + "sval_ref", +] + +[[package]] +name = "sval_dynamic" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34f5a2cc12b4da2adfb59d5eedfd9b174a23cc3fae84cec71dcbcd9302068f5" +dependencies = [ + "sval", +] + +[[package]] +name = "sval_fmt" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f578b2301341e246d00b35957f2952c4ec554ad9c7cfaee10bc86bc92896578" +dependencies = [ + "itoa", + "ryu", + "sval", +] + +[[package]] +name = "sval_json" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8346c00f5dc6efe18bea8d13c1f7ca4f112b20803434bf3657ac17c0f74cbc4b" +dependencies = [ + "itoa", + "ryu", + "sval", +] + +[[package]] +name = "sval_ref" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6617cc89952f792aebc0f4a1a76bc51e80c70b18c491bd52215c7989c4c3dd06" +dependencies = [ + "sval", +] + +[[package]] +name = "sval_serde" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe3d1e59f023341d9af75d86f3bc148a6704f3f831eef0dd90bbe9cb445fa024" +dependencies = [ + "serde", + "sval", + "sval_buffer", + "sval_fmt", +] + +[[package]] +name = "svg_fmt" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fb1df15f412ee2e9dfc1c504260fa695c1c3f10fe9f4a6ee2d2184d7d6450e2" + +[[package]] +name = "svgtypes" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97ca9a891c9c70da8139ac9d8e8ea36a210fa21bb50eccd75d4a9561c83e87f" +dependencies = [ + "kurbo", + "siphasher 1.0.1", +] + +[[package]] +name = "swash" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06ff4664af8923625604261c645f5c4cc610cc83c84bec74b50d76237089de7" +dependencies = [ + "read-fonts", + "yazi", + "zeno", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a6531ffc7b071655e4ce2e04bd464c4830bb585a61cabb96cf808f05172615a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synchronoise" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dbc01390fc626ce8d1cffe3376ded2b72a11bb70e1c75f404a210e4daa4def2" +dependencies = [ + "crossbeam-queue", +] + +[[package]] +name = "sys-locale" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e801cf239ecd6ccd71f03d270d67dd53d13e90aab208bf4b8fe4ad957ea949b0" +dependencies = [ + "libc", +] + +[[package]] +name = "sysinfo" +version = "0.30.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c385888ef380a852a16209afc8cfad22795dd8873d69c9a14d2e2088f118d18" +dependencies = [ + "cfg-if", + "core-foundation-sys 0.8.6", + "libc", + "ntapi", + "once_cell", + "rayon", + "windows 0.52.0", +] + +[[package]] +name = "system-interface" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aef1f9d4c1dbdd1cb3a63be9efd2f04d8ddbc919d46112982c76818ffc2f1a7" +dependencies = [ + "bitflags 2.4.2", + "cap-fs-ext", + "cap-std", + "fd-lock", + "io-lifetimes 2.0.3", + "rustix 0.38.32", + "windows-sys 0.52.0", + "winx", +] + +[[package]] +name = "tab_switcher" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "ctor", + "editor", + "env_logger", + "gpui", + "language", + "menu", + "picker", + "project", + "serde", + "serde_json", + "terminal_view", + "theme", + "ui", + "util", + "workspace", +] + +[[package]] +name = "taffy" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2e140b328c6cb5e744bb2c65910b47df86b239afc793ee2c52262569cf9225" +dependencies = [ + "arrayvec", + "grid", + "num-traits", + "serde", + "slotmap", +] + +[[package]] +name = "take-until" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bdb6fa0dfa67b38c1e66b7041ba9dcf23b99d8121907cd31c807a332f7a0bbb" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "target-lexicon" +version = "0.12.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69758bda2e78f098e4ccb393021a0963bb3442eac05f135c30f61b7370bbafae" + +[[package]] +name = "task" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "futures 0.3.28", + "gpui", + "hex", + "parking_lot", + "schemars", + "serde", + "serde_json_lenient", + "sha2 0.10.7", + "shellexpand", + "util", +] + +[[package]] +name = "tasks_ui" +version = "0.1.0" +dependencies = [ + "editor", + "file_icons", + "fuzzy", + "gpui", + "language", + "menu", + "picker", + "project", + "schemars", + "serde", + "serde_json", + "settings", + "task", + "tree-sitter-rust", + "tree-sitter-typescript", + "ui", + 
"util", + "workspace", +] + +[[package]] +name = "telemetry_events" +version = "0.1.0" +dependencies = [ + "semantic_version", + "serde", +] + +[[package]] +name = "tempfile" +version = "3.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +dependencies = [ + "cfg-if", + "fastrand 2.0.0", + "redox_syscall 0.4.1", + "rustix 0.38.32", + "windows-sys 0.52.0", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", +] + +[[package]] +name = "termcolor" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "terminal" +version = "0.1.0" +dependencies = [ + "alacritty_terminal", + "anyhow", + "collections", + "dirs 4.0.0", + "futures 0.3.28", + "gpui", + "libc", + "rand 0.8.5", + "schemars", + "serde", + "serde_derive", + "serde_json", + "settings", + "smol", + "sysinfo", + "task", + "theme", + "thiserror", + "util", + "windows 0.56.0", +] + +[[package]] +name = "terminal_view" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "collections", + "db", + "dirs 4.0.0", + "editor", + "futures 0.3.28", + "gpui", + "itertools 0.11.0", + "language", + "project", + "rand 0.8.5", + "search", + "serde", + "serde_json", + "settings", + "shellexpand", + "smol", + "task", + "tasks_ui", + "terminal", + "theme", + "ui", + "util", + "workspace", +] + +[[package]] +name = "text" +version = "0.1.0" +dependencies = [ + "anyhow", + "clock", + "collections", + "ctor", + "env_logger", + "gpui", + "http 0.1.0", + "lazy_static", + "log", + "parking_lot", + "postage", + "rand 0.8.5", + "regex", + "rope", + "smallvec", + "sum_tree", + "util", +] + +[[package]] +name = "textwrap" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" + +[[package]] +name = "theme" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "color", + "derive_more", + "fs", + "futures 0.3.28", + "gpui", + "indexmap 1.9.3", + "palette", + "parking_lot", + "refineable", + "schemars", + "serde", + "serde_derive", + "serde_json", + "serde_json_lenient", + "serde_repr", + "settings", + "story", + "util", + "uuid", +] + +[[package]] +name = "theme_importer" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.4", + "gpui", + "indexmap 1.9.3", + "log", + "palette", + "rust-embed", + "schemars", + "serde", + "serde_json", + "serde_json_lenient", + "simplelog", + "strum", + "theme", + "vscode_theme", +] + +[[package]] +name = "theme_selector" +version = "0.1.0" +dependencies = [ + "client", + "feature_flags", + "fs", + "fuzzy", + "gpui", + "log", + "picker", + "serde", + "settings", + "theme", + "ui", + "util", + "workspace", +] + +[[package]] +name = "thiserror" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "tiff" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a53f4706d65497df0c4349241deddf35f84cee19c87ed86ea8ca590f4464437" +dependencies = [ + "jpeg-decoder", + "miniz_oxide 0.4.4", + "weezl", +] + +[[package]] +name = "tiktoken-rs" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c314e7ce51440f9e8f5a497394682a57b7c323d0f4d0a6b1b13c429056e0e234" +dependencies = [ + "anyhow", + "base64 0.21.7", + "bstr", + "fancy-regex", + "lazy_static", + "parking_lot", + "rustc-hash", +] + +[[package]] +name = "time" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f6bb557fd245c28e6411aa56b6403c689ad95061f50e4be16c274e70a17e48" +dependencies = [ + "deranged", + "itoa", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" + +[[package]] +name = "time-macros" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a942f44339478ef67935ab2bbaec2fb0322496cf3cbe84b261e06ac3814c572" +dependencies = [ + "time-core", +] + +[[package]] +name = "time_format" +version = "0.1.0" +dependencies = [ + "core-foundation", + "core-foundation-sys 0.8.6", + "sys-locale", + "time", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-skia" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d13394d44dae3207b52a326c0c85a8bf87f1541f23b0d143811088497b09ab" +dependencies = [ + "arrayref", + "arrayvec", + "bytemuck", + "cfg-if", + "log", + "png 0.17.13", + "tiny-skia-path", +] + +[[package]] +name = "tiny-skia-path" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e7fc0c2e86a30b117d0462aa261b72b7a99b7ebd7deb3a14ceda95c5bdc93" +dependencies = [ + "arrayref", + "bytemuck", + "strict-num", +] + +[[package]] +name = "tiny_http" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce51b50006056f590c9b7c3808c3bd70f0d1101666629713866c227d6e58d39" +dependencies = [ + "ascii", + "chrono", + "chunked_transfer", + "log", + "url", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" +dependencies = [ + "backtrace", + "bytes 1.5.0", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.5.4", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" +dependencies = [ + "bytes 0.4.12", + "futures 0.1.31", + "log", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.20.1", +] + +[[package]] +name = "tokio-util" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +dependencies = [ + "bytes 1.5.0", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.19.15", +] + +[[package]] +name = "toml" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a9aad4a3066010876e8dcf5a8a06e70a558751117a145c6ce2b82c2e2054290" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.22.6", +] + +[[package]] +name = "toml_datetime" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" 
+version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.0.0", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.5.15", +] + +[[package]] +name = "toml_edit" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +dependencies = [ + "indexmap 2.0.0", + "toml_datetime", + "winnow 0.5.15", +] + +[[package]] +name = "toml_edit" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c1b5fd4128cc8d3e0cb74d4ed9a9cc7c7284becd4df68f5f940e1ad123606f6" +dependencies = [ + "indexmap 2.0.0", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.6.1", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" +dependencies = [ + "bitflags 1.3.2", + "bytes 1.5.0", + "futures-core", + "futures-util", + "http 0.2.9", + "http-body", + "http-range-header", + "pin-project-lite", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +dependencies = [ + "bitflags 2.4.2", + "bytes 1.5.0", + "futures-core", + "futures-util", + "http 0.2.9", + "http-body", + "http-range-header", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" 
+version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "tree-sitter" +version = "0.20.100" +source = "git+https://github.com/tree-sitter/tree-sitter?rev=7b4894ba2ae81b988846676f54c0988d4027ef4f#7b4894ba2ae81b988846676f54c0988d4027ef4f" +dependencies = [ + "cc", + "regex", + "wasmtime-c-api-impl", +] + +[[package]] +name = "tree-sitter-bash" +version = "0.20.4" +source = "git+https://github.com/tree-sitter/tree-sitter-bash?rev=7331995b19b8f8aba2d5e26deb51d2195c18bc94#7331995b19b8f8aba2d5e26deb51d2195c18bc94" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-c" +version = "0.20.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b03bdf218020057abee831581a74bff8c298323d6c6cd1a70556430ded9f4b" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-cpp" +version = "0.20.0" +source = "git+https://github.com/tree-sitter/tree-sitter-cpp?rev=f44509141e7e483323d2ec178f2d2e6c0fc041c1#f44509141e7e483323d2ec178f2d2e6c0fc041c1" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-css" +version = "0.19.0" +source = "git+https://github.com/tree-sitter/tree-sitter-css?rev=769203d0f9abe1a9a691ac2b9fe4bb4397a73c51#769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-elixir" +version = "0.1.0" +source = "git+https://github.com/elixir-lang/tree-sitter-elixir?rev=a2861e88a730287a60c11ea9299c033c7d076e30#a2861e88a730287a60c11ea9299c033c7d076e30" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-embedded-template" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33817ade928c73a32d4f904a602321e09de9fc24b71d106f3b4b3f8ab30dcc38" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-go" +version = "0.20.0" +source = "git+https://github.com/tree-sitter/tree-sitter-go?rev=b82ab803d887002a0af11f6ce63d72884580bf33#b82ab803d887002a0af11f6ce63d72884580bf33" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-gomod" +version = "1.0.2" +source = "git+https://github.com/camdencheek/tree-sitter-go-mod#bbe2fe3be4b87e06a613e685250f473d2267f430" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-gowork" +version = "0.0.1" +source = "git+https://github.com/d1y/tree-sitter-go-work#a2a4b99b53b3740855ff33f0b54cab0bb4ce6f45" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-heex" +version = "0.0.1" +source = 
"git+https://github.com/phoenixframework/tree-sitter-heex?rev=2e1348c3cf2c9323e87c2744796cf3f3868aa82a#2e1348c3cf2c9323e87c2744796cf3f3868aa82a" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-html" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "184e6b77953a354303dc87bf5fe36558c83569ce92606e7b382a0dc1b7443443" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-jsdoc" +version = "0.20.0" +source = "git+https://github.com/tree-sitter/tree-sitter-jsdoc?rev=6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55#6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-json" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90b04c4e1a92139535eb9fca4ec8fa9666cc96b618005d3ae35f3c957fa92f92" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-json" +version = "0.20.0" +source = "git+https://github.com/tree-sitter/tree-sitter-json?rev=40a81c01a40ac48744e0c8ccabbaba1920441199#40a81c01a40ac48744e0c8ccabbaba1920441199" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-markdown" +version = "0.0.1" +source = "git+https://github.com/MDeiml/tree-sitter-markdown?rev=330ecab87a3e3a7211ac69bbadc19eabecdb1cca#330ecab87a3e3a7211ac69bbadc19eabecdb1cca" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-proto" +version = "0.0.2" +source = "git+https://github.com/rewinfrey/tree-sitter-proto?rev=36d54f288aee112f13a67b550ad32634d0c2cb52#36d54f288aee112f13a67b550ad32634d0c2cb52" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-python" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c93b1b1fbd0d399db3445f51fd3058e43d0b4dcff62ddbdb46e66550978aa5" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-regex" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb5a53e9c990757895476216796b170fd81e4d173d08f8b082279c4e6ff8c5c" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-ruby" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ac30cbb1560363ae76e1ccde543d6d99087421e228cc47afcec004b86bb711a" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-rust" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0832309b0b2b6d33760ce5c0e818cb47e1d72b468516bfe4134408926fa7594" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-typescript" +version = "0.20.2" +source = "git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259#5d20856f34315b068c41edaee2ac8a100081d259" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "tree-sitter-yaml" +version = "0.0.1" +source = "git+https://github.com/zed-industries/tree-sitter-yaml?rev=f545a41f57502e1b5ddf2a6668896c1b0620f930#f545a41f57502e1b5ddf2a6668896c1b0620f930" +dependencies = [ + "cc", + "tree-sitter", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "ttf-parser" +version = "0.20.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f77d76d837a7830fe1d4f12b7b4ba4192c1888001c7164257e4bc6d21d96b4" + +[[package]] +name = "tungstenite" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1" +dependencies = [ + "base64 0.13.1", + "byteorder", + "bytes 1.5.0", + "http 0.2.9", + "httparse", + "log", + "native-tls", + "rand 0.8.5", + "sha-1", + "thiserror", + "url", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +dependencies = [ + "byteorder", + "bytes 1.5.0", + "data-encoding", + "http 0.2.9", + "httparse", + "log", + "rand 0.8.5", + "sha1", + "thiserror", + "url", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "uds_windows" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" +dependencies = [ + "memoffset", + "tempfile", + "winapi", +] + +[[package]] +name = "ui" +version = "0.1.0" +dependencies = [ + "chrono", + "gpui", + "itertools 0.11.0", + "menu", + "settings", + "smallvec", + "story", + "strum", + "theme", + "windows 0.56.0", +] + +[[package]] +name = "ui_text_field" +version = "0.1.0" +dependencies = [ + "editor", + "gpui", + "settings", + "theme", + "ui", +] + +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-bidi-mirroring" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56d12260fb92d52f9008be7e4bca09f584780eb2266dc8fecc6a192bec561694" + +[[package]] +name = "unicode-ccc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2520efa644f8268dce4dcd3050eaa7fc044fca03961e9998ac7e2e92b77cf1" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" + +[[package]] +name = "unicode-script" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7d817255e1bed6dfd4ca47258685d14d2bdcfbc64fdc9e3819bd5848057b8ecc" + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "unindent" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "usvg" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c704361d822337cfc00387672c7b59eaa72a1f0744f62b2a68aa228a0c6927d" +dependencies = [ + "base64 0.22.0", + "data-url", + "flate2", + "imagesize", + "kurbo", + "log", + "pico-args", + "roxmltree", + "simplecss", + "siphasher 1.0.1", + "strict-num", + "svgtypes", + "tiny-skia-path", + "xmlwriter", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "util" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-fs 1.6.0", + "collections", + "dirs 3.0.2", + "futures 0.3.28", + "futures-lite 1.13.0", + "git2", + "globset", + "lazy_static", + "log", + "rand 0.8.5", + "regex", + "rust-embed", + "serde", + "serde_json", + "take-until", + "tempfile", + "tendril", + "unicase", +] + +[[package]] +name = "uuid" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +dependencies = [ + "getrandom 0.2.10", + "serde", + "sha1_smol", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d92ccd67fb88503048c01b59152a04effd0782d035a83a6d256ce6085f08f4a3" +dependencies = [ + "value-bag-serde1", + "value-bag-sval2", +] + +[[package]] +name = "value-bag-serde1" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0b9f3feef403a50d4d67e9741a6d8fc688bcbb4e4f31bd4aab72cc690284394" +dependencies = [ + "erased-serde", + "serde", + "serde_fmt", +] + +[[package]] +name = "value-bag-sval2" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b24f4146b6f3361e91cbf527d1fb35e9376c3c0cef72ca5ec5af6d640fad7d" +dependencies = [ + "sval", + "sval_buffer", + "sval_dynamic", + "sval_fmt", + "sval_json", + "sval_ref", + "sval_serde", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vcs_menu" +version = "0.1.0" +dependencies = [ + "anyhow", + "fuzzy", + "git", + "gpui", + "picker", + "ui", + "util", + "workspace", +] + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vim" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-compat", + "async-trait", + "collections", + "command_palette", + "command_palette_hooks", + "editor", + "futures 0.3.28", + "gpui", + "indoc", + "itertools 0.11.0", + "language", + "log", + "lsp", + "multi_buffer", + "nvim-rs", + "parking_lot", + "regex", + "release_channel", + "schemars", + "search", + "serde", + "serde_derive", + "serde_json", + "settings", + "tokio", + "ui", + "util", + "workspace", + "zed_actions", +] + +[[package]] +name = "vscode_theme" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b3666211944f2e6ba2c359bc9efc1891157e910b1b11c3900892ea9f18179d2" +dependencies = [ + "serde", +] + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "vswhom" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" +dependencies = [ + "libc", + "vswhom-sys", +] + +[[package]] +name = "vswhom-sys" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3b17ae1f6c8a2b28506cd96d412eebf83b4a0ff2cbefeeb952f2f9dfa44ba18" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "vte" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40eb22ae96f050e0c0d6f7ce43feeae26c348fc4dea56928ca81537cfaa6188b" +dependencies = [ + "bitflags 2.4.2", + "cursor-icon", + "log", + "serde", + "utf8parse", + "vte_generate_state_changes", +] + +[[package]] +name = "vte_generate_state_changes" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "walkdir" +version = "2.5.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.59", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "wasm-encoder" +version = "0.201.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9c7d2731df60006819b013f64ccc2019691deccf6e11a1804bc850cd6748f1a" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasm-metadata" +version = "0.201.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fd83062c17b9f4985d438603cde0a5e8c5c8198201a6937f778b607924c7da2" +dependencies = [ + "anyhow", + "indexmap 2.0.0", + "serde", + "serde_derive", + "serde_json", + "spdx", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.201.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84e5df6dba6c0d7fafc63a450f1738451ed7a0b52295d83e868218fa286bf708" +dependencies = [ + "bitflags 2.4.2", + "indexmap 2.0.0", + "semver", +] + +[[package]] +name = "wasmprinter" +version = "0.201.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "a67e66da702706ba08729a78e3c0079085f6bfcb1a62e4799e97bbf728c2c265" +dependencies = [ + "anyhow", + "wasmparser", +] + +[[package]] +name = "wasmtime" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a08af88fa3d324cc5cf6d388d90ef396a787b3fb4bbd51ba185f8645dc0f02c" +dependencies = [ + "anyhow", + "async-trait", + "bincode", + "bumpalo", + "cfg-if", + "encoding_rs", + "gimli", + "indexmap 2.0.0", + "libc", + "log", + "object", + "once_cell", + "paste", + "rustix 0.38.32", + "semver", + "serde", + "serde_derive", + "serde_json", + "target-lexicon", + "wasmparser", + "wasmtime-component-macro", + "wasmtime-component-util", + "wasmtime-cranelift", + "wasmtime-environ", + "wasmtime-fiber", + "wasmtime-jit-icache-coherence", + "wasmtime-runtime", + "wasmtime-slab", + "wasmtime-winch", + "windows-sys 0.52.0", +] + +[[package]] +name = "wasmtime-asm-macros" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16cdbfcf28542bcda0b5fd68d44603e53e5ad126cbe7b9f25c130e1249fd8211" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "wasmtime-c-api-impl" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67dea28073e105735210b9e932b5e654198d5e28ed31f1314037cd7664ceda2b" +dependencies = [ + "anyhow", + "log", + "once_cell", + "tracing", + "wasmtime", + "wasmtime-c-api-macros", +] + +[[package]] +name = "wasmtime-c-api-macros" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cfe12050fa28b17ab8434ab757fee281dd0d5c7715fa1bc5e4c0b29d1705415" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "wasmtime-component-macro" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cdcf690257c623506eeec3a502864b282aab0fdfd6981c1ebb63c7e98f4a23a" +dependencies = [ + "anyhow", + "proc-macro2", + "quote", + "syn 2.0.59", + "wasmtime-component-util", + "wasmtime-wit-bindgen", + "wit-parser", +] + +[[package]] +name = "wasmtime-component-util" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3ae7bf66e2fae1e332ab3634f332d7422e878a6eecc47c8f8f78cc1f24e501" + +[[package]] +name = "wasmtime-cranelift" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ea025c969a09117818732fa6f96848e858a7953d4659dab8081a6eea3c0523" +dependencies = [ + "anyhow", + "cfg-if", + "cranelift-codegen", + "cranelift-control", + "cranelift-entity", + "cranelift-frontend", + "cranelift-native", + "cranelift-wasm", + "gimli", + "log", + "object", + "target-lexicon", + "thiserror", + "wasmparser", + "wasmtime-cranelift-shared", + "wasmtime-environ", + "wasmtime-versioned-export-macros", +] + +[[package]] +name = "wasmtime-cranelift-shared" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcd6dd2f8d8d4860b384f61f89b597633a5b5f0943c546210e5084c5d321fe20" +dependencies = [ + "anyhow", + "cranelift-codegen", + "cranelift-control", + "cranelift-native", + "gimli", + "object", + "target-lexicon", + "wasmtime-environ", +] + +[[package]] +name = "wasmtime-environ" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f60f3f717658dd77745de03b750d5852126e9be6dad465848c77f90387c44c9" +dependencies = [ + "anyhow", + "bincode", + "cpp_demangle", + "cranelift-entity", + "gimli", + "indexmap 2.0.0", + "log", 
+ "object", + "rustc-demangle", + "serde", + "serde_derive", + "target-lexicon", + "thiserror", + "wasm-encoder", + "wasmparser", + "wasmprinter", + "wasmtime-component-util", + "wasmtime-types", +] + +[[package]] +name = "wasmtime-fiber" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf8cd22ab1041bf0e54b6283e57824557902e4fed8b1f3a7eef29cbaba89eebf" +dependencies = [ + "anyhow", + "cc", + "cfg-if", + "rustix 0.38.32", + "wasmtime-asm-macros", + "wasmtime-versioned-export-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "wasmtime-jit-icache-coherence" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796e4b4989db62899d2117e1e0258b839d088c044591b14e3a0396e7b3ae53a" +dependencies = [ + "cfg-if", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "wasmtime-runtime" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bf2b7745df452a4f41b9aab21d3f7ba1347b12da2fdc5241e59306127884a68" +dependencies = [ + "anyhow", + "cc", + "cfg-if", + "encoding_rs", + "indexmap 2.0.0", + "libc", + "log", + "mach", + "memfd", + "memoffset", + "paste", + "psm", + "rustix 0.38.32", + "sptr", + "wasm-encoder", + "wasmtime-asm-macros", + "wasmtime-environ", + "wasmtime-fiber", + "wasmtime-versioned-export-macros", + "wasmtime-wmemcheck", + "windows-sys 0.52.0", +] + +[[package]] +name = "wasmtime-slab" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83448ef600ad95977019ebaea84a5516fdbc9561d0a8e26b1e099351f993b527" + +[[package]] +name = "wasmtime-types" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf6fe7ed3fd18ed4b1e4465fe5c8674acc9f03523fca5b1b9f975b2560cd741b" +dependencies = [ + "cranelift-entity", + "serde", + "serde_derive", + "thiserror", + "wasmparser", +] + +[[package]] +name = "wasmtime-versioned-export-macros" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6d967f01032da7d4c6303da32f6a00d5efe1bac124b156e7342d8ace6ffdfc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "wasmtime-wasi" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "371d828b6849ea06d598ae7dd1c316e8dd9e99b76f77d93d5886cb25c7f8e188" +dependencies = [ + "anyhow", + "async-trait", + "bitflags 2.4.2", + "bytes 1.5.0", + "cap-fs-ext", + "cap-net-ext", + "cap-rand", + "cap-std", + "cap-time-ext", + "fs-set-times", + "futures 0.3.28", + "io-extras", + "io-lifetimes 2.0.3", + "once_cell", + "rustix 0.38.32", + "system-interface", + "thiserror", + "tokio", + "tracing", + "url", + "wasmtime", + "wiggle", + "windows-sys 0.52.0", +] + +[[package]] +name = "wasmtime-winch" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb8b3fcbc455105760e4a2aa8ee3f39b8357183a62201383b3f72d4836ca2be8" +dependencies = [ + "anyhow", + "cranelift-codegen", + "gimli", + "object", + "target-lexicon", + "wasmparser", + "wasmtime-cranelift-shared", + "wasmtime-environ", + "winch-codegen", +] + +[[package]] +name = "wasmtime-wit-bindgen" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96326c9800fb6c099f50d1bd2126d636fc2f96950e1675acf358c0f52516cd38" +dependencies = [ + "anyhow", + "heck 0.4.1", + "indexmap 2.0.0", + "wit-parser", +] + +[[package]] +name = 
"wasmtime-wmemcheck" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36bd91a4dc55af0bf55e9e2ab0ea13724cfb5c5a1acdf8873039769208f59490" + +[[package]] +name = "wast" +version = "35.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ef140f1b49946586078353a453a1d28ba90adfc54dde75710bc1931de204d68" +dependencies = [ + "leb128", +] + +[[package]] +name = "wayland-backend" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d50fa61ce90d76474c87f5fc002828d81b32677340112b4ef08079a9d459a40" +dependencies = [ + "cc", + "downcast-rs", + "rustix 0.38.32", + "scoped-tls", + "smallvec", + "wayland-sys", +] + +[[package]] +name = "wayland-client" +version = "0.31.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82fb96ee935c2cea6668ccb470fb7771f6215d1691746c2d896b447a00ad3f1f" +dependencies = [ + "bitflags 2.4.2", + "rustix 0.38.32", + "wayland-backend", + "wayland-scanner", +] + +[[package]] +name = "wayland-csd-frame" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "625c5029dbd43d25e6aa9615e88b829a5cad13b2819c4ae129fdbb7c31ab4c7e" +dependencies = [ + "bitflags 2.4.2", + "cursor-icon", + "wayland-backend", +] + +[[package]] +name = "wayland-cursor" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71ce5fa868dd13d11a0d04c5e2e65726d0897be8de247c0c5a65886e283231ba" +dependencies = [ + "rustix 0.38.32", + "wayland-client", + "xcursor", +] + +[[package]] +name = "wayland-protocols" +version = "0.31.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f81f365b8b4a97f422ac0e8737c438024b5951734506b0e1d775c73030561f4" +dependencies = [ + "bitflags 2.4.2", + "wayland-backend", + "wayland-client", + "wayland-scanner", +] + +[[package]] +name = "wayland-protocols-plasma" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23803551115ff9ea9bce586860c5c5a971e360825a0309264102a9495a5ff479" +dependencies = [ + "bitflags 2.4.2", + "wayland-backend", + "wayland-client", + "wayland-protocols", + "wayland-scanner", +] + +[[package]] +name = "wayland-protocols-wlr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad1f61b76b6c2d8742e10f9ba5c3737f6530b4c243132c2a2ccc8aa96fe25cd6" +dependencies = [ + "bitflags 2.4.2", + "wayland-backend", + "wayland-client", + "wayland-protocols", + "wayland-scanner", +] + +[[package]] +name = "wayland-scanner" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63b3a62929287001986fb58c789dce9b67604a397c15c611ad9f747300b6c283" +dependencies = [ + "proc-macro2", + "quick-xml 0.31.0", + "quote", +] + +[[package]] +name = "wayland-sys" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15a0c8eaff5216d07f226cb7a549159267f3467b289d9a2e52fd3ef5aae2b7af" +dependencies = [ + "dlib", + "log", + "once_cell", + "pkg-config", +] + +[[package]] +name = "web-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" +dependencies = [ + "rustls-webpki", +] + +[[package]] +name = "weezl" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" + +[[package]] +name = "welcome" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "db", + "editor", + "extensions_ui", + "fuzzy", + "gpui", + "inline_completion_button", + "install_cli", + "picker", + "project", + "schemars", + "serde", + "settings", + "theme_selector", + "ui", + "util", + "vim", + "workspace", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix 0.38.32", +] + +[[package]] +name = "which" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fa5e0c10bf77f44aac573e498d1a82d5fbd5e91f6fc0a99e7be4b38e85e101c" +dependencies = [ + "either", + "home", + "once_cell", + "rustix 0.38.32", + "windows-sys 0.52.0", +] + +[[package]] +name = "whoami" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" + +[[package]] +name = "wiggle" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae1136a209614ace00b0c11f04dc7cf42540773be3b22eff6ad165110aba29c1" +dependencies = [ + "anyhow", + "async-trait", + "bitflags 2.4.2", + "thiserror", + "tracing", + "wasmtime", + "wiggle-macro", +] + +[[package]] +name = "wiggle-generate" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c2bd99ce26046f4246d720a4198f6a8fc95bc5da82ae4ef62263e24641c3076" +dependencies = [ + "anyhow", + "heck 0.4.1", + "proc-macro2", + "quote", + "shellexpand", + "syn 2.0.59", + "witx", +] + +[[package]] +name = "wiggle-macro" +version = "19.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512d816dbcd0113103b2eb2402ec9018e7f0755202a5b3e67db726f229d8dcae" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", + "wiggle-generate", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "winch-codegen" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d285c833af9453c037cd220765f86c5c9961e8906a815829107c8801d535b8e4" +dependencies = [ + "anyhow", + "cranelift-codegen", + 
"gimli", + "regalloc2", + "smallvec", + "target-lexicon", + "wasmparser", + "wasmtime-environ", +] + +[[package]] +name = "windows" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +dependencies = [ + "windows-core 0.52.0", + "windows-targets 0.52.5", +] + +[[package]] +name = "windows" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132" +dependencies = [ + "windows-core 0.56.0", + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-core" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result", + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-implement" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "windows-interface" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "windows-result" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "749f0da9cc72d82e600d8d2e44cadd0b9eedb9038f71a1c58556ac1c5791813b" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + 
"windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winnow" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d90f4e0f530c4c69f62b80d839e9ef3855edc9cba471a160c4d692deed62b401" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "winresource" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77e2aaaf8cfa92078c0c0375423d631f82f2f57979c2884fdd5f604a11e45329" +dependencies = [ + "toml 0.7.8", + "version_check", +] + +[[package]] +name = "winx" +version = "0.36.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f9643b83820c0cd246ecabe5fa454dd04ba4fa67996369466d0747472d337346" +dependencies = [ + "bitflags 2.4.2", + "windows-sys 0.52.0", +] + +[[package]] +name = "wio" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d129932f4644ac2396cb456385cbf9e63b5b30c6e8dc4820bdca4eb082037a5" +dependencies = [ + "winapi", +] + +[[package]] +name = "wit-bindgen" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "288f992ea30e6b5c531b52cdd5f3be81c148554b09ea416f058d16556ba92c27" +dependencies = [ + "bitflags 2.4.2", + "wit-bindgen-rt", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e85e72719ffbccf279359ad071497e47eb0675fe22106dea4ed2d8a7fcb60ba4" +dependencies = [ + "anyhow", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb8738270f32a2d6739973cbbb7c1b6dd8959ce515578a6e19165853272ee64" + +[[package]] +name = "wit-bindgen-rust" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a39a15d1ae2077688213611209849cad40e9e5cccf6e61951a425850677ff3" +dependencies = [ + "anyhow", + "heck 0.4.1", + "indexmap 2.0.0", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d376d3ae5850526dfd00d937faea0d81a06fa18f7ac1e26f386d760f241a8f4b" +dependencies = [ + "anyhow", + "proc-macro2", + "quote", + "syn 2.0.59", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.201.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "421c0c848a0660a8c22e2fd217929a0191f14476b68962afd2af89fd22e39825" +dependencies = [ + "anyhow", + "bitflags 2.4.2", + "indexmap 2.0.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.201.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196d3ecfc4b759a8573bf86a9b3f8996b304b3732e4c7de81655f875f6efdca6" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.0.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "witx" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e366f27a5cabcddb2706a78296a40b8fcc451e1a6aba2fc1d94b4a01bdaaef4b" +dependencies = [ + "anyhow", + "log", + "thiserror", + "wast", +] + +[[package]] +name = "workspace" +version = "0.1.0" +dependencies = [ + "any_vec", + "anyhow", + "async-recursion 1.0.5", + "bincode", + "call", + "client", + "clock", + "collections", + "db", + "derive_more", + "dev_server_projects", + "env_logger", + "fs", + "futures 0.3.28", + "gpui", + "http 0.1.0", + "itertools 0.11.0", + "language", + "lazy_static", + "log", + "node_runtime", + "parking_lot", + "postage", + "project", + "schemars", + "serde", + "serde_json", + "settings", + "smallvec", + "sqlez", + "task", + "theme", + "ui", + "util", + "uuid", +] + +[[package]] +name = "worktree" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "clock", + "collections", + "fs", + "futures 0.3.28", + "fuzzy", + "git", + "git2", + 
"gpui", + "http 0.1.0", + "ignore", + "itertools 0.11.0", + "language", + "log", + "lsp", + "parking_lot", + "postage", + "pretty_assertions", + "rand 0.8.5", + "rpc", + "schemars", + "serde", + "serde_json", + "settings", + "smol", + "sum_tree", + "text", + "util", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "x11-clipboard" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98785a09322d7446e28a13203d2cae1059a0dd3dfb32cb06d0a225f023d8286" +dependencies = [ + "libc", + "x11rb", +] + +[[package]] +name = "x11rb" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8f25ead8c7e4cba123243a6367da5d3990e0d3affa708ea19dce96356bd9f1a" +dependencies = [ + "as-raw-xcb-connection", + "gethostname", + "libc", + "rustix 0.38.32", + "x11rb-protocol", +] + +[[package]] +name = "x11rb-protocol" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e63e71c4b8bd9ffec2c963173a4dc4cbde9ee96961d4fcb4429db9929b606c34" + +[[package]] +name = "xattr" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc" +dependencies = [ + "libc", +] + +[[package]] +name = "xcursor" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a0ccd7b4a5345edfcd0c3535718a4e9ff7798ffc536bb5b5a0e26ff84732911" + +[[package]] +name = "xdg-home" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21e5a325c3cb8398ad6cf859c1135b25dd29e186679cf2da7581d9679f63b38e" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "xim" +version = "0.4.0" +source = "git+https://github.com/npmania/xim-rs?rev=27132caffc5b9bc9c432ca4afad184ab6e7c16af#27132caffc5b9bc9c432ca4afad184ab6e7c16af" +dependencies = [ + "ahash 0.8.8", + "hashbrown 0.14.0", + "log", + "x11rb", + "xim-ctext", + "xim-parser", +] + +[[package]] +name = "xim-ctext" +version = "0.3.0" +source = "git+https://github.com/npmania/xim-rs?rev=27132caffc5b9bc9c432ca4afad184ab6e7c16af#27132caffc5b9bc9c432ca4afad184ab6e7c16af" +dependencies = [ + "encoding_rs", +] + +[[package]] +name = "xim-parser" +version = "0.2.1" +source = "git+https://github.com/npmania/xim-rs?rev=27132caffc5b9bc9c432ca4afad184ab6e7c16af#27132caffc5b9bc9c432ca4afad184ab6e7c16af" +dependencies = [ + "bitflags 2.4.2", +] + +[[package]] +name = "xkbcommon" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13867d259930edc7091a6c41b4ce6eee464328c6ff9659b7e4c668ca20d4c91e" +dependencies = [ + "as-raw-xcb-connection", + "libc", + "memmap2 0.8.0", + "xkeysym", +] + +[[package]] +name = "xkeysym" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054a8e68b76250b253f671d1268cb7f1ae089ec35e195b2efb2a4e9a836d0621" + +[[package]] +name = "xmlparser" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d25c75bf9ea12c4040a97f829154768bbbce366287e2dc044af160cd79a13fd" + +[[package]] +name = "xmlwriter" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9" + +[[package]] +name = "xtask" +version = "0.1.0" +dependencies = [ + "anyhow", + "cargo_toml", + "clap 4.4.4", + "toml 0.8.10", +] + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "yazi" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94451ac9513335b5e23d7a8a2b61a7102398b8cca5160829d313e84c9d98be1" + +[[package]] +name = "yeslogic-fontconfig-sys" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2bbd69036d397ebbff671b1b8e4d918610c181c5a16073b96f984a38d08c386" +dependencies = [ + "const-cstr", + "dlib", + "once_cell", + "pkg-config", +] + +[[package]] +name = "zbus" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b8e3d6ae3342792a6cc2340e4394334c7402f3d793b390d2c5494a4032b3030" +dependencies = [ + "async-broadcast", + "async-executor", + "async-fs 2.1.1", + "async-io 2.3.1", + "async-lock 3.3.0", + "async-process 2.1.0", + "async-recursion 1.0.5", + "async-task", + "async-trait", + "blocking", + "derivative", + "enumflags2", + "event-listener 5.1.0", + "futures-core", + "futures-sink", + "futures-util", + "hex", + "nix 0.27.1", + "ordered-stream", + "rand 0.8.5", + "serde", + "serde_repr", + "sha1", + "static_assertions", + "tracing", + "uds_windows", + "windows-sys 0.52.0", + "xdg-home", + "zbus_macros", + "zbus_names", + "zvariant", +] + +[[package]] +name = "zbus_macros" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7a3e850ff1e7217a3b7a07eba90d37fe9bb9e89a310f718afcde5885ca9b6d7" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "regex", + "syn 1.0.109", + "zvariant_utils", +] + +[[package]] +name = "zbus_names" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b9b1fef7d021261cc16cba64c351d291b715febe0fa10dc3a443ac5a5022e6c" +dependencies = [ + "serde", + "static_assertions", + "zvariant", +] + +[[package]] +name = "zed" +version = "0.137.0" +dependencies = [ + "activity_indicator", + "anyhow", + "assets", + "assistant", + "audio", + "auto_update", + "backtrace", + "breadcrumbs", + "call", + "channel", + "chrono", + "clap 4.4.4", + "cli", + "client", + "collab_ui", + "collections", + "command_palette", + "copilot", + "db", + "dev_server_projects", + "diagnostics", + "editor", + "env_logger", + "extension", + "extensions_ui", + "feedback", + "file_finder", + "file_icons", + "fs", + "futures 0.3.28", + "git", + "git_hosting_providers", + "go_to_line", + "gpui", + "headless", + "http 0.1.0", + "image_viewer", + "inline_completion_button", + "install_cli", + "isahc", + "journal", + "language", + "language_selector", + "language_tools", + "languages", + "libc", + "log", + "markdown_preview", + "menu", + "mimalloc", + "nix 0.28.0", + "node_runtime", + "notifications", + "outline", + "parking_lot", + "profiling", + "project", + "project_panel", + "project_symbols", + "quick_action_bar", + "recent_projects", + "release_channel", + "rope", + "search", + "serde", + "serde_json", + "settings", + "simplelog", + "smol", + "supermaven", + "tab_switcher", + "task", + "tasks_ui", + "telemetry_events", + "terminal_view", + "theme", + "theme_selector", + "tree-sitter-rust", + "urlencoding", + "util", 
+ "uuid", + "vim", + "welcome", + "winresource", + "workspace", + "zed_actions", +] + +[[package]] +name = "zed_actions" +version = "0.1.0" +dependencies = [ + "gpui", + "serde", +] + +[[package]] +name = "zed_astro" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_clojure" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_csharp" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_dart" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_deno" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_elixir" +version = "0.0.4" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_elm" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_emmet" +version = "0.0.3" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_erlang" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_extension_api" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5c51cad4152bb5eb35b20dccdcbfb36f48d8952a2ed2d3e25b70361007d953b" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "zed_extension_api" +version = "0.0.6" +dependencies = [ + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "zed_extension_api" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca8bcaea3feb2d2ce9dbeb061ee48365312a351faa7014c417b0365fe9e459" +dependencies = [ + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "zed_gleam" +version = "0.1.3" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_glsl" +version = "0.1.0" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_haskell" +version = "0.1.0" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_html" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_lua" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_ocaml" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_php" +version = "0.0.3" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_prisma" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_purescript" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_ruby" +version = "0.0.3" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_svelte" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = 
"zed_terraform" +version = "0.0.3" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_toml" +version = "0.1.1" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_uiua" +version = "0.0.1" +dependencies = [ + "zed_extension_api 0.0.4", +] + +[[package]] +name = "zed_vue" +version = "0.0.2" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zed_zig" +version = "0.1.2" +dependencies = [ + "zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zeno" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd15f8e0dbb966fd9245e7498c7e9e5055d9e5c8b676b95bd67091cd11a1e697" + +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "zeroize" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "zstd" +version = "0.11.2+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "5.0.2+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.8+zstd.1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "zvariant" +version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c1b3ca6db667bfada0f1ebfc94b2b1759ba25472ee5373d4551bb892616389a" +dependencies = [ + "endi", + "enumflags2", + "serde", + "static_assertions", + "url", + "zvariant_derive", +] + +[[package]] +name = "zvariant_derive" +version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7a4b236063316163b69039f77ce3117accb41a09567fd24c168e43491e521bc" +dependencies = [ + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", + "syn 1.0.109", + "zvariant_utils", +] + +[[package]] +name = "zvariant_utils" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00bedb16a193cc12451873fee2a1bc6550225acece0e36f333e68326c73c8172" +dependencies = [ + 
"proc-macro2", + "quote", + "syn 1.0.109", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..2346b63 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,492 @@ +[workspace] +members = [ + "crates/activity_indicator", + "crates/anthropic", + "crates/assets", + "crates/assistant", + "crates/assistant2", + "crates/assistant_tooling", + "crates/audio", + "crates/auto_update", + "crates/breadcrumbs", + "crates/call", + "crates/channel", + "crates/cli", + "crates/client", + "crates/clock", + "crates/collab", + "crates/collab_ui", + "crates/collections", + "crates/command_palette", + "crates/command_palette_hooks", + "crates/copilot", + "crates/db", + "crates/diagnostics", + "crates/editor", + "crates/extension", + "crates/extension_api", + "crates/extension_cli", + "crates/extensions_ui", + "crates/feature_flags", + "crates/feedback", + "crates/file_finder", + "crates/file_icons", + "crates/fs", + "crates/fsevent", + "crates/fuzzy", + "crates/git", + "crates/git_hosting_providers", + "crates/go_to_line", + "crates/google_ai", + "crates/gpui", + "crates/gpui_macros", + "crates/headless", + "crates/http", + "crates/image_viewer", + "crates/inline_completion_button", + "crates/install_cli", + "crates/journal", + "crates/language", + "crates/language_selector", + "crates/language_tools", + "crates/languages", + "crates/live_kit_client", + "crates/live_kit_server", + "crates/lsp", + "crates/markdown", + "crates/markdown_preview", + "crates/media", + "crates/menu", + "crates/multi_buffer", + "crates/node_runtime", + "crates/notifications", + "crates/open_ai", + "crates/outline", + "crates/picker", + "crates/prettier", + "crates/project", + "crates/project_panel", + "crates/project_symbols", + "crates/quick_action_bar", + "crates/recent_projects", + "crates/refineable", + "crates/refineable/derive_refineable", + "crates/release_channel", + "crates/dev_server_projects", + "crates/rich_text", + "crates/rope", + "crates/rpc", + "crates/task", + "crates/tasks_ui", + "crates/search", + "crates/semantic_index", + "crates/semantic_version", + "crates/settings", + "crates/snippet", + "crates/sqlez", + "crates/sqlez_macros", + "crates/story", + "crates/storybook", + "crates/sum_tree", + "crates/tab_switcher", + "crates/supermaven", + "crates/supermaven_api", + "crates/terminal", + "crates/terminal_view", + "crates/text", + "crates/theme", + "crates/theme_importer", + "crates/theme_selector", + "crates/telemetry_events", + "crates/time_format", + "crates/ui", + "crates/ui_text_field", + "crates/util", + "crates/vcs_menu", + "crates/vim", + "crates/welcome", + "crates/workspace", + "crates/worktree", + "crates/zed", + "crates/zed_actions", + + "extensions/astro", + "extensions/clojure", + "extensions/csharp", + "extensions/dart", + "extensions/deno", + "extensions/elixir", + "extensions/elm", + "extensions/emmet", + "extensions/erlang", + "extensions/gleam", + "extensions/glsl", + "extensions/haskell", + "extensions/html", + "extensions/lua", + "extensions/ocaml", + "extensions/php", + "extensions/prisma", + "extensions/purescript", + "extensions/ruby", + "extensions/svelte", + "extensions/terraform", + "extensions/toml", + "extensions/uiua", + "extensions/vue", + "extensions/zig", + + "tooling/xtask", +] +default-members = ["crates/zed"] +resolver = "2" + +[workspace.dependencies] +activity_indicator = { path = "crates/activity_indicator" } +ai = { path = "crates/ai" } +anthropic = { path = "crates/anthropic" } +assets = { path = "crates/assets" } +assistant = { path = "crates/assistant" } 
+assistant2 = { path = "crates/assistant2" } +assistant_tooling = { path = "crates/assistant_tooling" } +audio = { path = "crates/audio" } +auto_update = { path = "crates/auto_update" } +base64 = "0.13" +breadcrumbs = { path = "crates/breadcrumbs" } +call = { path = "crates/call" } +channel = { path = "crates/channel" } +cli = { path = "crates/cli" } +client = { path = "crates/client" } +clock = { path = "crates/clock" } +collab = { path = "crates/collab" } +collab_ui = { path = "crates/collab_ui" } +collections = { path = "crates/collections" } +color = { path = "crates/color" } +command_palette = { path = "crates/command_palette" } +command_palette_hooks = { path = "crates/command_palette_hooks" } +copilot = { path = "crates/copilot" } +db = { path = "crates/db" } +diagnostics = { path = "crates/diagnostics" } +editor = { path = "crates/editor" } +extension = { path = "crates/extension" } +extensions_ui = { path = "crates/extensions_ui" } +feature_flags = { path = "crates/feature_flags" } +feedback = { path = "crates/feedback" } +file_finder = { path = "crates/file_finder" } +file_icons = { path = "crates/file_icons" } +fs = { path = "crates/fs" } +fsevent = { path = "crates/fsevent" } +fuzzy = { path = "crates/fuzzy" } +git = { path = "crates/git" } +git_hosting_providers = { path = "crates/git_hosting_providers" } +go_to_line = { path = "crates/go_to_line" } +google_ai = { path = "crates/google_ai" } +gpui = { path = "crates/gpui" } +gpui_macros = { path = "crates/gpui_macros" } +headless = { path = "crates/headless" } +http = { path = "crates/http" } +install_cli = { path = "crates/install_cli" } +image_viewer = { path = "crates/image_viewer" } +inline_completion_button = { path = "crates/inline_completion_button" } +journal = { path = "crates/journal" } +language = { path = "crates/language" } +language_selector = { path = "crates/language_selector" } +language_tools = { path = "crates/language_tools" } +languages = { path = "crates/languages" } +live_kit_client = { path = "crates/live_kit_client" } +live_kit_server = { path = "crates/live_kit_server" } +lsp = { path = "crates/lsp" } +markdown = { path = "crates/markdown" } +markdown_preview = { path = "crates/markdown_preview" } +media = { path = "crates/media" } +menu = { path = "crates/menu" } +multi_buffer = { path = "crates/multi_buffer" } +node_runtime = { path = "crates/node_runtime" } +notifications = { path = "crates/notifications" } +open_ai = { path = "crates/open_ai" } +outline = { path = "crates/outline" } +picker = { path = "crates/picker" } +plugin = { path = "crates/plugin" } +plugin_macros = { path = "crates/plugin_macros" } +prettier = { path = "crates/prettier" } +project = { path = "crates/project" } +worktree = { path = "crates/worktree" } +project_panel = { path = "crates/project_panel" } +project_symbols = { path = "crates/project_symbols" } +quick_action_bar = { path = "crates/quick_action_bar" } +recent_projects = { path = "crates/recent_projects" } +release_channel = { path = "crates/release_channel" } +dev_server_projects = { path = "crates/dev_server_projects" } +rich_text = { path = "crates/rich_text" } +rope = { path = "crates/rope" } +rpc = { path = "crates/rpc" } +task = { path = "crates/task" } +tasks_ui = { path = "crates/tasks_ui" } +search = { path = "crates/search" } +semantic_index = { path = "crates/semantic_index" } +semantic_version = { path = "crates/semantic_version" } +settings = { path = "crates/settings" } +snippet = { path = "crates/snippet" } +sqlez = { path = "crates/sqlez" } 
+sqlez_macros = { path = "crates/sqlez_macros" } +supermaven = { path = "crates/supermaven" } +supermaven_api = { path = "crates/supermaven_api" } +story = { path = "crates/story" } +storybook = { path = "crates/storybook" } +sum_tree = { path = "crates/sum_tree" } +tab_switcher = { path = "crates/tab_switcher" } +terminal = { path = "crates/terminal" } +terminal_view = { path = "crates/terminal_view" } +text = { path = "crates/text" } +theme = { path = "crates/theme" } +theme_importer = { path = "crates/theme_importer" } +theme_selector = { path = "crates/theme_selector" } +telemetry_events = { path = "crates/telemetry_events" } +time_format = { path = "crates/time_format" } +ui = { path = "crates/ui" } +ui_text_field = { path = "crates/ui_text_field" } +util = { path = "crates/util" } +vcs_menu = { path = "crates/vcs_menu" } +vim = { path = "crates/vim" } +welcome = { path = "crates/welcome" } +workspace = { path = "crates/workspace" } +zed = { path = "crates/zed" } +zed_actions = { path = "crates/zed_actions" } + +anyhow = "1.0.57" +any_vec = "0.13" +async-compression = { version = "0.4", features = ["gzip", "futures-io"] } +async-fs = "1.6" +async-recursion = "1.0.0" +async-tar = "0.4.2" +async-trait = "0.1" +async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } +bitflags = "2.4.2" +blade-graphics = { git = "https://github.com/kvark/blade", rev = "e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c" } +blade-macros = { git = "https://github.com/kvark/blade", rev = "e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c" } +cap-std = "3.0" +cargo_toml = "0.20" +chrono = { version = "0.4", features = ["serde"] } +clap = { version = "4.4", features = ["derive"] } +clickhouse = { version = "0.11.6" } +ctor = "0.2.6" +signal-hook = "0.3.17" +core-foundation = { version = "0.9.3" } +core-foundation-sys = "0.8.6" +derive_more = "0.99.17" +emojis = "0.6.1" +env_logger = "0.9" +exec = "0.3.1" +fork = "0.1.23" +futures = "0.3" +futures-batch = "0.6.1" +futures-lite = "1.13" +git2 = { version = "0.18", default-features = false } +globset = "0.4" +heed = { git = "https://github.com/meilisearch/heed", rev = "036ac23f73a021894974b9adc815bc95b3e0482a", features = [ + "read-txn-no-tls", +] } +hex = "0.4.3" +ignore = "0.4.22" +indoc = "1" +# We explicitly disable http2 support in isahc. 
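+# (Turning off the default features is what drops HTTP/2 here; the static-curl and
+# text-decoding features are then re-enabled explicitly below.)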
+isahc = { version = "1.7.2", default-features = false, features = [ + "static-curl", + "text-decoding", +] } +itertools = "0.11.0" +lazy_static = "1.4.0" +libc = "0.2" +linkify = "0.10.0" +log = { version = "0.4.16", features = ["kv_unstable_serde"] } +nanoid = "0.4" +nix = "0.28" +once_cell = "1.19.0" +ordered-float = "2.1.1" +palette = { version = "0.7.5", default-features = false, features = ["std"] } +parking_lot = "0.12.1" +profiling = "1" +postage = { version = "0.5", features = ["futures-traits"] } +pretty_assertions = "1.3.0" +prost = "0.9" +prost-build = "0.9" +prost-types = "0.9" +pulldown-cmark = { version = "0.10.0", default-features = false } +rand = "0.8.5" +refineable = { path = "./crates/refineable" } +regex = "1.5" +repair_json = "0.1.0" +rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] } +rust-embed = { version = "8.4", features = ["include-exclude"] } +schemars = "0.8" +semver = "1.0" +serde = { version = "1.0", features = ["derive", "rc"] } +serde_derive = { version = "1.0", features = ["deserialize_in_place"] } +serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] } +serde_json_lenient = { version = "0.1", features = [ + "preserve_order", + "raw_value", +] } +serde_repr = "0.1" +sha2 = "0.10" +shellexpand = "2.1.0" +smallvec = { version = "1.6", features = ["union"] } +smol = "1.2" +strum = { version = "0.25.0", features = ["derive"] } +subtle = "2.5.0" +sysinfo = "0.30.7" +tempfile = "3.9.0" +thiserror = "1.0.29" +tiktoken-rs = "0.5.9" +time = { version = "0.3", features = [ + "macros", + "parsing", + "serde", + "serde-well-known", + "formatting", +] } +toml = "0.8" +tokio = { version = "1", features = ["full"] } +tower-http = "0.4.4" +tree-sitter = { version = "0.20", features = ["wasm"] } +tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "7331995b19b8f8aba2d5e26deb51d2195c18bc94" } +tree-sitter-c = "0.20.1" +tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "f44509141e7e483323d2ec178f2d2e6c0fc041c1" } +tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" } +tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" } +tree-sitter-embedded-template = "0.20.0" +tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "b82ab803d887002a0af11f6ce63d72884580bf33" } +tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod" } +tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" } +rustc-demangle = "0.1.23" +tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" } +tree-sitter-html = "0.19.0" +tree-sitter-jsdoc = { git = "https://github.com/tree-sitter/tree-sitter-jsdoc", rev = "6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" } +tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" } +tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" } +tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" } +tree-sitter-python = "0.20.2" +tree-sitter-regex = "0.20.0" +tree-sitter-ruby = "0.20.0" +tree-sitter-rust = "0.20.3" +tree-sitter-typescript = { git = 
"https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" } +tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" } +unindent = "0.1.7" +unicase = "2.6" +unicode-segmentation = "1.10" +url = "2.2" +uuid = { version = "1.1.2", features = ["v4", "v5"] } +wasmparser = "0.201" +wasm-encoder = "0.201" +wasmtime = { version = "19.0.0", default-features = false, features = [ + "async", + "demangle", + "runtime", + "cranelift", + "component-model", +] } +wasmtime-wasi = "19.0.0" +which = "6.0.0" +wit-component = "0.201" +sys-locale = "0.3.1" + +[workspace.dependencies.windows] +version = "0.56.0" +features = [ + "implement", + "Foundation_Numerics", + "System", + "System_Threading", + "Wdk_System_SystemServices", + "Win32_Globalization", + "Win32_Graphics_Direct2D", + "Win32_Graphics_Direct2D_Common", + "Win32_Graphics_DirectWrite", + "Win32_Graphics_Dwm", + "Win32_Graphics_Dxgi_Common", + "Win32_Graphics_Gdi", + "Win32_Graphics_Imaging", + "Win32_Graphics_Imaging_D2D", + "Win32_Security", + "Win32_Security_Credentials", + "Win32_Storage_FileSystem", + "Win32_System_LibraryLoader", + "Win32_System_Com", + "Win32_System_Com_StructuredStorage", + "Win32_System_DataExchange", + "Win32_System_LibraryLoader", + "Win32_System_Ole", + "Win32_System_SystemInformation", + "Win32_System_SystemServices", + "Win32_System_Threading", + "Win32_System_Time", + "Win32_System_WinRT", + "Win32_UI_Controls", + "Win32_UI_HiDpi", + "Win32_UI_Input_Ime", + "Win32_UI_Input_KeyboardAndMouse", + "Win32_UI_Shell", + "Win32_UI_WindowsAndMessaging", +] + +[patch.crates-io] +tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7b4894ba2ae81b988846676f54c0988d4027ef4f" } +# Workaround for a broken nightly build of gpui: See #7644 and revisit once 0.5.3 is released. +pathfinder_simd = { git = "https://github.com/servo/pathfinder.git", rev = "30419d07660dc11a21e42ef4a7fa329600cff152" } + +[profile.dev] +split-debuginfo = "unpacked" +debug = "limited" + +[profile.dev.package] +taffy = { opt-level = 3 } +cranelift-codegen = { opt-level = 3 } +resvg = { opt-level = 3 } +rustybuzz = { opt-level = 3 } +ttf-parser = { opt-level = 3 } +wasmtime-cranelift = { opt-level = 3 } +wasmtime = { opt-level = 3 } + +[profile.release] +debug = "limited" +lto = "thin" +codegen-units = 1 + +[profile.release.package] +zed = { codegen-units = 16 } + +[workspace.lints.clippy] +dbg_macro = "deny" +todo = "deny" + +# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so +# warning on this rule produces a lot of noise. +single_range_in_vec_init = "allow" + +# These are all of the rules that currently have violations in the Zed +# codebase. +# +# We'll want to drive this list down by either: +# 1. fixing violations of the rule and begin enforcing it +# 2. deciding we want to allow the rule permanently, at which point +# we should codify that separately above. +# +# This list shouldn't be added to; it should only get shorter. +# ============================================================================= + +# There are a bunch of rules currently failing in the `style` group, so +# allow all of those, for now. 
+style = "allow" + +# Individual rules that have violations in the codebase: +almost_complete_range = "allow" +arc_with_non_send_sync = "allow" +borrowed_box = "allow" +let_underscore_future = "allow" +map_entry = "allow" +non_canonical_partial_ord_impl = "allow" +reversed_empty_ranges = "allow" +type_complexity = "allow" + +[workspace.metadata.cargo-machete] +ignored = ["bindgen", "cbindgen", "prost_build", "serde"] diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..0614ae5 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,31 @@ +# syntax = docker/dockerfile:1.2 + +FROM rust:1.78-bookworm as builder +WORKDIR app +COPY . . + +# Compile collab server +ARG CARGO_PROFILE_RELEASE_PANIC=abort +ARG GITHUB_SHA + +ENV GITHUB_SHA=$GITHUB_SHA +RUN --mount=type=cache,target=./script/node_modules \ + --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=./target \ + cargo build --release --package collab --bin collab + +# Copy collab server binary out of cached directory +RUN --mount=type=cache,target=./target \ + cp /app/target/release/collab /app/collab + +# Copy collab server binary to the runtime image +FROM debian:bookworm-slim as runtime +RUN apt-get update; \ + apt-get install -y --no-install-recommends libcurl4-openssl-dev ca-certificates \ + linux-perf binutils +WORKDIR app +COPY --from=builder /app/collab /app/collab +COPY --from=builder /app/crates/collab/migrations /app/migrations +ENV MIGRATIONS_PATH=/app/migrations +ENTRYPOINT ["/app/collab"] diff --git a/LICENSE-AGPL b/LICENSE-AGPL new file mode 100644 index 0000000..66a5b08 --- /dev/null +++ b/LICENSE-AGPL @@ -0,0 +1,788 @@ +Copyright 2022 - 2024 Zed Industries, Inc. + + + + +This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. +This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. +You should have received a copy of the GNU Affero General Public License along with this program. If not, see . + + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + Preamble + + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + + The precise terms and conditions for copying, distribution and +modification follow. + + + TERMS AND CONDITIONS + + + 0. Definitions. + + + "This License" refers to version 3 of the GNU Affero General Public License. + + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + + A "covered work" means either the unmodified Program or a work based +on the Program. + + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + + 1. Source Code. + + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + + The Corresponding Source for a work in source code form is that +same work. + + + 2. Basic Permissions. + + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + + 4. Conveying Verbatim Copies. + + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + + 5. Conveying Modified Source Versions. + + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + + 6. Conveying Non-Source Forms. + + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + + 7. Additional Terms. + + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + + 8. Termination. + + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + + 9. Acceptance Not Required for Having Copies. + + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + + 10. Automatic Licensing of Downstream Recipients. + + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + + 11. Patents. + + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + + 12. No Surrender of Others' Freedom. + + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + + 14. Revised Versions of this License. + + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + + 15. Disclaimer of Warranty. + + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + + 16. Limitation of Liability. 
+ + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + + 17. Interpretation of Sections 15 and 16. + + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + + END OF TERMS AND CONDITIONS + + + How to Apply These Terms to Your New Programs + + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + + Copyright (C) + + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + + +Also add information on how to contact you by electronic and paper mail. + + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000..f5f68b6 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,222 @@ +Copyright 2022 - 2024 Zed Industries, Inc. + + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + + http://www.apache.org/licenses/LICENSE-2.0 + + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + + +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + + 1. Definitions. + + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + + END OF TERMS AND CONDITIONS diff --git a/LICENSE-GPL b/LICENSE-GPL new file mode 100644 index 0000000..cb82534 --- /dev/null +++ b/LICENSE-GPL @@ -0,0 +1,200 @@ +GNU GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. 
+ +Preamble + +The GNU General Public License is a free, copyleft license for software and other kinds of works. + +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. + +To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. + +Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. + +The precise terms and conditions for copying, distribution and modification follow. + +TERMS AND CONDITIONS + +0. Definitions. +"This License" refers to version 3 of the GNU General Public License. + +"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. 
+ +"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. + +To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. + +A "covered work" means either the unmodified Program or a work based on the Program. + +To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. + +To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. + +1. Source Code. +The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. +A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. + +The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. + +The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. 
For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. + +4. Conveying Verbatim Copies. +You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. + +5. Conveying Modified Source Versions. 
+You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: +a) The work must carry prominent notices stating that you modified it, and giving a relevant date. +b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". +c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. +d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. +A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. + +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: +a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. +b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. +c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. +d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. 
If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. +e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. + +A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. + +"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. + +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). + +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
+ +Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. + +7. Additional Terms. +"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. + +Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: + +a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or +b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or +c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or +d) Limiting the use for publicity purposes of names of licensors or authors of the material; or +e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or +f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. +All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. + +8. Termination. 
+You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. + +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. + +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. + +9. Acceptance Not Required for Having Copies. +You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. +10. Automatic Licensing of Downstream Recipients. +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. +An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. + +11. Patents. +A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". 
+A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. + +In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. + +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. + +A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. 
+ +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. + +12. No Surrender of Others' Freedom. +If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. +13. Use with the GNU Affero General Public License. +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. +14. Revised Versions of this License. +The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. + +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. + +15. Disclaimer of Warranty. +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. +16. Limitation of Liability. 
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. +17. Interpretation of Sections 15 and 16. +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + +<one line to give the program's name and a brief idea of what it does.> +Copyright (C) <year> <name of author> + +This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + +If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: + +<program> Copyright (C) <year> <name of author> +This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. +This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". + +You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. + +The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/Procfile b/Procfile new file mode 100644 index 0000000..c74eeea --- /dev/null +++ b/Procfile @@ -0,0 +1,3 @@ +collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve +livekit: livekit-server --dev +blob_store: ./script/run-local-minio diff --git a/README.md b/README.md new file mode 100644 index 0000000..a5ae33d --- /dev/null +++ b/README.md @@ -0,0 +1,50 @@ +# Zed + +[![CI](https://github.com/zed-industries/zed/actions/workflows/ci.yml/badge.svg)](https://github.com/zed-industries/zed/actions/workflows/ci.yml) + +Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter). + +## Installation + +You can [download](https://zed.dev/download) Zed today for macOS (v10.15+). + +Support for additional platforms is on our [roadmap](https://zed.dev/roadmap): + +- Linux ([tracking issue](https://github.com/zed-industries/zed/issues/7015)) +- Windows ([tracking issue](https://github.com/zed-industries/zed/issues/5394)) +- Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396)) + +For macOS users, you can also install Zed using [Homebrew](https://brew.sh/): + +```sh +brew install --cask zed +``` + +Alternatively, to install the Preview release: + +```sh +brew install --cask zed@preview +``` + +## Developing Zed + +- [Building Zed for macOS](./docs/src/development/macos.md) +- [Building Zed for Linux](./docs/src/development/linux.md) +- [Building Zed for Windows](./docs/src/development/windows.md) +- [Running Collaboration Locally](./docs/src/development/local-collaboration.md) + +## Contributing + +See [CONTRIBUTING.md](./CONTRIBUTING.md) for ways you can contribute to Zed. + +Also... we're hiring! Check out our [jobs](https://zed.dev/jobs) page for open roles. + +## Licensing + +License information for third party dependencies must be correctly provided for CI to pass. + +We use [`cargo-about`](https://github.com/EmbarkStudios/cargo-about) to automatically comply with open source licenses. If CI is failing, check the following: + +- Is it showing a `no license specified` error for a crate you've created? If so, add `publish = false` under `[package]` in your crate's Cargo.toml. +- Is the error `failed to satisfy license requirements` for a dependency? If so, first determine what license the project has and whether this system is sufficient to comply with this license's requirements. If you're unsure, ask a lawyer. Once you've verified that this system is acceptable add the license's SPDX identifier to the `accepted` array in `script/licenses/zed-licenses.toml`. +- Is `cargo-about` unable to find the license for a dependency? If so, add a clarification field at the end of `script/licenses/zed-licenses.toml`, as specified in the [cargo-about book](https://embarkstudios.github.io/cargo-about/cli/generate/config.html#crate-configuration). 
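To make the licensing checklist above concrete, here is a minimal sketch of the two fixes it describes. The SPDX identifiers and the crate name below are illustrative placeholders, not entries taken from the actual repository files.

```toml
# script/licenses/zed-licenses.toml — cargo-about reads the `accepted` array;
# add a dependency's SPDX identifier here once you have verified its license.
accepted = [
    "Apache-2.0",
    "MIT",
    "ISC", # example entry; use the identifier reported in the CI failure
]

# Cargo.toml of a crate created in this repo that reports `no license specified`:
[package]
name = "my_internal_crate" # hypothetical crate name
version = "0.1.0"
publish = false            # silences the `no license specified` error, per the note above
```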
diff --git a/assets/fonts/zed-mono/zed-mono-extended.ttf b/assets/fonts/zed-mono/zed-mono-extended.ttf new file mode 100644 index 0000000..05b8c70 Binary files /dev/null and b/assets/fonts/zed-mono/zed-mono-extended.ttf differ diff --git a/assets/fonts/zed-mono/zed-mono-extendedbold.ttf b/assets/fonts/zed-mono/zed-mono-extendedbold.ttf new file mode 100644 index 0000000..d5dde1b Binary files /dev/null and b/assets/fonts/zed-mono/zed-mono-extendedbold.ttf differ diff --git a/assets/fonts/zed-mono/zed-mono-extendedbolditalic.ttf b/assets/fonts/zed-mono/zed-mono-extendedbolditalic.ttf new file mode 100644 index 0000000..bcd8c7e Binary files /dev/null and b/assets/fonts/zed-mono/zed-mono-extendedbolditalic.ttf differ diff --git a/assets/fonts/zed-mono/zed-mono-extendeditalic.ttf b/assets/fonts/zed-mono/zed-mono-extendeditalic.ttf new file mode 100644 index 0000000..023c5a8 Binary files /dev/null and b/assets/fonts/zed-mono/zed-mono-extendeditalic.ttf differ diff --git a/assets/fonts/zed-sans/zed-sans-extended.ttf b/assets/fonts/zed-sans/zed-sans-extended.ttf new file mode 100644 index 0000000..07a9685 Binary files /dev/null and b/assets/fonts/zed-sans/zed-sans-extended.ttf differ diff --git a/assets/fonts/zed-sans/zed-sans-extendedbold.ttf b/assets/fonts/zed-sans/zed-sans-extendedbold.ttf new file mode 100644 index 0000000..696c3cd Binary files /dev/null and b/assets/fonts/zed-sans/zed-sans-extendedbold.ttf differ diff --git a/assets/fonts/zed-sans/zed-sans-extendedbolditalic.ttf b/assets/fonts/zed-sans/zed-sans-extendedbolditalic.ttf new file mode 100644 index 0000000..74cb8f7 Binary files /dev/null and b/assets/fonts/zed-sans/zed-sans-extendedbolditalic.ttf differ diff --git a/assets/fonts/zed-sans/zed-sans-extendeditalic.ttf b/assets/fonts/zed-sans/zed-sans-extendeditalic.ttf new file mode 100644 index 0000000..9460e5a Binary files /dev/null and b/assets/fonts/zed-sans/zed-sans-extendeditalic.ttf differ diff --git a/assets/icons/LICENSES b/assets/icons/LICENSES new file mode 100644 index 0000000..7a2fc3b --- /dev/null +++ b/assets/icons/LICENSES @@ -0,0 +1,9 @@ +Lucide License + +ISC License + +Copyright (c) for portions of Lucide are held by Cole Bemis 2013-2022 as part of Feather (MIT). All other copyright (c) for Lucide are held by Lucide Contributors 2022. + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/assets/icons/ai.svg b/assets/icons/ai.svg new file mode 100644 index 0000000..d60396a --- /dev/null +++ b/assets/icons/ai.svg @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/assets/icons/arrow_circle.svg b/assets/icons/arrow_circle.svg new file mode 100644 index 0000000..90e352b --- /dev/null +++ b/assets/icons/arrow_circle.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/arrow_down.svg b/assets/icons/arrow_down.svg new file mode 100644 index 0000000..7d78497 --- /dev/null +++ b/assets/icons/arrow_down.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/arrow_left.svg b/assets/icons/arrow_left.svg new file mode 100644 index 0000000..57ee750 --- /dev/null +++ b/assets/icons/arrow_left.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/arrow_right.svg b/assets/icons/arrow_right.svg new file mode 100644 index 0000000..7a5b117 --- /dev/null +++ b/assets/icons/arrow_right.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/arrow_up.svg b/assets/icons/arrow_up.svg new file mode 100644 index 0000000..81dfee8 --- /dev/null +++ b/assets/icons/arrow_up.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/arrow_up_right.svg b/assets/icons/arrow_up_right.svg new file mode 100644 index 0000000..3712b31 --- /dev/null +++ b/assets/icons/arrow_up_right.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/at_sign.svg b/assets/icons/at_sign.svg new file mode 100644 index 0000000..4cf8cd4 --- /dev/null +++ b/assets/icons/at_sign.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/backspace.svg b/assets/icons/backspace.svg new file mode 100644 index 0000000..f7f1cf1 --- /dev/null +++ b/assets/icons/backspace.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/bell.svg b/assets/icons/bell.svg new file mode 100644 index 0000000..f9b2a97 --- /dev/null +++ b/assets/icons/bell.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/bell_dot.svg b/assets/icons/bell_dot.svg new file mode 100644 index 0000000..09a1740 --- /dev/null +++ b/assets/icons/bell_dot.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/bell_off.svg b/assets/icons/bell_off.svg new file mode 100644 index 0000000..98cbd1e --- /dev/null +++ b/assets/icons/bell_off.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/bell_ring.svg b/assets/icons/bell_ring.svg new file mode 100644 index 0000000..e411e75 --- /dev/null +++ b/assets/icons/bell_ring.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/bolt.svg b/assets/icons/bolt.svg new file mode 100644 index 0000000..543e72a --- /dev/null +++ b/assets/icons/bolt.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/caret_down.svg b/assets/icons/caret_down.svg new file mode 100644 index 0000000..ff8b8c3 --- /dev/null +++ b/assets/icons/caret_down.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/caret_up.svg b/assets/icons/caret_up.svg new file mode 100644 index 0000000..53026b8 --- /dev/null +++ b/assets/icons/caret_up.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/case_insensitive.svg b/assets/icons/case_insensitive.svg new file mode 100644 index 0000000..8c943e7 --- /dev/null +++ b/assets/icons/case_insensitive.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/assets/icons/check.svg b/assets/icons/check.svg new file mode 100644 index 0000000..3935268 --- /dev/null +++ b/assets/icons/check.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/check_circle.svg b/assets/icons/check_circle.svg new file mode 100644 index 0000000..b48fe34 --- /dev/null +++ b/assets/icons/check_circle.svg @@ -0,0 +1,4 @@ + + + + diff --git 
a/assets/icons/chevron_down.svg b/assets/icons/chevron_down.svg new file mode 100644 index 0000000..b971555 --- /dev/null +++ b/assets/icons/chevron_down.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/chevron_left.svg b/assets/icons/chevron_left.svg new file mode 100644 index 0000000..8e61bee --- /dev/null +++ b/assets/icons/chevron_left.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/chevron_right.svg b/assets/icons/chevron_right.svg new file mode 100644 index 0000000..fcd9d83 --- /dev/null +++ b/assets/icons/chevron_right.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/chevron_up.svg b/assets/icons/chevron_up.svg new file mode 100644 index 0000000..171cdd6 --- /dev/null +++ b/assets/icons/chevron_up.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/circle_check.svg b/assets/icons/circle_check.svg new file mode 100644 index 0000000..adfc8ce --- /dev/null +++ b/assets/icons/circle_check.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/code.svg b/assets/icons/code.svg new file mode 100644 index 0000000..757c5a1 --- /dev/null +++ b/assets/icons/code.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/command.svg b/assets/icons/command.svg new file mode 100644 index 0000000..d38389a --- /dev/null +++ b/assets/icons/command.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/control.svg b/assets/icons/control.svg new file mode 100644 index 0000000..94189dc --- /dev/null +++ b/assets/icons/control.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/conversations.svg b/assets/icons/conversations.svg new file mode 100644 index 0000000..f4ff185 --- /dev/null +++ b/assets/icons/conversations.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/copilot.svg b/assets/icons/copilot.svg new file mode 100644 index 0000000..06dbf17 --- /dev/null +++ b/assets/icons/copilot.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/assets/icons/copilot_disabled.svg b/assets/icons/copilot_disabled.svg new file mode 100644 index 0000000..eba36a2 --- /dev/null +++ b/assets/icons/copilot_disabled.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/assets/icons/copilot_error.svg b/assets/icons/copilot_error.svg new file mode 100644 index 0000000..6069c55 --- /dev/null +++ b/assets/icons/copilot_error.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/copilot_init.svg b/assets/icons/copilot_init.svg new file mode 100644 index 0000000..6cbf63f --- /dev/null +++ b/assets/icons/copilot_init.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/copy.svg b/assets/icons/copy.svg new file mode 100644 index 0000000..8b755e8 --- /dev/null +++ b/assets/icons/copy.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/countdown_timer.svg b/assets/icons/countdown_timer.svg new file mode 100644 index 0000000..b9b7479 --- /dev/null +++ b/assets/icons/countdown_timer.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/dash.svg b/assets/icons/dash.svg new file mode 100644 index 0000000..efff9ea --- /dev/null +++ b/assets/icons/dash.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/delete.svg b/assets/icons/delete.svg new file mode 100644 index 0000000..a7edbb6 --- /dev/null +++ b/assets/icons/delete.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/desktop.svg b/assets/icons/desktop.svg new file mode 100644 index 0000000..ad252e6 --- /dev/null +++ b/assets/icons/desktop.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/disconnected.svg b/assets/icons/disconnected.svg new file mode 100644 index 0000000..37d0ee9 --- /dev/null +++ b/assets/icons/disconnected.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/download.svg 
b/assets/icons/download.svg new file mode 100644 index 0000000..bcf66df --- /dev/null +++ b/assets/icons/download.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/icons/ellipsis.svg b/assets/icons/ellipsis.svg new file mode 100644 index 0000000..1858c65 --- /dev/null +++ b/assets/icons/ellipsis.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/error.svg b/assets/icons/error.svg new file mode 100644 index 0000000..593629b --- /dev/null +++ b/assets/icons/error.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/escape.svg b/assets/icons/escape.svg new file mode 100644 index 0000000..00c772a --- /dev/null +++ b/assets/icons/escape.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/exit.svg b/assets/icons/exit.svg new file mode 100644 index 0000000..2cc6ce1 --- /dev/null +++ b/assets/icons/exit.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/expand_vertical.svg b/assets/icons/expand_vertical.svg new file mode 100644 index 0000000..e278911 --- /dev/null +++ b/assets/icons/expand_vertical.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/external_link.svg b/assets/icons/external_link.svg new file mode 100644 index 0000000..561f012 --- /dev/null +++ b/assets/icons/external_link.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/feedback.svg b/assets/icons/feedback.svg new file mode 100644 index 0000000..0f5e95f --- /dev/null +++ b/assets/icons/feedback.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file.svg b/assets/icons/file.svg new file mode 100644 index 0000000..5f256b4 --- /dev/null +++ b/assets/icons/file.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/file_icons/ai.svg b/assets/icons/file_icons/ai.svg new file mode 100644 index 0000000..d60396a --- /dev/null +++ b/assets/icons/file_icons/ai.svg @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/assets/icons/file_icons/archive.svg b/assets/icons/file_icons/archive.svg new file mode 100644 index 0000000..fd37801 --- /dev/null +++ b/assets/icons/file_icons/archive.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/astro.svg b/assets/icons/file_icons/astro.svg new file mode 100644 index 0000000..0b95d64 --- /dev/null +++ b/assets/icons/file_icons/astro.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/audio.svg b/assets/icons/file_icons/audio.svg new file mode 100644 index 0000000..5152efb --- /dev/null +++ b/assets/icons/file_icons/audio.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/book.svg b/assets/icons/file_icons/book.svg new file mode 100644 index 0000000..3b11995 --- /dev/null +++ b/assets/icons/file_icons/book.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/bun.svg b/assets/icons/file_icons/bun.svg new file mode 100644 index 0000000..48af8b3 --- /dev/null +++ b/assets/icons/file_icons/bun.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/assets/icons/file_icons/c.svg b/assets/icons/file_icons/c.svg new file mode 100644 index 0000000..dab784f --- /dev/null +++ b/assets/icons/file_icons/c.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/icons/file_icons/camera.svg b/assets/icons/file_icons/camera.svg new file mode 100644 index 0000000..b040935 --- /dev/null +++ b/assets/icons/file_icons/camera.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/chevron_down.svg b/assets/icons/file_icons/chevron_down.svg new file mode 100644 index 0000000..9e60e40 --- /dev/null +++ b/assets/icons/file_icons/chevron_down.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/chevron_left.svg 
b/assets/icons/file_icons/chevron_left.svg new file mode 100644 index 0000000..a2aa9ad --- /dev/null +++ b/assets/icons/file_icons/chevron_left.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/chevron_right.svg b/assets/icons/file_icons/chevron_right.svg new file mode 100644 index 0000000..06608c9 --- /dev/null +++ b/assets/icons/file_icons/chevron_right.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/chevron_up.svg b/assets/icons/file_icons/chevron_up.svg new file mode 100644 index 0000000..fd3d5e4 --- /dev/null +++ b/assets/icons/file_icons/chevron_up.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/code.svg b/assets/icons/file_icons/code.svg new file mode 100644 index 0000000..5f012f8 --- /dev/null +++ b/assets/icons/file_icons/code.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/coffeescript.svg b/assets/icons/file_icons/coffeescript.svg new file mode 100644 index 0000000..fc49df6 --- /dev/null +++ b/assets/icons/file_icons/coffeescript.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/conversations.svg b/assets/icons/file_icons/conversations.svg new file mode 100644 index 0000000..cef7646 --- /dev/null +++ b/assets/icons/file_icons/conversations.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/cpp.svg b/assets/icons/file_icons/cpp.svg new file mode 100644 index 0000000..e3385c1 --- /dev/null +++ b/assets/icons/file_icons/cpp.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/icons/file_icons/css.svg b/assets/icons/file_icons/css.svg new file mode 100644 index 0000000..f0f78c3 --- /dev/null +++ b/assets/icons/file_icons/css.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/dart.svg b/assets/icons/file_icons/dart.svg new file mode 100644 index 0000000..fd3ab01 --- /dev/null +++ b/assets/icons/file_icons/dart.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/database.svg b/assets/icons/file_icons/database.svg new file mode 100644 index 0000000..10fbdcb --- /dev/null +++ b/assets/icons/file_icons/database.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/docker.svg b/assets/icons/file_icons/docker.svg new file mode 100644 index 0000000..7c9cb3d --- /dev/null +++ b/assets/icons/file_icons/docker.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/assets/icons/file_icons/elixir.svg b/assets/icons/file_icons/elixir.svg new file mode 100644 index 0000000..fdfd2b0 --- /dev/null +++ b/assets/icons/file_icons/elixir.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/elm.svg b/assets/icons/file_icons/elm.svg new file mode 100644 index 0000000..ff63582 --- /dev/null +++ b/assets/icons/file_icons/elm.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/assets/icons/file_icons/erlang.svg b/assets/icons/file_icons/erlang.svg new file mode 100644 index 0000000..9c937d3 --- /dev/null +++ b/assets/icons/file_icons/erlang.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/eslint.svg b/assets/icons/file_icons/eslint.svg new file mode 100644 index 0000000..0f42abe --- /dev/null +++ b/assets/icons/file_icons/eslint.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/file.svg b/assets/icons/file_icons/file.svg new file mode 100644 index 0000000..3c72bd3 --- /dev/null +++ b/assets/icons/file_icons/file.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json new file mode 100644 index 0000000..ad8d25a --- /dev/null +++ b/assets/icons/file_icons/file_types.json 
@@ -0,0 +1,357 @@ +{ + "stems": { + "Podfile": "ruby", + "Procfile": "heroku", + "Dockerfile": "docker" + }, + "suffixes": { + "astro": "astro", + "Emakefile": "erlang", + "aac": "audio", + "accdb": "storage", + "app.src": "erlang", + "avi": "video", + "avif": "image", + "bak": "backup", + "bash": "terminal", + "bash_aliases": "terminal", + "bash_logout": "terminal", + "bash_profile": "terminal", + "bashrc": "terminal", + "bmp": "image", + "c": "c", + "cc": "cpp", + "cjs": "javascript", + "conf": "settings", + "cpp": "cpp", + "css": "css", + "csv": "storage", + "cts": "typescript", + "coffee": "coffeescript", + "dart": "dart", + "dat": "storage", + "db": "storage", + "dbf": "storage", + "dll": "storage", + "doc": "document", + "docx": "document", + "eex": "elixir", + "elm": "elm", + "erl": "erlang", + "escript": "erlang", + "eslintrc": "eslint", + "eslintrc.js": "eslint", + "eslintrc.json": "eslint", + "ex": "elixir", + "exs": "elixir", + "fish": "terminal", + "flac": "audio", + "fmp": "storage", + "fp7": "storage", + "frm": "storage", + "fs": "fsharp", + "gdb": "storage", + "gif": "image", + "gitattributes": "vcs", + "gitignore": "vcs", + "gitkeep": "vcs", + "gitmodules": "vcs", + "go": "go", + "graphql": "graphql", + "h": "c", + "hpp": "cpp", + "handlebars": "code", + "hbs": "template", + "heex": "elixir", + "heif": "image", + "heic": "image", + "hrl": "erlang", + "hs": "haskell", + "htm": "template", + "html": "template", + "ib": "storage", + "ico": "image", + "ini": "settings", + "j2k": "image", + "java": "java", + "jfif": "image", + "jp2": "image", + "jpeg": "image", + "jpg": "image", + "js": "javascript", + "jsx": "react", + "json": "storage", + "jsonc": "storage", + "jxl": "image", + "kt": "kotlin", + "ldf": "storage", + "lock": "lock", + "lockb": "bun", + "log": "log", + "lua": "lua", + "m4a": "audio", + "m4v": "video", + "md": "document", + "mdb": "storage", + "mdf": "storage", + "mdx": "document", + "metadata": "code", + "mkv": "video", + "mjs": "javascript", + "mka": "audio", + "ml": "ocaml", + "mli": "ocaml", + "mov": "video", + "mp3": "audio", + "mp4": "video", + "mts": "typescript", + "myd": "storage", + "myi": "storage", + "nu": "terminal", + "nim": "nim", + "odp": "document", + "ods": "document", + "odt": "document", + "ogg": "audio", + "opus": "audio", + "otf": "font", + "pdb": "storage", + "pdf": "document", + "php": "php", + "plist": "template", + "png": "image", + "ppt": "document", + "pptx": "document", + "prettierignore": "prettier", + "prettierrc": "prettier", + "prisma": "prisma", + "profile": "terminal", + "ps1": "terminal", + "psd": "image", + "py": "python", + "qoi": "image", + "rb": "ruby", + "rebar.config": "erlang", + "rkt": "code", + "rs": "rust", + "r": "r", + "rtf": "document", + "sav": "storage", + "scm": "code", + "sdf": "storage", + "sh": "terminal", + "sqlite": "storage", + "svelte": "template", + "svg": "image", + "sc": "scala", + "scala": "scala", + "sql": "storage", + "swift": "swift", + "tf": "terraform", + "tfvars": "terraform", + "tiff": "image", + "toml": "toml", + "ts": "typescript", + "tsv": "storage", + "ttf": "font", + "tsx": "react", + "txt": "document", + "tcl": "tcl", + "vue": "vue", + "wav": "audio", + "webm": "video", + "webp": "image", + "wma": "audio", + "wmv": "video", + "woff": "font", + "woff2": "font", + "wv": "audio", + "xls": "document", + "xlsx": "document", + "xml": "template", + "xrl": "erlang", + "yaml": "settings", + "yml": "settings", + "yrl": "erlang", + "zlogin": "terminal", + "zsh": "terminal", + "zsh_aliases": 
"terminal", + "zsh_histfile": "terminal", + "zsh_profile": "terminal", + "zshenv": "terminal", + "zshrc": "terminal" + }, + "types": { + "astro": { + "icon": "icons/file_icons/astro.svg" + }, + "audio": { + "icon": "icons/file_icons/audio.svg" + }, + "code": { + "icon": "icons/file_icons/code.svg" + }, + "collapsed_chevron": { + "icon": "icons/file_icons/chevron_right.svg" + }, + "collapsed_folder": { + "icon": "icons/file_icons/folder.svg" + }, + "c": { + "icon": "icons/file_icons/c.svg" + }, + "cpp": { + "icon": "icons/file_icons/cpp.svg" + }, + "css": { + "icon": "icons/file_icons/css.svg" + }, + "coffeescript": { + "icon": "icons/file_icons/coffeescript.svg" + }, + "dart": { + "icon": "icons/file_icons/dart.svg" + }, + "default": { + "icon": "icons/file_icons/file.svg" + }, + "docker": { + "icon": "icons/file_icons/docker.svg" + }, + "document": { + "icon": "icons/file_icons/book.svg" + }, + "elixir": { + "icon": "icons/file_icons/elixir.svg" + }, + "elm": { + "icon": "icons/file_icons/elm.svg" + }, + "erlang": { + "icon": "icons/file_icons/erlang.svg" + }, + "eslint": { + "icon": "icons/file_icons/eslint.svg" + }, + "expanded_chevron": { + "icon": "icons/file_icons/chevron_down.svg" + }, + "expanded_folder": { + "icon": "icons/file_icons/folder_open.svg" + }, + "font": { + "icon": "icons/file_icons/font.svg" + }, + "fsharp": { + "icon": "icons/file_icons/fsharp.svg" + }, + "haskell": { + "icon": "icons/file_icons/haskell.svg" + }, + "heroku": { + "icon": "icons/file_icons/heroku.svg" + }, + "go": { + "icon": "icons/file_icons/go.svg" + }, + "graphql": { + "icon": "icons/file_icons/graphql.svg" + }, + "image": { + "icon": "icons/file_icons/image.svg" + }, + "java": { + "icon": "icons/file_icons/java.svg" + }, + "javascript": { + "icon": "icons/file_icons/javascript.svg" + }, + "kotlin": { + "icon": "icons/file_icons/kotlin.svg" + }, + "lock": { + "icon": "icons/file_icons/lock.svg" + }, + "bun": { + "icon": "icons/file_icons/bun.svg" + }, + "log": { + "icon": "icons/file_icons/info.svg" + }, + "lua": { + "icon": "icons/file_icons/lua.svg" + }, + "ocaml": { + "icon": "icons/file_icons/ocaml.svg" + }, + "nim": { + "icon": "icons/file_icons/nim.svg" + }, + "phoenix": { + "icon": "icons/file_icons/phoenix.svg" + }, + "php": { + "icon": "icons/file_icons/php.svg" + }, + "prettier": { + "icon": "icons/file_icons/prettier.svg" + }, + "prisma": { + "icon": "icons/file_icons/prisma.svg" + }, + "python": { + "icon": "icons/file_icons/python.svg" + }, + "r": { + "icon": "icons/file_icons/r.svg" + }, + "react": { + "icon": "icons/file_icons/react.svg" + }, + "ruby": { + "icon": "icons/file_icons/ruby.svg" + }, + "rust": { + "icon": "icons/file_icons/rust.svg" + }, + "settings": { + "icon": "icons/file_icons/settings.svg" + }, + "storage": { + "icon": "icons/file_icons/database.svg" + }, + "scala": { + "icon": "icons/file_icons/scala.svg" + }, + "swift": { + "icon": "icons/file_icons/swift.svg" + }, + "template": { + "icon": "icons/file_icons/html.svg" + }, + "terraform": { + "icon": "icons/file_icons/terraform.svg" + }, + "terminal": { + "icon": "icons/file_icons/terminal.svg" + }, + "toml": { + "icon": "icons/file_icons/toml.svg" + }, + "typescript": { + "icon": "icons/file_icons/typescript.svg" + }, + "tcl": { + "icon": "icons/file_icons/tcl.svg" + }, + "vcs": { + "icon": "icons/file_icons/git.svg" + }, + "video": { + "icon": "icons/file_icons/video.svg" + }, + "vue": { + "icon": "icons/file_icons/vue.svg" + } + } +} diff --git a/assets/icons/file_icons/folder.svg 
b/assets/icons/file_icons/folder.svg new file mode 100644 index 0000000..a76dc63 --- /dev/null +++ b/assets/icons/file_icons/folder.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/folder_open.svg b/assets/icons/file_icons/folder_open.svg new file mode 100644 index 0000000..ef37f55 --- /dev/null +++ b/assets/icons/file_icons/folder_open.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/font.svg b/assets/icons/file_icons/font.svg new file mode 100644 index 0000000..4cb01a2 --- /dev/null +++ b/assets/icons/file_icons/font.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/fsharp.svg b/assets/icons/file_icons/fsharp.svg new file mode 100644 index 0000000..9dd7c15 --- /dev/null +++ b/assets/icons/file_icons/fsharp.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/git.svg b/assets/icons/file_icons/git.svg new file mode 100644 index 0000000..197db2e --- /dev/null +++ b/assets/icons/file_icons/git.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/go.svg b/assets/icons/file_icons/go.svg new file mode 100644 index 0000000..756dd2c --- /dev/null +++ b/assets/icons/file_icons/go.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/graphql.svg b/assets/icons/file_icons/graphql.svg new file mode 100644 index 0000000..9688472 --- /dev/null +++ b/assets/icons/file_icons/graphql.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/icons/file_icons/hash.svg b/assets/icons/file_icons/hash.svg new file mode 100644 index 0000000..2241904 --- /dev/null +++ b/assets/icons/file_icons/hash.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/haskell.svg b/assets/icons/file_icons/haskell.svg new file mode 100644 index 0000000..f7519dc --- /dev/null +++ b/assets/icons/file_icons/haskell.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/heroku.svg b/assets/icons/file_icons/heroku.svg new file mode 100644 index 0000000..826a886 --- /dev/null +++ b/assets/icons/file_icons/heroku.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/html.svg b/assets/icons/file_icons/html.svg new file mode 100644 index 0000000..41f254d --- /dev/null +++ b/assets/icons/file_icons/html.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/image.svg b/assets/icons/file_icons/image.svg new file mode 100644 index 0000000..75e64c0 --- /dev/null +++ b/assets/icons/file_icons/image.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/file_icons/info.svg b/assets/icons/file_icons/info.svg new file mode 100644 index 0000000..5d9bef7 --- /dev/null +++ b/assets/icons/file_icons/info.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/java.svg b/assets/icons/file_icons/java.svg new file mode 100644 index 0000000..63ce6e7 --- /dev/null +++ b/assets/icons/file_icons/java.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/file_icons/javascript.svg b/assets/icons/file_icons/javascript.svg new file mode 100644 index 0000000..c2aa1cf --- /dev/null +++ b/assets/icons/file_icons/javascript.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/kotlin.svg b/assets/icons/file_icons/kotlin.svg new file mode 100644 index 0000000..5d70c99 --- /dev/null +++ b/assets/icons/file_icons/kotlin.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/lock.svg b/assets/icons/file_icons/lock.svg new file mode 100644 index 0000000..6bfef24 --- /dev/null +++ b/assets/icons/file_icons/lock.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/lua.svg 
b/assets/icons/file_icons/lua.svg new file mode 100644 index 0000000..6035c43 --- /dev/null +++ b/assets/icons/file_icons/lua.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/magnifying_glass.svg b/assets/icons/file_icons/magnifying_glass.svg new file mode 100644 index 0000000..75c3e76 --- /dev/null +++ b/assets/icons/file_icons/magnifying_glass.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/nim.svg b/assets/icons/file_icons/nim.svg new file mode 100644 index 0000000..1750bbf --- /dev/null +++ b/assets/icons/file_icons/nim.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/notebook.svg b/assets/icons/file_icons/notebook.svg new file mode 100644 index 0000000..b72ebc3 --- /dev/null +++ b/assets/icons/file_icons/notebook.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/assets/icons/file_icons/ocaml.svg b/assets/icons/file_icons/ocaml.svg new file mode 100644 index 0000000..7d59015 --- /dev/null +++ b/assets/icons/file_icons/ocaml.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/package.svg b/assets/icons/file_icons/package.svg new file mode 100644 index 0000000..12889e8 --- /dev/null +++ b/assets/icons/file_icons/package.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/phoenix.svg b/assets/icons/file_icons/phoenix.svg new file mode 100644 index 0000000..b61b8be --- /dev/null +++ b/assets/icons/file_icons/phoenix.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/php.svg b/assets/icons/file_icons/php.svg new file mode 100644 index 0000000..2f26ad7 --- /dev/null +++ b/assets/icons/file_icons/php.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/plus.svg b/assets/icons/file_icons/plus.svg new file mode 100644 index 0000000..f343d5d --- /dev/null +++ b/assets/icons/file_icons/plus.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/prettier.svg b/assets/icons/file_icons/prettier.svg new file mode 100644 index 0000000..835bd3a --- /dev/null +++ b/assets/icons/file_icons/prettier.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/assets/icons/file_icons/prisma.svg b/assets/icons/file_icons/prisma.svg new file mode 100644 index 0000000..2c7349d --- /dev/null +++ b/assets/icons/file_icons/prisma.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/project.svg b/assets/icons/file_icons/project.svg new file mode 100644 index 0000000..86a15d4 --- /dev/null +++ b/assets/icons/file_icons/project.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/python.svg b/assets/icons/file_icons/python.svg new file mode 100644 index 0000000..de904d8 --- /dev/null +++ b/assets/icons/file_icons/python.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/r.svg b/assets/icons/file_icons/r.svg new file mode 100644 index 0000000..903b051 --- /dev/null +++ b/assets/icons/file_icons/r.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/react.svg b/assets/icons/file_icons/react.svg new file mode 100644 index 0000000..c4c9238 --- /dev/null +++ b/assets/icons/file_icons/react.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/replace.svg b/assets/icons/file_icons/replace.svg new file mode 100644 index 0000000..837cb23 --- /dev/null +++ b/assets/icons/file_icons/replace.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/assets/icons/file_icons/replace_all.svg b/assets/icons/file_icons/replace_all.svg new file mode 100644 index 0000000..d3cf503 --- /dev/null +++ b/assets/icons/file_icons/replace_all.svg @@ -0,0 +1,4 @@ + + + + diff 
--git a/assets/icons/file_icons/replace_next.svg b/assets/icons/file_icons/replace_next.svg new file mode 100644 index 0000000..72511be --- /dev/null +++ b/assets/icons/file_icons/replace_next.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/ruby.svg b/assets/icons/file_icons/ruby.svg new file mode 100644 index 0000000..cd30f83 --- /dev/null +++ b/assets/icons/file_icons/ruby.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/file_icons/rust.svg b/assets/icons/file_icons/rust.svg new file mode 100644 index 0000000..5db7536 --- /dev/null +++ b/assets/icons/file_icons/rust.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/scala.svg b/assets/icons/file_icons/scala.svg new file mode 100644 index 0000000..9e89d1f --- /dev/null +++ b/assets/icons/file_icons/scala.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/file_icons/settings.svg b/assets/icons/file_icons/settings.svg new file mode 100644 index 0000000..081d25b --- /dev/null +++ b/assets/icons/file_icons/settings.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/swift.svg b/assets/icons/file_icons/swift.svg new file mode 100644 index 0000000..69745f0 --- /dev/null +++ b/assets/icons/file_icons/swift.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/tcl.svg b/assets/icons/file_icons/tcl.svg new file mode 100644 index 0000000..bb15b0f --- /dev/null +++ b/assets/icons/file_icons/tcl.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/terminal.svg b/assets/icons/file_icons/terminal.svg new file mode 100644 index 0000000..d3742fa --- /dev/null +++ b/assets/icons/file_icons/terminal.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/terraform.svg b/assets/icons/file_icons/terraform.svg new file mode 100644 index 0000000..47bdc0f --- /dev/null +++ b/assets/icons/file_icons/terraform.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/toml.svg b/assets/icons/file_icons/toml.svg new file mode 100644 index 0000000..9ab78af --- /dev/null +++ b/assets/icons/file_icons/toml.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/typescript.svg b/assets/icons/file_icons/typescript.svg new file mode 100644 index 0000000..e317743 --- /dev/null +++ b/assets/icons/file_icons/typescript.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/file_icons/video.svg b/assets/icons/file_icons/video.svg new file mode 100644 index 0000000..b96e359 --- /dev/null +++ b/assets/icons/file_icons/video.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_icons/vue.svg b/assets/icons/file_icons/vue.svg new file mode 100644 index 0000000..1cbe08d --- /dev/null +++ b/assets/icons/file_icons/vue.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/filter.svg b/assets/icons/filter.svg new file mode 100644 index 0000000..80ce656 --- /dev/null +++ b/assets/icons/filter.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/github.svg b/assets/icons/github.svg new file mode 100644 index 0000000..28148b9 --- /dev/null +++ b/assets/icons/github.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/hash.svg b/assets/icons/hash.svg new file mode 100644 index 0000000..f685245 --- /dev/null +++ b/assets/icons/hash.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/history_rerun.svg b/assets/icons/history_rerun.svg new file mode 100644 index 0000000..530465f --- /dev/null +++ b/assets/icons/history_rerun.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/indicator.svg b/assets/icons/indicator.svg new file mode 100644 index 0000000..40f9151 --- /dev/null 
+++ b/assets/icons/indicator.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/indicator_x.svg b/assets/icons/indicator_x.svg new file mode 100644 index 0000000..d812c40 --- /dev/null +++ b/assets/icons/indicator_x.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/inlay_hint.svg b/assets/icons/inlay_hint.svg new file mode 100644 index 0000000..c8e6bb2 --- /dev/null +++ b/assets/icons/inlay_hint.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/library.svg b/assets/icons/library.svg new file mode 100644 index 0000000..95f8c71 --- /dev/null +++ b/assets/icons/library.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/link.svg b/assets/icons/link.svg new file mode 100644 index 0000000..4925bd8 --- /dev/null +++ b/assets/icons/link.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/logo_96.svg b/assets/icons/logo_96.svg new file mode 100644 index 0000000..dc98bb8 --- /dev/null +++ b/assets/icons/logo_96.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/magic_wand.svg b/assets/icons/magic_wand.svg new file mode 100644 index 0000000..cd21947 --- /dev/null +++ b/assets/icons/magic_wand.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/icons/magnifying_glass.svg b/assets/icons/magnifying_glass.svg new file mode 100644 index 0000000..0b539ad --- /dev/null +++ b/assets/icons/magnifying_glass.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/mail_open.svg b/assets/icons/mail_open.svg new file mode 100644 index 0000000..b857037 --- /dev/null +++ b/assets/icons/mail_open.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/maximize.svg b/assets/icons/maximize.svg new file mode 100644 index 0000000..b3504b5 --- /dev/null +++ b/assets/icons/maximize.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/menu.svg b/assets/icons/menu.svg new file mode 100644 index 0000000..6598697 --- /dev/null +++ b/assets/icons/menu.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/mic.svg b/assets/icons/mic.svg new file mode 100644 index 0000000..01f4c9b --- /dev/null +++ b/assets/icons/mic.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/mic_mute.svg b/assets/icons/mic_mute.svg new file mode 100644 index 0000000..fe5f820 --- /dev/null +++ b/assets/icons/mic_mute.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/minimize.svg b/assets/icons/minimize.svg new file mode 100644 index 0000000..0451233 --- /dev/null +++ b/assets/icons/minimize.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/option.svg b/assets/icons/option.svg new file mode 100644 index 0000000..9d54a6f --- /dev/null +++ b/assets/icons/option.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/page_down.svg b/assets/icons/page_down.svg new file mode 100644 index 0000000..765f36b --- /dev/null +++ b/assets/icons/page_down.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/page_up.svg b/assets/icons/page_up.svg new file mode 100644 index 0000000..f555165 --- /dev/null +++ b/assets/icons/page_up.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/assets/icons/pencil.svg b/assets/icons/pencil.svg new file mode 100644 index 0000000..d90dcda --- /dev/null +++ b/assets/icons/pencil.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/person.svg b/assets/icons/person.svg new file mode 100644 index 0000000..f613347 --- /dev/null +++ b/assets/icons/person.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/play.svg b/assets/icons/play.svg new file mode 100644 index 0000000..2fc2a23 --- /dev/null +++ b/assets/icons/play.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/plus.svg b/assets/icons/plus.svg new file mode 100644 index 0000000..57ce902 --- 
/dev/null +++ b/assets/icons/plus.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/project.svg b/assets/icons/project.svg new file mode 100644 index 0000000..4c921b1 --- /dev/null +++ b/assets/icons/project.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/public.svg b/assets/icons/public.svg new file mode 100644 index 0000000..38278cd --- /dev/null +++ b/assets/icons/public.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/pull_request.svg b/assets/icons/pull_request.svg new file mode 100644 index 0000000..150a532 --- /dev/null +++ b/assets/icons/pull_request.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/quote.svg b/assets/icons/quote.svg new file mode 100644 index 0000000..b970db1 --- /dev/null +++ b/assets/icons/quote.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/regex.svg b/assets/icons/regex.svg new file mode 100644 index 0000000..1b24398 --- /dev/null +++ b/assets/icons/regex.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/replace.svg b/assets/icons/replace.svg new file mode 100644 index 0000000..837cb23 --- /dev/null +++ b/assets/icons/replace.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/assets/icons/replace_all.svg b/assets/icons/replace_all.svg new file mode 100644 index 0000000..d3cf503 --- /dev/null +++ b/assets/icons/replace_all.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/replace_next.svg b/assets/icons/replace_next.svg new file mode 100644 index 0000000..72511be --- /dev/null +++ b/assets/icons/replace_next.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/reply_arrow_right.svg b/assets/icons/reply_arrow_right.svg new file mode 100644 index 0000000..d8321e8 --- /dev/null +++ b/assets/icons/reply_arrow_right.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/return.svg b/assets/icons/return.svg new file mode 100644 index 0000000..16cfeed --- /dev/null +++ b/assets/icons/return.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/select_all.svg b/assets/icons/select_all.svg new file mode 100644 index 0000000..78c3ee6 --- /dev/null +++ b/assets/icons/select_all.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/server.svg b/assets/icons/server.svg new file mode 100644 index 0000000..a8b6ad9 --- /dev/null +++ b/assets/icons/server.svg @@ -0,0 +1,16 @@ + + + + + + diff --git a/assets/icons/shift.svg b/assets/icons/shift.svg new file mode 100644 index 0000000..0232114 --- /dev/null +++ b/assets/icons/shift.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/sliders.svg b/assets/icons/sliders.svg new file mode 100644 index 0000000..33e50b0 --- /dev/null +++ b/assets/icons/sliders.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/snip.svg b/assets/icons/snip.svg new file mode 100644 index 0000000..03ae4ce --- /dev/null +++ b/assets/icons/snip.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/space.svg b/assets/icons/space.svg new file mode 100644 index 0000000..63718fb --- /dev/null +++ b/assets/icons/space.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/speaker_loud.svg b/assets/icons/speaker_loud.svg new file mode 100644 index 0000000..68982ee --- /dev/null +++ b/assets/icons/speaker_loud.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/speaker_off.svg b/assets/icons/speaker_off.svg new file mode 100644 index 0000000..f60c35d --- /dev/null +++ b/assets/icons/speaker_off.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/spinner.svg b/assets/icons/spinner.svg new file mode 100644 index 0000000..4f4034a --- /dev/null +++ b/assets/icons/spinner.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/assets/icons/split.svg 
b/assets/icons/split.svg new file mode 100644 index 0000000..4c13146 --- /dev/null +++ b/assets/icons/split.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/stop_sharing.svg b/assets/icons/stop_sharing.svg new file mode 100644 index 0000000..b0f06f6 --- /dev/null +++ b/assets/icons/stop_sharing.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/strikethrough.svg b/assets/icons/strikethrough.svg new file mode 100644 index 0000000..d7d0905 --- /dev/null +++ b/assets/icons/strikethrough.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/supermaven.svg b/assets/icons/supermaven.svg new file mode 100644 index 0000000..19837fb --- /dev/null +++ b/assets/icons/supermaven.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/assets/icons/supermaven_disabled.svg b/assets/icons/supermaven_disabled.svg new file mode 100644 index 0000000..39ff8a6 --- /dev/null +++ b/assets/icons/supermaven_disabled.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/assets/icons/supermaven_error.svg b/assets/icons/supermaven_error.svg new file mode 100644 index 0000000..669322b --- /dev/null +++ b/assets/icons/supermaven_error.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/assets/icons/supermaven_init.svg b/assets/icons/supermaven_init.svg new file mode 100644 index 0000000..b919d55 --- /dev/null +++ b/assets/icons/supermaven_init.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/assets/icons/tab.svg b/assets/icons/tab.svg new file mode 100644 index 0000000..49a3536 --- /dev/null +++ b/assets/icons/tab.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/terminal.svg b/assets/icons/terminal.svg new file mode 100644 index 0000000..d3742fa --- /dev/null +++ b/assets/icons/terminal.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/trash.svg b/assets/icons/trash.svg new file mode 100644 index 0000000..b71035b --- /dev/null +++ b/assets/icons/trash.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/update.svg b/assets/icons/update.svg new file mode 100644 index 0000000..b529b2b --- /dev/null +++ b/assets/icons/update.svg @@ -0,0 +1,8 @@ + + + diff --git a/assets/icons/user_group_16.svg b/assets/icons/user_group_16.svg new file mode 100644 index 0000000..aa99277 --- /dev/null +++ b/assets/icons/user_group_16.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/warning.svg b/assets/icons/warning.svg new file mode 100644 index 0000000..c48a575 --- /dev/null +++ b/assets/icons/warning.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/word_search.svg b/assets/icons/word_search.svg new file mode 100644 index 0000000..beca4cb --- /dev/null +++ b/assets/icons/word_search.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/x.svg b/assets/icons/x.svg new file mode 100644 index 0000000..31c5aa3 --- /dev/null +++ b/assets/icons/x.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/zed_assistant.svg b/assets/icons/zed_assistant.svg new file mode 100644 index 0000000..165ce74 --- /dev/null +++ b/assets/icons/zed_assistant.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/zed_x_copilot.svg b/assets/icons/zed_x_copilot.svg new file mode 100644 index 0000000..d024678 --- /dev/null +++ b/assets/icons/zed_x_copilot.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/assets/keymaps/atom.json b/assets/keymaps/atom.json new file mode 100644 index 0000000..9184429 --- /dev/null +++ b/assets/keymaps/atom.json @@ -0,0 +1,80 @@ +[ + { + "bindings": { + "cmd-k cmd-p": "workspace::ActivatePreviousPane", + "cmd-k cmd-n": "workspace::ActivateNextPane" + } + }, + { + "context": "Editor", + "bindings": { + "cmd-b": 
"editor::GoToDefinition", + "alt-cmd-b": "editor::GoToDefinitionSplit", + "cmd-<": "editor::ScrollCursorCenter", + "cmd-g": [ + "editor::SelectNext", + { + "replace_newest": true + } + ], + "ctrl-cmd-g": [ + "editor::SelectPrevious", + { + "replace_newest": true + } + ], + "ctrl-shift-down": "editor::AddSelectionBelow", + "ctrl-shift-up": "editor::AddSelectionAbove", + "cmd-shift-backspace": "editor::DeleteToBeginningOfLine" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "cmd-r": "outline::Toggle" + } + }, + { + "context": "BufferSearchBar", + "bindings": { + "cmd-f3": "search::SelectNextMatch", + "cmd-shift-f3": "search::SelectPrevMatch" + } + }, + { + "context": "Workspace", + "bindings": { + "cmd-\\": "workspace::ToggleLeftDock", + "cmd-k cmd-b": "workspace::ToggleLeftDock", + "cmd-t": "file_finder::Toggle", + "cmd-shift-r": "project_symbols::Toggle" + } + }, + { + "context": "Pane", + "bindings": { + "alt-cmd-/": "search::ToggleRegex", + "ctrl-0": "project_panel::ToggleFocus", + "cmd-1": ["pane::ActivateItem", 0], + "cmd-2": ["pane::ActivateItem", 1], + "cmd-3": ["pane::ActivateItem", 2], + "cmd-4": ["pane::ActivateItem", 3], + "cmd-5": ["pane::ActivateItem", 4], + "cmd-6": ["pane::ActivateItem", 5], + "cmd-7": ["pane::ActivateItem", 6], + "cmd-8": ["pane::ActivateItem", 7], + "cmd-9": "pane::ActivateLastItem" + } + }, + { + "context": "ProjectPanel", + "bindings": { + "ctrl-[": "project_panel::CollapseSelectedEntry", + "ctrl-b": "project_panel::CollapseSelectedEntry", + "alt-b": "project_panel::CollapseSelectedEntry", + "ctrl-]": "project_panel::ExpandSelectedEntry", + "ctrl-f": "project_panel::ExpandSelectedEntry", + "ctrl-shift-c": "project_panel::CopyPath" + } + } +] diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json new file mode 100644 index 0000000..bee4d63 --- /dev/null +++ b/assets/keymaps/default-linux.json @@ -0,0 +1,638 @@ +[ + // todo(linux): Review the editor bindings + // Standard Linux bindings + { + "bindings": { + "up": "menu::SelectPrev", + "pageup": "menu::SelectFirst", + "shift-pageup": "menu::SelectFirst", + "ctrl-p": "menu::SelectPrev", + "down": "menu::SelectNext", + "pagedown": "menu::SelectLast", + "shift-pagedown": "menu::SelectFirst", + "ctrl-n": "menu::SelectNext", + "enter": "menu::Confirm", + "ctrl-enter": "menu::SecondaryConfirm", + "escape": "menu::Cancel", + "ctrl-escape": "menu::Cancel", + "ctrl-c": "menu::Cancel", + "shift-enter": "picker::UseSelectedQuery", + "alt-enter": ["picker::ConfirmInput", { "secondary": false }], + "ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }], + "ctrl-shift-w": "workspace::CloseWindow", + "shift-escape": "workspace::ToggleZoom", + "ctrl-o": "workspace::Open", + "ctrl-=": "zed::IncreaseBufferFontSize", + "ctrl-+": "zed::IncreaseBufferFontSize", + "ctrl--": "zed::DecreaseBufferFontSize", + "ctrl-0": "zed::ResetBufferFontSize", + "ctrl-,": "zed::OpenSettings", + "ctrl-q": "zed::Quit", + "alt-f9": "zed::Hide", + "f11": "zed::ToggleFullScreen" + } + }, + { + "context": "Editor", + "bindings": { + "escape": "editor::Cancel", + "backspace": "editor::Backspace", + "shift-backspace": "editor::Backspace", + "delete": "editor::Delete", + "ctrl-d": "editor::Delete", + "tab": "editor::Tab", + "shift-tab": "editor::TabPrev", + "ctrl-k": "editor::CutToEndOfLine", + "ctrl-t": "editor::Transpose", + // "ctrl-backspace": "editor::DeleteToBeginningOfLine", + // "ctrl-delete": "editor::DeleteToEndOfLine", + "ctrl-backspace": "editor::DeleteToPreviousWordStart", + // 
"ctrl-w": "editor::DeleteToPreviousWordStart", + "ctrl-delete": "editor::DeleteToNextWordEnd", + // "alt-h": "editor::DeleteToPreviousWordStart", + // "alt-d": "editor::DeleteToNextWordEnd", + "ctrl-x": "editor::Cut", + "ctrl-c": "editor::Copy", + "ctrl-insert": "editor::Copy", + "ctrl-v": "editor::Paste", + "shift-insert": "editor::Paste", + "ctrl-z": "editor::Undo", + "ctrl-shift-z": "editor::Redo", + "up": "editor::MoveUp", + // "ctrl-up": "editor::MoveToStartOfParagraph", todo(linux) Should be "scroll down by 1 line" + "pageup": "editor::PageUp", + // "shift-pageup": "editor::MovePageUp", todo(linux) should be 'select page up' + "home": "editor::MoveToBeginningOfLine", + "down": "editor::MoveDown", + // "ctrl-down": "editor::MoveToEndOfParagraph", todo(linux) should be "scroll up by 1 line" + "pagedown": "editor::PageDown", + // "shift-pagedown": "editor::MovePageDown", todo(linux) should be 'select page down' + "end": "editor::MoveToEndOfLine", + "left": "editor::MoveLeft", + "right": "editor::MoveRight", + "ctrl-left": "editor::MoveToPreviousWordStart", + // "alt-b": "editor::MoveToPreviousWordStart", + "ctrl-right": "editor::MoveToNextWordEnd", + // "alt-f": "editor::MoveToNextWordEnd", + // "cmd-left": "editor::MoveToBeginningOfLine", + // "ctrl-a": "editor::MoveToBeginningOfLine", + // "cmd-right": "editor::MoveToEndOfLine", + // "ctrl-e": "editor::MoveToEndOfLine", + "ctrl-home": "editor::MoveToBeginning", + "ctrl-end": "editor::MoveToEnd", + "shift-up": "editor::SelectUp", + "shift-down": "editor::SelectDown", + "shift-left": "editor::SelectLeft", + "shift-right": "editor::SelectRight", + "ctrl-shift-left": "editor::SelectToPreviousWordStart", + "ctrl-shift-right": "editor::SelectToNextWordEnd", + "ctrl-shift-up": "editor::AddSelectionAbove", + "ctrl-shift-down": "editor::AddSelectionBelow", + // "ctrl-shift-up": "editor::SelectToStartOfParagraph", + // "ctrl-shift-down": "editor::SelectToEndOfParagraph", + "ctrl-shift-home": "editor::SelectToBeginning", + "ctrl-shift-end": "editor::SelectToEnd", + "ctrl-a": "editor::SelectAll", + "ctrl-l": "editor::SelectLine", + "ctrl-shift-i": "editor::Format", + // "cmd-shift-left": [ + // "editor::SelectToBeginningOfLine", + // { + // "stop_at_soft_wraps": true + // } + // ], + "shift-home": [ + "editor::SelectToBeginningOfLine", + { + "stop_at_soft_wraps": true + } + ], + // "ctrl-shift-a": [ + // "editor::SelectToBeginningOfLine", + // { + // "stop_at_soft_wraps": true + // } + // ], + // "cmd-shift-right": [ + // "editor::SelectToEndOfLine", + // { + // "stop_at_soft_wraps": true + // } + // ], + "shift-end": [ + "editor::SelectToEndOfLine", + { + "stop_at_soft_wraps": true + } + ], + // "ctrl-shift-e": [ + // "editor::SelectToEndOfLine", + // { + // "stop_at_soft_wraps": true + // } + // ], + // "alt-v": [ + // "editor::MovePageUp", + // { + // "center_cursor": true + // } + // ], + "ctrl-alt-space": "editor::ShowCharacterPalette", + "ctrl-;": "editor::ToggleLineNumbers", + "ctrl-k ctrl-r": "editor::RevertSelectedHunks", + "ctrl-'": "editor::ToggleHunkDiff", + "ctrl-\"": "editor::ExpandAllHunkDiffs", + "ctrl-alt-g b": "editor::ToggleGitBlame" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "enter": "editor::Newline", + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow", + "ctrl-enter": "editor::NewlineAbove", + "alt-z": "editor::ToggleSoftWrap", + "ctrl-f": "buffer_search::Deploy", + "ctrl-h": [ + "buffer_search::Deploy", + { + "replace_enabled": true + } + ], + // "cmd-e": [ + // 
"buffer_search::Deploy", + // { + // "focus": false + // } + // ], + "ctrl->": "assistant::QuoteSelection" + } + }, + { + "context": "Editor && mode == full && inline_completion", + "bindings": { + "alt-]": "editor::NextInlineCompletion", + "alt-[": "editor::PreviousInlineCompletion", + "alt-right": "editor::AcceptPartialInlineCompletion" + } + }, + { + "context": "Editor && !inline_completion", + "bindings": { + "alt-\\": "editor::ShowInlineCompletion" + } + }, + { + "context": "Editor && mode == auto_height", + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow" + } + }, + { + "context": "Markdown", + "bindings": { + "ctrl-c": "markdown::Copy" + } + }, + { + "context": "AssistantPanel", + "bindings": { + "ctrl-g": "search::SelectNextMatch", + "ctrl-shift-g": "search::SelectPrevMatch" + } + }, + { + "context": "ConversationEditor > Editor", + "bindings": { + "ctrl-enter": "assistant::Assist", + "ctrl-s": "workspace::Save", + "ctrl->": "assistant::QuoteSelection", + "shift-enter": "assistant::Split", + "ctrl-r": "assistant::CycleMessageRole" + } + }, + { + "context": "BufferSearchBar", + "bindings": { + "escape": "buffer_search::Dismiss", + "tab": "buffer_search::FocusEditor", + "enter": "search::SelectNextMatch", + "shift-enter": "search::SelectPrevMatch", + "alt-enter": "search::SelectAllMatches", + "ctrl-f": "search::FocusSearch", + "ctrl-h": "search::ToggleReplace" + } + }, + { + "context": "BufferSearchBar && in_replace", + "bindings": { + "enter": "search::ReplaceNext", + "ctrl-enter": "search::ReplaceAll" + } + }, + { + "context": "BufferSearchBar && !in_replace > Editor", + "bindings": { + "up": "search::PreviousHistoryQuery", + "down": "search::NextHistoryQuery" + } + }, + { + "context": "ProjectSearchBar", + "bindings": { + "escape": "project_search::ToggleFocus", + "ctrl-shift-f": "search::FocusSearch", + "ctrl-shift-h": "search::ToggleReplace", + "alt-ctrl-g": "search::ToggleRegex", + "alt-ctrl-x": "search::ToggleRegex" + } + }, + { + "context": "ProjectSearchBar > Editor", + "bindings": { + "up": "search::PreviousHistoryQuery", + "down": "search::NextHistoryQuery" + } + }, + { + "context": "ProjectSearchBar && in_replace", + "bindings": { + "enter": "search::ReplaceNext", + "ctrl-alt-enter": "search::ReplaceAll" + } + }, + { + "context": "ProjectSearchView", + "bindings": { + "escape": "project_search::ToggleFocus", + "ctrl-shift-h": "search::ToggleReplace", + "alt-ctrl-g": "search::ToggleRegex", + "alt-ctrl-x": "search::ToggleRegex" + } + }, + { + "context": "Pane", + "bindings": { + "ctrl-pageup": "pane::ActivatePrevItem", + "ctrl-pagedown": "pane::ActivateNextItem", + "ctrl-w": "pane::CloseActiveItem", + "alt-ctrl-t": "pane::CloseInactiveItems", + "alt-ctrl-shift-w": "workspace::CloseInactiveTabsAndPanes", + "ctrl-k u": "pane::CloseCleanItems", + "ctrl-k w": "pane::CloseAllItems", + "ctrl-shift-f": "project_search::ToggleFocus", + "ctrl-alt-g": "search::SelectNextMatch", + "ctrl-alt-shift-g": "search::SelectPrevMatch", + "ctrl-alt-shift-h": "search::ToggleReplace", + "alt-enter": "search::SelectAllMatches", + "alt-c": "search::ToggleCaseSensitive", + "alt-w": "search::ToggleWholeWord", + "alt-r": "search::ToggleRegex", + "alt-ctrl-f": "project_search::ToggleFilters", + "ctrl-alt-shift-r": "search::ToggleRegex", + "ctrl-alt-shift-x": "search::ToggleRegex" + } + }, + // Bindings from VS Code + { + "context": "Editor", + "bindings": { + "ctrl-[": "editor::Outdent", + "ctrl-]": "editor::Indent", + 
"shift-alt-up": "editor::AddSelectionAbove", + "shift-alt-down": "editor::AddSelectionBelow", + "ctrl-shift-k": "editor::DeleteLine", + "alt-up": "editor::MoveLineUp", + "alt-down": "editor::MoveLineDown", + "ctrl-alt-shift-up": "editor::DuplicateLineUp", + "ctrl-alt-shift-down": "editor::DuplicateLineDown", + "ctrl-shift-left": "editor::SelectToPreviousWordStart", + "ctrl-shift-right": "editor::SelectToNextWordEnd", + "ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding + "ctrl-shift-down": "editor::SelectSmallerSyntaxNode", //todo(linux) tmp keybinding + "ctrl-d": [ + "editor::SelectNext", + { + "replace_newest": false + } + ], + "ctrl-shift-l": "editor::SelectAllMatches", + "ctrl-shift-d": [ + "editor::SelectPrevious", + { + "replace_newest": false + } + ], + "ctrl-k ctrl-d": [ + "editor::SelectNext", + { + "replace_newest": true + } + ], + "ctrl-k ctrl-shift-d": [ + "editor::SelectPrevious", + { + "replace_newest": true + } + ], + "ctrl-k ctrl-i": "editor::Hover", + "ctrl-/": [ + "editor::ToggleComments", + { + "advance_downwards": false + } + ], + "ctrl-u": "editor::UndoSelection", + "ctrl-shift-u": "editor::RedoSelection", + "f8": "editor::GoToDiagnostic", + "shift-f8": "editor::GoToPrevDiagnostic", + "f2": "editor::Rename", + "f12": "editor::GoToDefinition", + "alt-f12": "editor::GoToDefinitionSplit", + "ctrl-f12": "editor::GoToTypeDefinition", + "shift-f12": "editor::GoToImplementation", + "alt-ctrl-f12": "editor::GoToTypeDefinitionSplit", + "alt-shift-f12": "editor::FindAllReferences", + "ctrl-m": "editor::MoveToEnclosingBracket", + "ctrl-shift-[": "editor::Fold", + "ctrl-shift-]": "editor::UnfoldLines", + "ctrl-space": "editor::ShowCompletions", + "ctrl-.": "editor::ToggleCodeActions", + "alt-ctrl-r": "editor::RevealInFinder", + "ctrl-alt-shift-c": "editor::DisplayCursorNames" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "ctrl-shift-o": "outline::Toggle", + "ctrl-g": "go_to_line::Toggle" + } + }, + { + "context": "Pane", + "bindings": { + "alt-1": ["pane::ActivateItem", 0], + "alt-2": ["pane::ActivateItem", 1], + "alt-3": ["pane::ActivateItem", 2], + "alt-4": ["pane::ActivateItem", 3], + "alt-5": ["pane::ActivateItem", 4], + "alt-6": ["pane::ActivateItem", 5], + "alt-7": ["pane::ActivateItem", 6], + "alt-8": ["pane::ActivateItem", 7], + "alt-9": ["pane::ActivateItem", 8], + "alt-0": "pane::ActivateLastItem", + "ctrl-alt--": "pane::GoBack", + "ctrl-alt-_": "pane::GoForward", + "ctrl-shift-t": "pane::ReopenClosedItem", + "ctrl-shift-f": "project_search::ToggleFocus" + } + }, + { + "context": "Workspace", + "bindings": { + // Change the default action on `menu::Confirm` by setting the parameter + // "alt-cmd-o": [ + // "projects::OpenRecent", + // { + // "create_new_window": true + // } + // ] + "alt-ctrl-o": "projects::OpenRecent", + "alt-ctrl-shift-b": "branches::OpenRecent", + "ctrl-~": "workspace::NewTerminal", + "ctrl-s": "workspace::Save", + "ctrl-k s": "workspace::SaveWithoutFormat", + "ctrl-shift-s": "workspace::SaveAs", + "ctrl-n": "workspace::NewFile", + "ctrl-shift-n": "workspace::NewWindow", + "ctrl-`": "terminal_panel::ToggleFocus", + "alt-1": ["workspace::ActivatePane", 0], + "alt-2": ["workspace::ActivatePane", 1], + "alt-3": ["workspace::ActivatePane", 2], + "alt-4": ["workspace::ActivatePane", 3], + "alt-5": ["workspace::ActivatePane", 4], + "alt-6": ["workspace::ActivatePane", 5], + "alt-7": ["workspace::ActivatePane", 6], + "alt-8": ["workspace::ActivatePane", 7], + "alt-9": ["workspace::ActivatePane", 8], + 
"ctrl-alt-b": "workspace::ToggleLeftDock", + "ctrl-b": "workspace::ToggleRightDock", + "ctrl-j": "workspace::ToggleBottomDock", + "ctrl-alt-y": "workspace::CloseAllDocks", + "ctrl-shift-f": "pane::DeploySearch", + "ctrl-shift-h": [ + "pane::DeploySearch", + { + "replace_enabled": true + } + ], + "ctrl-k ctrl-s": "zed::OpenKeymap", + "ctrl-k ctrl-t": "theme_selector::Toggle", + "ctrl-shift-t": "project_symbols::Toggle", + "ctrl-p": "file_finder::Toggle", + "ctrl-tab": "tab_switcher::Toggle", + "ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }], + "ctrl-e": "file_finder::Toggle", + "ctrl-shift-p": "command_palette::Toggle", + "ctrl-shift-m": "diagnostics::Deploy", + "ctrl-shift-e": "project_panel::ToggleFocus", + "ctrl-?": "assistant::ToggleFocus", + "ctrl-alt-s": "workspace::SaveAll", + "ctrl-k m": "language_selector::Toggle", + "escape": "workspace::Unfollow", + "ctrl-k ctrl-left": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-k ctrl-right": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-k ctrl-up": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-k ctrl-down": ["workspace::ActivatePaneInDirection", "Down"], + "ctrl-k shift-left": ["workspace::SwapPaneInDirection", "Left"], + "ctrl-k shift-right": ["workspace::SwapPaneInDirection", "Right"], + "ctrl-k shift-up": ["workspace::SwapPaneInDirection", "Up"], + "ctrl-k shift-down": ["workspace::SwapPaneInDirection", "Down"], + "alt-t": "task::Rerun", + "alt-shift-t": "task::Spawn" + } + }, + // Bindings from Sublime Text + { + "context": "Editor", + "bindings": { + "ctrl-shift-k": "editor::DeleteLine", + "ctrl-shift-d": "editor::DuplicateLineDown", + "ctrl-j": "editor::JoinLines", + "ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart", + "ctrl-alt-h": "editor::DeleteToPreviousSubwordStart", + "ctrl-alt-delete": "editor::DeleteToNextSubwordEnd", + "ctrl-alt-d": "editor::DeleteToNextSubwordEnd", + "ctrl-alt-left": "editor::MoveToPreviousSubwordStart", + "ctrl-alt-b": "editor::MoveToPreviousSubwordStart", + "ctrl-alt-right": "editor::MoveToNextSubwordEnd", + "ctrl-alt-f": "editor::MoveToNextSubwordEnd", + "ctrl-alt-shift-left": "editor::SelectToPreviousSubwordStart", + "ctrl-alt-shift-b": "editor::SelectToPreviousSubwordStart", + "ctrl-alt-shift-right": "editor::SelectToNextSubwordEnd", + "ctrl-alt-shift-f": "editor::SelectToNextSubwordEnd" + } + }, + // Bindings from Atom + { + "context": "Pane", + "bindings": { + "ctrl-k up": "pane::SplitUp", + "ctrl-k down": "pane::SplitDown", + "ctrl-k left": "pane::SplitLeft", + "ctrl-k right": "pane::SplitRight" + } + }, + // Bindings that should be unified with bindings for more general actions + { + "context": "Editor && renaming", + "bindings": { + "enter": "editor::ConfirmRename" + } + }, + { + "context": "Editor && showing_completions", + "bindings": { + "enter": "editor::ConfirmCompletion", + "tab": "editor::ConfirmCompletion" + } + }, + { + "context": "Editor && showing_code_actions", + "bindings": { + "enter": "editor::ConfirmCodeAction" + } + }, + { + "context": "Editor && (showing_code_actions || showing_completions)", + "bindings": { + "up": "editor::ContextMenuPrev", + "ctrl-p": "editor::ContextMenuPrev", + "down": "editor::ContextMenuNext", + "ctrl-n": "editor::ContextMenuNext", + "pageup": "editor::ContextMenuFirst", + "pagedown": "editor::ContextMenuLast" + } + }, + // Custom bindings + { + "bindings": { + "ctrl-alt-shift-f": "workspace::FollowNextCollaborator", + // TODO: Move this to a dock open action + "ctrl-shift-c": 
"collab_panel::ToggleFocus", + "ctrl-alt-i": "zed::DebugElements", + "ctrl-:": "editor::ToggleInlayHints" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "alt-enter": "editor::OpenExcerpts", + "shift-enter": "editor::ExpandExcerpts", + "ctrl-k enter": "editor::OpenExcerptsSplit", + "ctrl-f8": "editor::GoToHunk", + "ctrl-shift-f8": "editor::GoToPrevHunk", + "ctrl-enter": "assistant::InlineAssist" + } + }, + { + "context": "ProjectSearchBar && !in_replace", + "bindings": { + "ctrl-enter": "project_search::SearchInNew" + } + }, + { + "context": "ProjectPanel", + "bindings": { + "left": "project_panel::CollapseSelectedEntry", + "right": "project_panel::ExpandSelectedEntry", + "ctrl-n": "project_panel::NewFile", + "alt-ctrl-n": "project_panel::NewDirectory", + "ctrl-x": "project_panel::Cut", + "ctrl-c": "project_panel::Copy", + "ctrl-insert": "project_panel::Copy", + "ctrl-v": "project_panel::Paste", + "shift-insert": "project_panel::Paste", + "ctrl-alt-c": "project_panel::CopyPath", + "alt-ctrl-shift-c": "project_panel::CopyRelativePath", + "f2": "project_panel::Rename", + "enter": "project_panel::Rename", + "backspace": "project_panel::Trash", + "delete": "project_panel::Trash", + "ctrl-backspace": ["project_panel::Delete", { "skip_prompt": true }], + "ctrl-delete": ["project_panel::Delete", { "skip_prompt": true }], + "alt-ctrl-r": "project_panel::RevealInFinder", + "alt-shift-f": "project_panel::NewSearchInDirectory" + } + }, + { + "context": "ProjectPanel && not_editing", + "bindings": { + "space": "project_panel::Open" + } + }, + { + "context": "CollabPanel && not_editing", + "bindings": { + "ctrl-backspace": "collab_panel::Remove", + "space": "menu::Confirm" + } + }, + { + "context": "(CollabPanel && editing) > Editor", + "bindings": { + "space": "collab_panel::InsertSpace" + } + }, + { + "context": "ChannelModal", + "bindings": { + "tab": "channel_modal::ToggleMode" + } + }, + { + "context": "ChannelModal > Picker > Editor", + "bindings": { + "tab": "channel_modal::ToggleMode" + } + }, + { + "context": "FileFinder", + "bindings": { "ctrl-shift-p": "file_finder::SelectPrev" } + }, + { + "context": "TabSwitcher", + "bindings": { + "ctrl-up": "menu::SelectPrev", + "ctrl-down": "menu::SelectNext", + "ctrl-shift-tab": "menu::SelectPrev", + "ctrl-backspace": "tab_switcher::CloseSelectedItem" + } + }, + { + "context": "Terminal", + "bindings": { + "ctrl-alt-space": "terminal::ShowCharacterPalette", + "shift-ctrl-c": "terminal::Copy", + "ctrl-insert": "terminal::Copy", + "shift-ctrl-v": "terminal::Paste", + "shift-insert": "terminal::Paste", + "up": ["terminal::SendKeystroke", "up"], + "pageup": ["terminal::SendKeystroke", "pageup"], + "down": ["terminal::SendKeystroke", "down"], + "pagedown": ["terminal::SendKeystroke", "pagedown"], + "escape": ["terminal::SendKeystroke", "escape"], + "enter": ["terminal::SendKeystroke", "enter"], + "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"], + + // Some nice conveniences + "ctrl-backspace": ["terminal::SendText", "\u0015"], + "ctrl-right": ["terminal::SendText", "\u0005"], + "ctrl-left": ["terminal::SendText", "\u0001"] + } + } +] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json new file mode 100644 index 0000000..d655a50 --- /dev/null +++ b/assets/keymaps/default-macos.json @@ -0,0 +1,661 @@ +[ + // Standard macOS bindings + { + "bindings": { + "up": "menu::SelectPrev", + "pageup": "menu::SelectFirst", + "shift-pageup": "menu::SelectFirst", + "ctrl-p": "menu::SelectPrev", + "down": 
"menu::SelectNext", + "pagedown": "menu::SelectLast", + "shift-pagedown": "menu::SelectFirst", + "ctrl-n": "menu::SelectNext", + "cmd-up": "menu::SelectFirst", + "cmd-down": "menu::SelectLast", + "enter": "menu::Confirm", + "ctrl-enter": "menu::SecondaryConfirm", + "cmd-enter": "menu::SecondaryConfirm", + "escape": "menu::Cancel", + "cmd-escape": "menu::Cancel", + "ctrl-escape": "menu::Cancel", + "ctrl-c": "menu::Cancel", + "cmd-shift-w": "workspace::CloseWindow", + "shift-escape": "workspace::ToggleZoom", + "cmd-o": "workspace::Open", + "cmd-=": "zed::IncreaseBufferFontSize", + "cmd-+": "zed::IncreaseBufferFontSize", + "cmd--": "zed::DecreaseBufferFontSize", + "cmd-0": "zed::ResetBufferFontSize", + "cmd-,": "zed::OpenSettings", + "cmd-q": "zed::Quit", + "cmd-h": "zed::Hide", + "alt-cmd-h": "zed::HideOthers", + "cmd-m": "zed::Minimize", + "ctrl-cmd-f": "zed::ToggleFullScreen" + } + }, + { + "context": "Editor", + "bindings": { + "escape": "editor::Cancel", + "backspace": "editor::Backspace", + "shift-backspace": "editor::Backspace", + "ctrl-h": "editor::Backspace", + "delete": "editor::Delete", + "ctrl-d": "editor::Delete", + "tab": "editor::Tab", + "shift-tab": "editor::TabPrev", + "ctrl-k": "editor::CutToEndOfLine", + "ctrl-t": "editor::Transpose", + "cmd-backspace": "editor::DeleteToBeginningOfLine", + "cmd-delete": "editor::DeleteToEndOfLine", + "alt-backspace": "editor::DeleteToPreviousWordStart", + "ctrl-w": "editor::DeleteToPreviousWordStart", + "alt-delete": "editor::DeleteToNextWordEnd", + "alt-h": "editor::DeleteToPreviousWordStart", + "alt-d": "editor::DeleteToNextWordEnd", + "cmd-x": "editor::Cut", + "cmd-c": "editor::Copy", + "cmd-v": "editor::Paste", + "cmd-z": "editor::Undo", + "cmd-shift-z": "editor::Redo", + "up": "editor::MoveUp", + "ctrl-up": "editor::MoveToStartOfParagraph", + "pageup": "editor::PageUp", + "shift-pageup": "editor::MovePageUp", + "home": "editor::MoveToBeginningOfLine", + "down": "editor::MoveDown", + "ctrl-down": "editor::MoveToEndOfParagraph", + "pagedown": "editor::PageDown", + "shift-pagedown": "editor::MovePageDown", + "end": "editor::MoveToEndOfLine", + "left": "editor::MoveLeft", + "right": "editor::MoveRight", + "ctrl-p": "editor::MoveUp", + "ctrl-n": "editor::MoveDown", + "ctrl-b": "editor::MoveLeft", + "ctrl-f": "editor::MoveRight", + "ctrl-l": "editor::NextScreen", + "alt-left": "editor::MoveToPreviousWordStart", + "alt-b": "editor::MoveToPreviousWordStart", + "alt-right": "editor::MoveToNextWordEnd", + "alt-f": "editor::MoveToNextWordEnd", + "cmd-left": "editor::MoveToBeginningOfLine", + "ctrl-a": "editor::MoveToBeginningOfLine", + "cmd-right": "editor::MoveToEndOfLine", + "ctrl-e": "editor::MoveToEndOfLine", + "cmd-up": "editor::MoveToBeginning", + "cmd-down": "editor::MoveToEnd", + "shift-up": "editor::SelectUp", + "ctrl-shift-p": "editor::SelectUp", + "shift-down": "editor::SelectDown", + "ctrl-shift-n": "editor::SelectDown", + "shift-left": "editor::SelectLeft", + "ctrl-shift-b": "editor::SelectLeft", + "shift-right": "editor::SelectRight", + "ctrl-shift-f": "editor::SelectRight", + "alt-shift-left": "editor::SelectToPreviousWordStart", + "alt-shift-b": "editor::SelectToPreviousWordStart", + "alt-shift-right": "editor::SelectToNextWordEnd", + "alt-shift-f": "editor::SelectToNextWordEnd", + "ctrl-shift-up": "editor::SelectToStartOfParagraph", + "ctrl-shift-down": "editor::SelectToEndOfParagraph", + "cmd-shift-up": "editor::SelectToBeginning", + "cmd-shift-down": "editor::SelectToEnd", + "cmd-a": "editor::SelectAll", + "cmd-l": 
"editor::SelectLine", + "cmd-shift-i": "editor::Format", + "cmd-shift-left": [ + "editor::SelectToBeginningOfLine", + { + "stop_at_soft_wraps": true + } + ], + "shift-home": [ + "editor::SelectToBeginningOfLine", + { + "stop_at_soft_wraps": true + } + ], + "ctrl-shift-a": [ + "editor::SelectToBeginningOfLine", + { + "stop_at_soft_wraps": true + } + ], + "cmd-shift-right": [ + "editor::SelectToEndOfLine", + { + "stop_at_soft_wraps": true + } + ], + "shift-end": [ + "editor::SelectToEndOfLine", + { + "stop_at_soft_wraps": true + } + ], + "ctrl-shift-e": [ + "editor::SelectToEndOfLine", + { + "stop_at_soft_wraps": true + } + ], + "ctrl-v": [ + "editor::MovePageDown", + { + "center_cursor": true + } + ], + "alt-v": [ + "editor::MovePageUp", + { + "center_cursor": true + } + ], + "ctrl-cmd-space": "editor::ShowCharacterPalette", + "cmd-;": "editor::ToggleLineNumbers", + "cmd-alt-z": "editor::RevertSelectedHunks", + "cmd-'": "editor::ToggleHunkDiff", + "cmd-\"": "editor::ExpandAllHunkDiffs", + "cmd-alt-g b": "editor::ToggleGitBlame" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "enter": "editor::Newline", + "shift-enter": "editor::Newline", + "cmd-shift-enter": "editor::NewlineAbove", + "cmd-enter": "editor::NewlineBelow", + "alt-z": "editor::ToggleSoftWrap", + "cmd-f": "buffer_search::Deploy", + "cmd-alt-f": [ + "buffer_search::Deploy", + { + "replace_enabled": true + } + ], + "cmd-e": [ + "buffer_search::Deploy", + { + "focus": false + } + ], + "cmd->": "assistant::QuoteSelection" + } + }, + { + "context": "Editor && mode == full && inline_completion", + "bindings": { + "alt-]": "editor::NextInlineCompletion", + "alt-[": "editor::PreviousInlineCompletion", + "alt-right": "editor::AcceptPartialInlineCompletion" + } + }, + { + "context": "Editor && !inline_completion", + "bindings": { + "alt-\\": "editor::ShowInlineCompletion" + } + }, + { + "context": "Editor && mode == auto_height", + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow" + } + }, + { + "context": "Markdown", + "bindings": { + "cmd-c": "markdown::Copy" + } + }, + { + "context": "AssistantPanel", // Used in the assistant crate, which we're replacing + "bindings": { + "cmd-g": "search::SelectNextMatch", + "cmd-shift-g": "search::SelectPrevMatch" + } + }, + { + "context": "ConversationEditor > Editor", + "bindings": { + "cmd-enter": "assistant::Assist", + "cmd-s": "workspace::Save", + "cmd->": "assistant::QuoteSelection", + "shift-enter": "assistant::Split", + "ctrl-r": "assistant::CycleMessageRole" + } + }, + { + "context": "BufferSearchBar", + "bindings": { + "escape": "buffer_search::Dismiss", + "tab": "buffer_search::FocusEditor", + "enter": "search::SelectNextMatch", + "shift-enter": "search::SelectPrevMatch", + "alt-enter": "search::SelectAllMatches", + "cmd-f": "search::FocusSearch", + "cmd-alt-f": "search::ToggleReplace" + } + }, + { + "context": "BufferSearchBar && in_replace", + "bindings": { + "enter": "search::ReplaceNext", + "cmd-enter": "search::ReplaceAll" + } + }, + { + "context": "BufferSearchBar && !in_replace > Editor", + "bindings": { + "up": "search::PreviousHistoryQuery", + "down": "search::NextHistoryQuery" + } + }, + { + "context": "ProjectSearchBar", + "bindings": { + "escape": "project_search::ToggleFocus", + "cmd-shift-f": "search::FocusSearch", + "cmd-shift-h": "search::ToggleReplace", + "alt-cmd-g": "search::ToggleRegex", + "alt-cmd-x": "search::ToggleRegex" + } + }, + { + "context": "ProjectSearchBar > 
Editor", + "bindings": { + "up": "search::PreviousHistoryQuery", + "down": "search::NextHistoryQuery" + } + }, + { + "context": "ProjectSearchBar && in_replace", + "bindings": { + "enter": "search::ReplaceNext", + "cmd-enter": "search::ReplaceAll" + } + }, + { + "context": "ProjectSearchView", + "bindings": { + "escape": "project_search::ToggleFocus", + "cmd-shift-h": "search::ToggleReplace", + "alt-cmd-g": "search::ToggleRegex", + "alt-cmd-x": "search::ToggleRegex" + } + }, + { + "context": "Pane", + "bindings": { + "cmd-{": "pane::ActivatePrevItem", + "cmd-}": "pane::ActivateNextItem", + "alt-cmd-left": "pane::ActivatePrevItem", + "alt-cmd-right": "pane::ActivateNextItem", + "cmd-w": "pane::CloseActiveItem", + "alt-cmd-t": "pane::CloseInactiveItems", + "ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes", + "cmd-k u": "pane::CloseCleanItems", + "cmd-k cmd-w": "pane::CloseAllItems", + "cmd-f": "project_search::ToggleFocus", + "cmd-g": "search::SelectNextMatch", + "cmd-shift-g": "search::SelectPrevMatch", + "cmd-shift-h": "search::ToggleReplace", + "alt-enter": "search::SelectAllMatches", + "alt-cmd-c": "search::ToggleCaseSensitive", + "alt-cmd-w": "search::ToggleWholeWord", + "alt-cmd-f": "project_search::ToggleFilters", + "alt-cmd-g": "search::ToggleRegex", + "alt-cmd-x": "search::ToggleRegex" + } + }, + // Bindings from VS Code + { + "context": "Editor", + "bindings": { + "cmd-[": "editor::Outdent", + "cmd-]": "editor::Indent", + "cmd-alt-up": "editor::AddSelectionAbove", + "cmd-ctrl-p": "editor::AddSelectionAbove", + "cmd-alt-down": "editor::AddSelectionBelow", + "cmd-ctrl-n": "editor::AddSelectionBelow", + "cmd-shift-k": "editor::DeleteLine", + "alt-up": "editor::MoveLineUp", + "alt-down": "editor::MoveLineDown", + "alt-shift-up": "editor::DuplicateLineUp", + "alt-shift-down": "editor::DuplicateLineDown", + "ctrl-shift-right": "editor::SelectLargerSyntaxNode", + "ctrl-shift-left": "editor::SelectSmallerSyntaxNode", + "cmd-d": [ + "editor::SelectNext", + { + "replace_newest": false + } + ], + "cmd-shift-l": "editor::SelectAllMatches", + "ctrl-cmd-d": [ + "editor::SelectPrevious", + { + "replace_newest": false + } + ], + "cmd-k cmd-d": [ + "editor::SelectNext", + { + "replace_newest": true + } + ], + "cmd-k ctrl-cmd-d": [ + "editor::SelectPrevious", + { + "replace_newest": true + } + ], + "cmd-k cmd-i": "editor::Hover", + "cmd-/": [ + "editor::ToggleComments", + { + "advance_downwards": false + } + ], + "cmd-u": "editor::UndoSelection", + "cmd-shift-u": "editor::RedoSelection", + "f8": "editor::GoToDiagnostic", + "shift-f8": "editor::GoToPrevDiagnostic", + "f2": "editor::Rename", + "f12": "editor::GoToDefinition", + "alt-f12": "editor::GoToDefinitionSplit", + "cmd-f12": "editor::GoToTypeDefinition", + "shift-f12": "editor::GoToImplementation", + "alt-cmd-f12": "editor::GoToTypeDefinitionSplit", + "alt-shift-f12": "editor::FindAllReferences", + "ctrl-m": "editor::MoveToEnclosingBracket", + "alt-cmd-[": "editor::Fold", + "alt-cmd-]": "editor::UnfoldLines", + "ctrl-space": "editor::ShowCompletions", + "cmd-.": "editor::ToggleCodeActions", + "alt-cmd-r": "editor::RevealInFinder", + "ctrl-cmd-c": "editor::DisplayCursorNames" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "cmd-shift-o": "outline::Toggle", + "ctrl-g": "go_to_line::Toggle" + } + }, + { + "context": "Pane", + "bindings": { + "ctrl-1": ["pane::ActivateItem", 0], + "ctrl-2": ["pane::ActivateItem", 1], + "ctrl-3": ["pane::ActivateItem", 2], + "ctrl-4": ["pane::ActivateItem", 3], + "ctrl-5": 
["pane::ActivateItem", 4], + "ctrl-6": ["pane::ActivateItem", 5], + "ctrl-7": ["pane::ActivateItem", 6], + "ctrl-8": ["pane::ActivateItem", 7], + "ctrl-9": ["pane::ActivateItem", 8], + "ctrl-0": "pane::ActivateLastItem", + "ctrl--": "pane::GoBack", + "ctrl-_": "pane::GoForward", + "cmd-shift-t": "pane::ReopenClosedItem", + "cmd-shift-f": "project_search::ToggleFocus" + } + }, + { + "context": "Workspace", + "bindings": { + // Change the default action on `menu::Confirm` by setting the parameter + // "alt-cmd-o": [ + // "projects::OpenRecent", + // { + // "create_new_window": true + // } + // ] + "alt-cmd-o": "projects::OpenRecent", + "alt-cmd-b": "branches::OpenRecent", + "ctrl-~": "workspace::NewTerminal", + "cmd-s": "workspace::Save", + "cmd-k s": "workspace::SaveWithoutFormat", + "cmd-shift-s": "workspace::SaveAs", + "cmd-n": "workspace::NewFile", + "cmd-shift-n": "workspace::NewWindow", + "ctrl-`": "terminal_panel::ToggleFocus", + "cmd-1": ["workspace::ActivatePane", 0], + "cmd-2": ["workspace::ActivatePane", 1], + "cmd-3": ["workspace::ActivatePane", 2], + "cmd-4": ["workspace::ActivatePane", 3], + "cmd-5": ["workspace::ActivatePane", 4], + "cmd-6": ["workspace::ActivatePane", 5], + "cmd-7": ["workspace::ActivatePane", 6], + "cmd-8": ["workspace::ActivatePane", 7], + "cmd-9": ["workspace::ActivatePane", 8], + "cmd-b": "workspace::ToggleLeftDock", + "cmd-r": "workspace::ToggleRightDock", + "cmd-j": "workspace::ToggleBottomDock", + "alt-cmd-y": "workspace::CloseAllDocks", + "cmd-shift-f": "pane::DeploySearch", + "cmd-shift-h": [ + "pane::DeploySearch", + { + "replace_enabled": true + } + ], + "cmd-k cmd-s": "zed::OpenKeymap", + "cmd-k cmd-t": "theme_selector::Toggle", + "cmd-t": "project_symbols::Toggle", + "cmd-p": "file_finder::Toggle", + "ctrl-tab": "tab_switcher::Toggle", + "ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }], + "cmd-shift-p": "command_palette::Toggle", + "cmd-shift-m": "diagnostics::Deploy", + "cmd-shift-e": "project_panel::ToggleFocus", + "cmd-?": "assistant::ToggleFocus", + "cmd-alt-s": "workspace::SaveAll", + "cmd-k m": "language_selector::Toggle", + "escape": "workspace::Unfollow", + "cmd-k cmd-left": ["workspace::ActivatePaneInDirection", "Left"], + "cmd-k cmd-right": ["workspace::ActivatePaneInDirection", "Right"], + "cmd-k cmd-up": ["workspace::ActivatePaneInDirection", "Up"], + "cmd-k cmd-down": ["workspace::ActivatePaneInDirection", "Down"], + "cmd-k shift-left": ["workspace::SwapPaneInDirection", "Left"], + "cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"], + "cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"], + "cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"], + "alt-t": "task::Rerun", + "alt-shift-t": "task::Spawn" + } + }, + // Bindings from Sublime Text + { + "context": "Editor", + "bindings": { + "ctrl-j": "editor::JoinLines", + "ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart", + "ctrl-alt-h": "editor::DeleteToPreviousSubwordStart", + "ctrl-alt-delete": "editor::DeleteToNextSubwordEnd", + "ctrl-alt-d": "editor::DeleteToNextSubwordEnd", + "ctrl-alt-left": "editor::MoveToPreviousSubwordStart", + "ctrl-alt-b": "editor::MoveToPreviousSubwordStart", + "ctrl-alt-right": "editor::MoveToNextSubwordEnd", + "ctrl-alt-f": "editor::MoveToNextSubwordEnd", + "ctrl-alt-shift-left": "editor::SelectToPreviousSubwordStart", + "ctrl-alt-shift-b": "editor::SelectToPreviousSubwordStart", + "ctrl-alt-shift-right": "editor::SelectToNextSubwordEnd", + "ctrl-alt-shift-f": "editor::SelectToNextSubwordEnd" + } + 
}, + // Bindings from Atom + { + "context": "Pane", + "bindings": { + "cmd-k up": "pane::SplitUp", + "cmd-k down": "pane::SplitDown", + "cmd-k left": "pane::SplitLeft", + "cmd-k right": "pane::SplitRight" + } + }, + // Bindings that should be unified with bindings for more general actions + { + "context": "Editor && renaming", + "bindings": { + "enter": "editor::ConfirmRename" + } + }, + { + "context": "Editor && showing_completions", + "bindings": { + "enter": "editor::ConfirmCompletion", + "tab": "editor::ConfirmCompletion" + } + }, + { + "context": "Editor && showing_code_actions", + "bindings": { + "enter": "editor::ConfirmCodeAction" + } + }, + { + "context": "Editor && (showing_code_actions || showing_completions)", + "bindings": { + "up": "editor::ContextMenuPrev", + "ctrl-p": "editor::ContextMenuPrev", + "down": "editor::ContextMenuNext", + "ctrl-n": "editor::ContextMenuNext", + "pageup": "editor::ContextMenuFirst", + "pagedown": "editor::ContextMenuLast" + } + }, + // Custom bindings + { + "bindings": { + "ctrl-alt-cmd-f": "workspace::FollowNextCollaborator", + // TODO: Move this to a dock open action + "cmd-shift-c": "collab_panel::ToggleFocus", + "cmd-alt-i": "zed::DebugElements", + "ctrl-:": "editor::ToggleInlayHints" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "alt-enter": "editor::OpenExcerpts", + "shift-enter": "editor::ExpandExcerpts", + "cmd-k enter": "editor::OpenExcerptsSplit", + "cmd-f8": "editor::GoToHunk", + "cmd-shift-f8": "editor::GoToPrevHunk", + "ctrl-enter": "assistant::InlineAssist" + } + }, + { + "context": "ProjectSearchBar && !in_replace", + "bindings": { + "cmd-enter": "project_search::SearchInNew" + } + }, + { + "context": "ProjectPanel", + "bindings": { + "left": "project_panel::CollapseSelectedEntry", + "right": "project_panel::ExpandSelectedEntry", + "cmd-n": "project_panel::NewFile", + "alt-cmd-n": "project_panel::NewDirectory", + "cmd-x": "project_panel::Cut", + "cmd-c": "project_panel::Copy", + "cmd-v": "project_panel::Paste", + "cmd-alt-c": "project_panel::CopyPath", + "alt-cmd-shift-c": "project_panel::CopyRelativePath", + "f2": "project_panel::Rename", + "enter": "project_panel::Rename", + "backspace": "project_panel::Trash", + "delete": "project_panel::Trash", + "cmd-backspace": ["project_panel::Delete", { "skip_prompt": true }], + "cmd-delete": ["project_panel::Delete", { "skip_prompt": true }], + "alt-cmd-r": "project_panel::RevealInFinder", + "alt-shift-f": "project_panel::NewSearchInDirectory" + } + }, + { + "context": "ProjectPanel && not_editing", + "bindings": { + "space": "project_panel::Open" + } + }, + { + "context": "CollabPanel && not_editing", + "bindings": { + "ctrl-backspace": "collab_panel::Remove", + "space": "menu::Confirm" + } + }, + { + "context": "(CollabPanel && editing) > Editor", + "bindings": { + "space": "collab_panel::InsertSpace" + } + }, + { + "context": "ChannelModal", + "bindings": { + "tab": "channel_modal::ToggleMode" + } + }, + { + "context": "ChannelModal > Picker > Editor", + "bindings": { + "tab": "channel_modal::ToggleMode" + } + }, + { + "context": "FileFinder", + "bindings": { "cmd-shift-p": "file_finder::SelectPrev" } + }, + { + "context": "TabSwitcher", + "bindings": { + "ctrl-up": "menu::SelectPrev", + "ctrl-down": "menu::SelectNext", + "ctrl-shift-tab": "menu::SelectPrev", + "ctrl-backspace": "tab_switcher::CloseSelectedItem" + } + }, + { + "context": "Picker", + "bindings": { + "alt-e": "picker::UseSelectedQuery", + "alt-enter": ["picker::ConfirmInput", { "secondary": false }], 
+ "cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }] + } + }, + { + "context": "Terminal", + "bindings": { + "ctrl-cmd-space": "terminal::ShowCharacterPalette", + "cmd-c": "terminal::Copy", + "cmd-v": "terminal::Paste", + "cmd-k": "terminal::Clear", + // Some nice conveniences + "cmd-backspace": ["terminal::SendText", "\u0015"], + "cmd-right": ["terminal::SendText", "\u0005"], + "cmd-left": ["terminal::SendText", "\u0001"], + // Terminal.app compatibility + "alt-left": ["terminal::SendText", "\u001bb"], + "alt-right": ["terminal::SendText", "\u001bf"], + // There are conflicting bindings for these keys in the global context. + // these bindings override them, remove at your own risk: + "up": ["terminal::SendKeystroke", "up"], + "pageup": ["terminal::SendKeystroke", "pageup"], + "down": ["terminal::SendKeystroke", "down"], + "pagedown": ["terminal::SendKeystroke", "pagedown"], + "escape": ["terminal::SendKeystroke", "escape"], + "enter": ["terminal::SendKeystroke", "enter"], + "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"] + } + } +] diff --git a/assets/keymaps/jetbrains.json b/assets/keymaps/jetbrains.json new file mode 100644 index 0000000..4e31ec9 --- /dev/null +++ b/assets/keymaps/jetbrains.json @@ -0,0 +1,100 @@ +[ + { + "bindings": { + "cmd-shift-[": "pane::ActivatePrevItem", + "cmd-shift-]": "pane::ActivateNextItem" + } + }, + { + "context": "Editor", + "bindings": { + "ctrl->": "zed::IncreaseBufferFontSize", + "ctrl-<": "zed::DecreaseBufferFontSize", + "ctrl-shift-j": "editor::JoinLines", + "cmd-d": "editor::DuplicateLineDown", + "cmd-backspace": "editor::DeleteLine", + "cmd-pagedown": "editor::MovePageDown", + "cmd-pageup": "editor::MovePageUp", + "ctrl-alt-shift-b": "editor::SelectToPreviousWordStart", + "cmd-alt-enter": "editor::NewlineAbove", + "shift-enter": "editor::NewlineBelow", + "cmd--": "editor::Fold", + "cmd-+": "editor::UnfoldLines", + "alt-shift-g": "editor::SplitSelectionIntoLines", + "ctrl-g": [ + "editor::SelectNext", + { + "replace_newest": false + } + ], + "ctrl-cmd-g": [ + "editor::SelectPrevious", + { + "replace_newest": false + } + ], + "cmd-/": [ + "editor::ToggleComments", + { + "advance_downwards": true + } + ], + "alt-up": "editor::SelectLargerSyntaxNode", + "alt-down": "editor::SelectSmallerSyntaxNode", + "shift-alt-up": "editor::MoveLineUp", + "shift-alt-down": "editor::MoveLineDown", + "cmd-alt-l": "editor::Format", + "shift-f6": "editor::Rename", + "cmd-[": "pane::GoBack", + "cmd-]": "pane::GoForward", + "alt-f7": "editor::FindAllReferences", + "cmd-alt-f7": "editor::FindAllReferences", + "cmd-b": "editor::GoToDefinition", + "cmd-alt-b": "editor::GoToDefinitionSplit", + "cmd-shift-b": "editor::GoToTypeDefinition", + "cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit", + "alt-enter": "editor::ToggleCodeActions", + "f2": "editor::GoToDiagnostic", + "cmd-f2": "editor::GoToPrevDiagnostic", + "ctrl-alt-shift-down": "editor::GoToHunk", + "ctrl-alt-shift-up": "editor::GoToPrevHunk", + "cmd-home": "editor::MoveToBeginning", + "cmd-end": "editor::MoveToEnd", + "cmd-shift-home": "editor::SelectToBeginning", + "cmd-shift-end": "editor::SelectToEnd" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "cmd-f12": "outline::Toggle", + "cmd-7": "outline::Toggle", + "cmd-shift-o": "file_finder::Toggle", + "cmd-l": "go_to_line::Toggle" + } + }, + { + "context": "Workspace", + "bindings": { + "cmd-shift-o": "file_finder::Toggle", + "cmd-shift-a": "command_palette::Toggle", + "cmd-alt-o": "project_symbols::Toggle", + "cmd-1": 
"workspace::ToggleLeftDock", + "cmd-6": "diagnostics::Deploy" + } + }, + { + "context": "Pane", + "bindings": { + "cmd-alt-left": "pane::GoBack", + "cmd-alt-right": "pane::GoForward" + } + }, + { + "context": "ProjectPanel", + "bindings": { + "enter": "project_panel::Open", + "shift-f6": "project_panel::Rename" + } + } +] diff --git a/assets/keymaps/storybook.json b/assets/keymaps/storybook.json new file mode 100644 index 0000000..5e37582 --- /dev/null +++ b/assets/keymaps/storybook.json @@ -0,0 +1,27 @@ +[ + // Standard macOS bindings + { + "bindings": { + "up": "menu::SelectPrev", + "pageup": "menu::SelectFirst", + "shift-pageup": "menu::SelectFirst", + "ctrl-p": "menu::SelectPrev", + "down": "menu::SelectNext", + "pagedown": "menu::SelectLast", + "shift-pagedown": "menu::SelectFirst", + "ctrl-n": "menu::SelectNext", + "cmd-up": "menu::SelectFirst", + "cmd-down": "menu::SelectLast", + "enter": "menu::Confirm", + "ctrl-enter": "menu::SecondaryConfirm", + "cmd-enter": "menu::SecondaryConfirm", + "escape": "menu::Cancel", + "ctrl-c": "menu::Cancel", + "cmd-q": "storybook::Quit", + "backspace": "editor::Backspace", + "delete": "editor::Delete", + "left": "editor::MoveLeft", + "right": "editor::MoveRight" + } + } +] diff --git a/assets/keymaps/sublime_text.json b/assets/keymaps/sublime_text.json new file mode 100644 index 0000000..dc1fc1c --- /dev/null +++ b/assets/keymaps/sublime_text.json @@ -0,0 +1,55 @@ +[ + { + "bindings": { + "cmd-shift-[": "pane::ActivatePrevItem", + "cmd-shift-]": "pane::ActivateNextItem", + "ctrl-pagedown": "pane::ActivatePrevItem", + "ctrl-pageup": "pane::ActivateNextItem", + "ctrl-shift-tab": "pane::ActivateNextItem", + "ctrl-tab": "pane::ActivatePrevItem", + "cmd-+": "zed::IncreaseBufferFontSize" + } + }, + { + "context": "Editor", + "bindings": { + "ctrl-shift-up": "editor::AddSelectionAbove", + "ctrl-shift-down": "editor::AddSelectionBelow", + "cmd-shift-space": "editor::SelectAll", + "ctrl-shift-m": "editor::SelectLargerSyntaxNode", + "cmd-shift-l": "editor::SplitSelectionIntoLines", + "cmd-shift-a": "editor::SelectLargerSyntaxNode", + "shift-f12": "editor::FindAllReferences", + "alt-cmd-down": "editor::GoToDefinition", + "ctrl-alt-cmd-down": "editor::GoToDefinitionSplit", + "alt-shift-cmd-down": "editor::FindAllReferences", + "ctrl-.": "editor::GoToHunk", + "ctrl-,": "editor::GoToPrevHunk", + "ctrl-backspace": "editor::DeleteToPreviousWordStart", + "ctrl-delete": "editor::DeleteToNextWordEnd" + } + }, + { + "context": "Editor && mode == full", + "bindings": { + "cmd-r": "outline::Toggle" + } + }, + { + "context": "Pane", + "bindings": { + "f4": "search::SelectNextMatch", + "shift-f4": "search::SelectPrevMatch" + } + }, + { + "context": "Workspace", + "bindings": { + "cmd-k cmd-b": "workspace::ToggleLeftDock", + "cmd-t": "file_finder::Toggle", + "shift-cmd-r": "project_symbols::Toggle", + // Currently busted: https://github.com/zed-industries/feedback/issues/898 + "ctrl-0": "project_panel::ToggleFocus" + } + } +] diff --git a/assets/keymaps/textmate.json b/assets/keymaps/textmate.json new file mode 100644 index 0000000..c3947dc --- /dev/null +++ b/assets/keymaps/textmate.json @@ -0,0 +1,96 @@ +[ + { + "bindings": { + "cmd-shift-o": "projects::OpenRecent", + "cmd-alt-tab": "project_panel::ToggleFocus" + } + }, + { + "context": "Editor", + "bindings": { + "cmd-l": "go_to_line::Toggle", + "ctrl-shift-d": "editor::DuplicateLineDown", + "cmd-b": "editor::GoToDefinition", + "cmd-j": "editor::ScrollCursorCenter", + "cmd-enter": "editor::NewlineBelow", + 
"cmd-alt-enter": "editor::NewLineAbove", + "cmd-shift-l": "editor::SelectLine", + "cmd-shift-t": "outline::Toggle", + "alt-backspace": "editor::DeleteToPreviousWordStart", + "alt-shift-backspace": "editor::DeleteToNextWordEnd", + "alt-delete": "editor::DeleteToNextWordEnd", + "alt-shift-delete": "editor::DeleteToNextWordEnd", + "ctrl-backspace": "editor::DeleteToPreviousSubwordStart", + "ctrl-delete": "editor::DeleteToNextSubwordEnd", + "alt-left": [ + "editor::MoveToPreviousWordStart", + { + "stop_at_soft_wraps": true + } + ], + "alt-right": [ + "editor::MoveToNextWordEnd", + { + "stop_at_soft_wraps": true + } + ], + "ctrl-left": "editor::MoveToPreviousSubwordStart", + "ctrl-right": "editor::MoveToNextSubwordEnd", + "cmd-shift-left": "editor::SelectToBeginningOfLine", + "cmd-shift-right": "editor::SelectToEndOfLine", + "alt-shift-left": [ + "editor::SelectToPreviousWordStart", + { + "stop_at_soft_wraps": true + } + ], + "alt-shift-right": [ + "editor::SelectToNextWordEnd", + { + "stop_at_soft_wraps": true + } + ], + "ctrl-shift-left": "editor::SelectToPreviousSubwordStart", + "ctrl-shift-right": "editor::SelectToNextSubwordEnd", + "ctrl-w": "editor::SelectNext", + "ctrl-u": "editor::ConvertToUpperCase", + "ctrl-shift-u": "editor::ConvertToLowerCase", + "ctrl-alt-u": "editor::ConvertToUpperCamelCase", + "ctrl-_": "editor::ConvertToSnakeCase" + } + }, + { + "context": "Editor && mode == full", + "bindings": {} + }, + { + "context": "BufferSearchBar", + "bindings": { + "ctrl-s": "search::SelectNextMatch", + "ctrl-shift-s": "search::SelectPrevMatch" + } + }, + { + "context": "Workspace", + "bindings": { + "cmd-alt-ctrl-d": "workspace::ToggleLeftDock", + "cmd-t": "file_finder::Toggle", + "cmd-shift-t": "project_symbols::Toggle" + } + }, + { + "context": "Pane", + "bindings": { + "alt-cmd-r": "search::ToggleRegex", + "ctrl-tab": "project_panel::ToggleFocus" + } + }, + { + "context": "ProjectPanel", + "bindings": {} + }, + { + "context": "Dock", + "bindings": {} + } +] diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json new file mode 100644 index 0000000..47bd8ba --- /dev/null +++ b/assets/keymaps/vim.json @@ -0,0 +1,653 @@ +[ + { + "context": "ProjectPanel || Editor", + "bindings": { + "ctrl-6": "pane::AlternateFile" + } + }, + { + "context": "Editor && VimControl && !VimWaiting && !menu", + "bindings": { + "i": [ + "vim::PushOperator", + { + "Object": { + "around": false + } + } + ], + "a": [ + "vim::PushOperator", + { + "Object": { + "around": true + } + } + ], + ":": "command_palette::Toggle", + "h": "vim::Left", + "left": "vim::Left", + "backspace": "vim::Backspace", + "j": "vim::Down", + "down": "vim::Down", + "enter": "vim::NextLineStart", + "tab": "vim::Tab", + "shift-tab": "vim::Tab", + "k": "vim::Up", + "up": "vim::Up", + "l": "vim::Right", + "right": "vim::Right", + "space": "vim::Space", + "$": "vim::EndOfLine", + "^": "vim::FirstNonWhitespace", + "_": "vim::StartOfLineDownward", + "g _": "vim::EndOfLineDownward", + "shift-g": "vim::EndOfDocument", + "{": "vim::StartOfParagraph", + "}": "vim::EndOfParagraph", + "|": "vim::GoToColumn", + + // Word motions + "w": "vim::NextWordStart", + "e": "vim::NextWordEnd", + "b": "vim::PreviousWordStart", + "g e": "vim::PreviousWordEnd", + + // Subword motions + // "w": "vim::NextSubwordStart", + // "b": "vim::PreviousSubwordStart", + // "e": "vim::NextSubwordEnd", + // "g e": "vim::PreviousSubwordEnd", + + "shift-w": [ + "vim::NextWordStart", + { + "ignorePunctuation": true + } + ], + "shift-e": [ + "vim::NextWordEnd", + { + 
"ignorePunctuation": true + } + ], + "shift-b": [ + "vim::PreviousWordStart", + { + "ignorePunctuation": true + } + ], + "g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }], + + "/": "vim::Search", + "?": [ + "vim::Search", + { + "backwards": true + } + ], + "*": "vim::MoveToNext", + "#": "vim::MoveToPrev", + "n": "vim::MoveToNextMatch", + "shift-n": "vim::MoveToPrevMatch", + "%": "vim::Matching", + "f": [ + "vim::PushOperator", + { + "FindForward": { + "before": false + } + } + ], + "t": [ + "vim::PushOperator", + { + "FindForward": { + "before": true + } + } + ], + "shift-f": [ + "vim::PushOperator", + { + "FindBackward": { + "after": false + } + } + ], + "shift-t": [ + "vim::PushOperator", + { + "FindBackward": { + "after": true + } + } + ], + "m": ["vim::PushOperator", "Mark"], + "'": ["vim::PushOperator", { "Jump": { "line": true } }], + "`": ["vim::PushOperator", { "Jump": { "line": false } }], + ";": "vim::RepeatFind", + ",": "vim::RepeatFindReversed", + "ctrl-o": "pane::GoBack", + "ctrl-i": "pane::GoForward", + "ctrl-]": "editor::GoToDefinition", + "escape": ["vim::SwitchMode", "Normal"], + "ctrl-[": ["vim::SwitchMode", "Normal"], + "v": "vim::ToggleVisual", + "shift-v": "vim::ToggleVisualLine", + "ctrl-v": "vim::ToggleVisualBlock", + "ctrl-q": "vim::ToggleVisualBlock", + "shift-k": "editor::Hover", + "shift-r": "vim::ToggleReplace", + "0": "vim::StartOfLine", // When no number operator present, use start of line motion + "ctrl-f": "vim::PageDown", + "pagedown": "vim::PageDown", + "ctrl-b": "vim::PageUp", + "pageup": "vim::PageUp", + "ctrl-d": "vim::ScrollDown", + "ctrl-u": "vim::ScrollUp", + "ctrl-e": "vim::LineDown", + "ctrl-y": "vim::LineUp", + // "g" commands + "g g": "vim::StartOfDocument", + "g h": "editor::Hover", + "g t": "pane::ActivateNextItem", + "g shift-t": "pane::ActivatePrevItem", + "g d": "editor::GoToDefinition", + "g shift-d": "editor::GoToTypeDefinition", + "g x": "editor::OpenUrl", + "g n": "vim::SelectNextMatch", + "g shift-n": "vim::SelectPreviousMatch", + "g l": "vim::SelectNext", + "g shift-l": "vim::SelectPrevious", + "g >": [ + "editor::SelectNext", + { + "replace_newest": true + } + ], + "g <": [ + "editor::SelectPrevious", + { + "replace_newest": true + } + ], + "g a": "editor::SelectAllMatches", + "g s": "outline::Toggle", + "g shift-s": "project_symbols::Toggle", + "g .": "editor::ToggleCodeActions", // zed specific + "g shift-a": "editor::FindAllReferences", // zed specific + "g space": "editor::OpenExcerpts", // zed specific + "g *": [ + "vim::MoveToNext", + { + "partialWord": true + } + ], + "g #": [ + "vim::MoveToPrev", + { + "partialWord": true + } + ], + "g j": [ + "vim::Down", + { + "displayLines": true + } + ], + "g down": [ + "vim::Down", + { + "displayLines": true + } + ], + "g k": [ + "vim::Up", + { + "displayLines": true + } + ], + "g up": [ + "vim::Up", + { + "displayLines": true + } + ], + "g $": [ + "vim::EndOfLine", + { + "displayLines": true + } + ], + "g end": [ + "vim::EndOfLine", + { + "displayLines": true + } + ], + "g 0": [ + "vim::StartOfLine", + { + "displayLines": true + } + ], + "g home": [ + "vim::StartOfLine", + { + "displayLines": true + } + ], + "g ^": [ + "vim::FirstNonWhitespace", + { + "displayLines": true + } + ], + "g ]": "editor::GoToDiagnostic", + "g [": "editor::GoToPrevDiagnostic", + "g i": ["workspace::SendKeystrokes", "` ^ i"], + "g ,": "vim::ChangeListNewer", + "g ;": "vim::ChangeListOlder", + "shift-h": "vim::WindowTop", + "shift-m": "vim::WindowMiddle", + "shift-l": "vim::WindowBottom", + // 
z commands + "z t": "editor::ScrollCursorTop", + "z z": "editor::ScrollCursorCenter", + "z .": ["workspace::SendKeystrokes", "z z ^"], + "z b": "editor::ScrollCursorBottom", + "z c": "editor::Fold", + "z o": "editor::UnfoldLines", + "z f": "editor::FoldSelectedRanges", + "shift-z shift-q": [ + "pane::CloseActiveItem", + { + "saveIntent": "skip" + } + ], + "shift-z shift-z": [ + "pane::CloseActiveItem", + { + "saveIntent": "saveAll" + } + ], + // Count support + "1": ["vim::Number", 1], + "2": ["vim::Number", 2], + "3": ["vim::Number", 3], + "4": ["vim::Number", 4], + "5": ["vim::Number", 5], + "6": ["vim::Number", 6], + "7": ["vim::Number", 7], + "8": ["vim::Number", 8], + "9": ["vim::Number", 9], + // window related commands (ctrl-w X) + "ctrl-w left": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w right": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w up": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w down": ["workspace::ActivatePaneInDirection", "Down"], + "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"], + "ctrl-w ctrl-h": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w ctrl-l": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w ctrl-k": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w ctrl-j": ["workspace::ActivatePaneInDirection", "Down"], + "ctrl-w shift-left": ["workspace::SwapPaneInDirection", "Left"], + "ctrl-w shift-right": ["workspace::SwapPaneInDirection", "Right"], + "ctrl-w shift-up": ["workspace::SwapPaneInDirection", "Up"], + "ctrl-w shift-down": ["workspace::SwapPaneInDirection", "Down"], + "ctrl-w shift-h": ["workspace::SwapPaneInDirection", "Left"], + "ctrl-w shift-l": ["workspace::SwapPaneInDirection", "Right"], + "ctrl-w shift-k": ["workspace::SwapPaneInDirection", "Up"], + "ctrl-w shift-j": ["workspace::SwapPaneInDirection", "Down"], + "ctrl-w g t": "pane::ActivateNextItem", + "ctrl-w ctrl-g t": "pane::ActivateNextItem", + "ctrl-w g shift-t": "pane::ActivatePrevItem", + "ctrl-w ctrl-g shift-t": "pane::ActivatePrevItem", + "ctrl-w w": "workspace::ActivateNextPane", + "ctrl-w ctrl-w": "workspace::ActivateNextPane", + "ctrl-w p": "workspace::ActivatePreviousPane", + "ctrl-w ctrl-p": "workspace::ActivatePreviousPane", + "ctrl-w shift-w": "workspace::ActivatePreviousPane", + "ctrl-w ctrl-shift-w": "workspace::ActivatePreviousPane", + "ctrl-w v": "pane::SplitLeft", + "ctrl-w ctrl-v": "pane::SplitLeft", + "ctrl-w s": "pane::SplitUp", + "ctrl-w shift-s": "pane::SplitUp", + "ctrl-w ctrl-s": "pane::SplitUp", + "ctrl-w c": "pane::CloseAllItems", + "ctrl-w ctrl-c": "pane::CloseAllItems", + "ctrl-w q": "pane::CloseAllItems", + "ctrl-w ctrl-q": "pane::CloseAllItems", + "ctrl-w o": "workspace::CloseInactiveTabsAndPanes", + "ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes", + "ctrl-w n": ["workspace::NewFileInDirection", "Up"], + "ctrl-w ctrl-n": ["workspace::NewFileInDirection", "Up"], + + "ctrl-w d": "editor::GoToDefinitionSplit", + "ctrl-w g d": "editor::GoToDefinitionSplit", + "ctrl-w shift-d": "editor::GoToTypeDefinitionSplit", + "ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit", + "ctrl-w space": "editor::OpenExcerptsSplit", + "ctrl-w g space": "editor::OpenExcerptsSplit", + "-": "pane::RevealInProjectPanel" + } + }, + { + // escape is in its own section so that it cancels a pending count. 
+ "context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting", + "bindings": { + "escape": "editor::Cancel", + "ctrl-[": "editor::Cancel" + } + }, + { + "context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting", + "bindings": { + ".": "vim::Repeat", + "c": ["vim::PushOperator", "Change"], + "shift-c": "vim::ChangeToEndOfLine", + "d": ["vim::PushOperator", "Delete"], + "shift-d": "vim::DeleteToEndOfLine", + "shift-j": "vim::JoinLines", + "y": ["vim::PushOperator", "Yank"], + "shift-y": "vim::YankLine", + "i": "vim::InsertBefore", + "shift-i": "vim::InsertFirstNonWhitespace", + "a": "vim::InsertAfter", + "shift-a": "vim::InsertEndOfLine", + "x": "vim::DeleteRight", + "shift-x": "vim::DeleteLeft", + "o": "vim::InsertLineBelow", + "shift-o": "vim::InsertLineAbove", + "~": "vim::ChangeCase", + "ctrl-a": "vim::Increment", + "ctrl-x": "vim::Decrement", + "p": "vim::Paste", + "shift-p": [ + "vim::Paste", + { + "before": true + } + ], + "u": "editor::Undo", + "ctrl-r": "editor::Redo", + "r": ["vim::PushOperator", "Replace"], + "s": "vim::Substitute", + "shift-s": "vim::SubstituteLine", + "> >": "vim::Indent", + "< <": "vim::Outdent", + "ctrl-pagedown": "pane::ActivateNextItem", + "ctrl-pageup": "pane::ActivatePrevItem", + // tree-sitter related commands + "[ x": "editor::SelectLargerSyntaxNode", + "] x": "editor::SelectSmallerSyntaxNode" + } + }, + { + "context": "Editor && vim_mode == visual && vim_operator == none && !VimWaiting", + "bindings": { + // tree-sitter related commands + "[ x": "editor::SelectLargerSyntaxNode", + "] x": "editor::SelectSmallerSyntaxNode" + } + }, + { + "context": "Editor && VimCount", + "bindings": { + "0": ["vim::Number", 0] + } + }, + { + "context": "Editor && vim_operator == c", + "bindings": { + "c": "vim::CurrentLine", + "d": "editor::Rename" // zed specific + } + }, + { + "context": "Editor && vim_mode == normal && vim_operator == c", + "bindings": { + "s": [ + "vim::PushOperator", + { + "ChangeSurrounds": {} + } + ] + } + }, + { + "context": "Editor && vim_operator == d", + "bindings": { + "d": "vim::CurrentLine" + } + }, + { + "context": "Editor && vim_mode == normal && vim_operator == d", + "bindings": { + "s": ["vim::PushOperator", "DeleteSurrounds"] + } + }, + { + "context": "Editor && vim_operator == y", + "bindings": { + "y": "vim::CurrentLine" + } + }, + { + "context": "Editor && vim_mode == normal && vim_operator == y", + "bindings": { + "s": [ + "vim::PushOperator", + { + "AddSurrounds": {} + } + ] + } + }, + { + "context": "Editor && vim_operator == ys", + "bindings": { + "s": "vim::CurrentLine" + } + }, + { + "context": "Editor && VimObject", + "bindings": { + "w": "vim::Word", + "shift-w": [ + "vim::Word", + { + "ignorePunctuation": true + } + ], + "t": "vim::Tag", + "s": "vim::Sentence", + "p": "vim::Paragraph", + "'": "vim::Quotes", + "`": "vim::BackQuotes", + "\"": "vim::DoubleQuotes", + "|": "vim::VerticalBars", + "(": "vim::Parentheses", + ")": "vim::Parentheses", + "b": "vim::Parentheses", + "[": "vim::SquareBrackets", + "]": "vim::SquareBrackets", + "{": "vim::CurlyBrackets", + "}": "vim::CurlyBrackets", + "shift-b": "vim::CurlyBrackets", + "<": "vim::AngleBrackets", + ">": "vim::AngleBrackets", + "a": "vim::Argument" + } + }, + { + "context": "Editor && vim_mode == visual && !VimWaiting && !VimObject", + "bindings": { + "u": "vim::ConvertToLowerCase", + "U": "vim::ConvertToUpperCase", + "o": "vim::OtherEnd", + "shift-o": "vim::OtherEnd", + "d": "vim::VisualDelete", + "x": "vim::VisualDelete", + 
"shift-d": "vim::VisualDelete", + "shift-x": "vim::VisualDelete", + "y": "vim::VisualYank", + "shift-y": "vim::VisualYank", + "p": "vim::Paste", + "shift-p": [ + "vim::Paste", + { + "preserveClipboard": true + } + ], + "s": "vim::Substitute", + "shift-s": "vim::SubstituteLine", + "shift-r": "vim::SubstituteLine", + "c": "vim::Substitute", + "~": "vim::ChangeCase", + "*": [ + "vim::MoveToNext", + { + "partialWord": true + } + ], + "#": [ + "vim::MoveToPrev", + { + "partialWord": true + } + ], + "ctrl-a": "vim::Increment", + "ctrl-x": "vim::Decrement", + "g ctrl-a": [ + "vim::Increment", + { + "step": true + } + ], + "g ctrl-x": [ + "vim::Decrement", + { + "step": true + } + ], + "shift-i": "vim::InsertBefore", + "shift-a": "vim::InsertAfter", + "shift-j": "vim::JoinLines", + "r": ["vim::PushOperator", "Replace"], + "ctrl-c": ["vim::SwitchMode", "Normal"], + "escape": ["vim::SwitchMode", "Normal"], + "ctrl-[": ["vim::SwitchMode", "Normal"], + ">": "vim::Indent", + "<": "vim::Outdent", + "i": [ + "vim::PushOperator", + { + "Object": { + "around": false + } + } + ], + "a": [ + "vim::PushOperator", + { + "Object": { + "around": true + } + } + ] + } + }, + { + "context": "Editor && vim_mode == normal", + "bindings": { + "g c c": "editor::ToggleComments" + } + }, + { + "context": "Editor && vim_mode == visual", + "bindings": { + "g c": "editor::ToggleComments" + } + }, + { + "context": "Editor && vim_mode == insert", + "bindings": { + "escape": "vim::NormalBefore", + "ctrl-c": "vim::NormalBefore", + "ctrl-[": "vim::NormalBefore", + "ctrl-x ctrl-o": "editor::ShowCompletions", + "ctrl-x ctrl-a": "assistant::InlineAssist", // zed specific + "ctrl-x ctrl-c": "editor::ShowInlineCompletion", // zed specific + "ctrl-x ctrl-l": "editor::ToggleCodeActions", // zed specific + "ctrl-x ctrl-z": "editor::Cancel", + "ctrl-w": "editor::DeleteToPreviousWordStart", + "ctrl-u": "editor::DeleteToBeginningOfLine", + "ctrl-t": "vim::Indent", + "ctrl-d": "vim::Outdent", + "ctrl-r \"": "editor::Paste", + "ctrl-r +": "editor::Paste" + } + }, + { + "context": "Editor && vim_mode == replace", + "bindings": { + "escape": "vim::NormalBefore", + "ctrl-c": "vim::NormalBefore", + "ctrl-[": "vim::NormalBefore", + "backspace": "vim::UndoReplace" + } + }, + { + "context": "Editor && VimWaiting", + "bindings": { + "tab": "vim::Tab", + "enter": "vim::Enter", + "escape": ["vim::SwitchMode", "Normal"], + "ctrl-[": ["vim::SwitchMode", "Normal"] + } + }, + { + "context": "BufferSearchBar && !in_replace", + "bindings": { + "enter": "vim::SearchSubmit", + "escape": "buffer_search::Dismiss" + } + }, + { + "context": "EmptyPane || SharedScreen", + "bindings": { + ":": "command_palette::Toggle" + } + }, + { + // netrw compatibility + "context": "ProjectPanel && not_editing", + "bindings": { + ":": "command_palette::Toggle", + "%": "project_panel::NewFile", + "/": "project_panel::NewSearchInDirectory", + "d": "project_panel::NewDirectory", + "enter": "project_panel::OpenPermanent", + "escape": "project_panel::ToggleFocus", + "h": "project_panel::CollapseSelectedEntry", + "j": "menu::SelectNext", + "k": "menu::SelectPrev", + "l": "project_panel::ExpandSelectedEntry", + "o": "project_panel::OpenPermanent", + "shift-d": "project_panel::Delete", + "shift-r": "project_panel::Rename", + "t": "project_panel::OpenPermanent", + "v": "project_panel::OpenPermanent", + "p": "project_panel::Open", + "x": "project_panel::RevealInFinder", + "shift-g": "menu::SelectLast", + "g g": "menu::SelectFirst", + "-": "project_panel::SelectParent" + } + } +] diff 
--git a/assets/settings/default.json b/assets/settings/default.json new file mode 100644 index 0000000..e6a265b --- /dev/null +++ b/assets/settings/default.json @@ -0,0 +1,842 @@ +{ + // The name of the Zed theme to use for the UI. + // + // The theme can also be set to follow system preferences: + // + // "theme": { + // "mode": "system", + // "light": "One Light", + // "dark": "One Dark" + // } + // + // Where `mode` is one of: + // - "system": Use the theme that corresponds to the system's appearance + // - "light": Use the theme indicated by the "light" field + // - "dark": Use the theme indicated by the "dark" field + "theme": "One Dark", + // The name of a base set of key bindings to use. + // This setting can take four values, each named after another + // text editor: + // + // 1. "VSCode" + // 2. "JetBrains" + // 3. "SublimeText" + // 4. "Atom" + "base_keymap": "VSCode", + // Features that can be globally enabled or disabled + "features": { + // Which inline completion provider to use. + "inline_completion_provider": "copilot" + }, + // The name of a font to use for rendering text in the editor + "buffer_font_family": "Zed Mono", + // The OpenType features to enable for text in the editor. + "buffer_font_features": { + // Disable ligatures: + // "calt": false + }, + // The default font size for text in the editor + "buffer_font_size": 15, + // Set the buffer's line height. + // May take 3 values: + // 1. Use a line height that's comfortable for reading (1.618) + // "line_height": "comfortable" + // 2. Use a standard line height, (1.3) + // "line_height": "standard", + // 3. Use a custom line height + // "line_height": { + // "custom": 2 + // }, + "buffer_line_height": "comfortable", + // The name of a font to use for rendering text in the UI + "ui_font_family": ".SystemUIFont", + // The OpenType features to enable for text in the UI + "ui_font_features": { + // Disable ligatures: + "calt": false + }, + // The default font size for text in the UI + "ui_font_size": 16, + // The factor to grow the active pane by. Defaults to 1.0 + // which gives the same size as all other panes. + "active_pane_magnification": 1.0, + // Centered layout related settings. + "centered_layout": { + // The relative width of the left padding of the central pane from the + // workspace when the centered layout is used. + "left_padding": 0.2, + // The relative width of the right padding of the central pane from the + // workspace when the centered layout is used. + "right_padding": 0.2 + }, + // The key to use for adding multiple cursors + // Currently "alt" or "cmd_or_ctrl" (also aliased as + // "cmd" and "ctrl") are supported. + "multi_cursor_modifier": "alt", + // Whether to enable vim modes and key bindings. + "vim_mode": false, + // Whether to show the informational hover box when moving the mouse + // over symbols in the editor. + "hover_popover_enabled": true, + // Whether to confirm before quitting Zed. + "confirm_quit": false, + // Whether to restore last closed project when fresh Zed instance is opened. + "restore_on_startup": "last_workspace", + // Size of the drop target in the editor. + "drop_target_size": 0.2, + // Whether the window should be closed when using 'close active item' on a window with no tabs. + // May take 3 values: + // 1. Use the current platform's convention + // "when_closing_with_no_tabs": "platform_default" + // 2. Always close the window: + // "when_closing_with_no_tabs": "close_window", + // 3. 
Never close the window
+ // "when_closing_with_no_tabs": "keep_window_open",
+ "when_closing_with_no_tabs": "platform_default",
+ // Whether the cursor blinks in the editor.
+ "cursor_blink": true,
+ // How to highlight the current line in the editor.
+ //
+ // 1. Don't highlight the current line:
+ // "none"
+ // 2. Highlight the gutter area:
+ // "gutter"
+ // 3. Highlight the editor area:
+ // "line"
+ // 4. Highlight the full line (default):
+ // "all"
+ "current_line_highlight": "all",
+ // Whether to pop the completions menu while typing in an editor without
+ // explicitly requesting it.
+ "show_completions_on_input": true,
+ // Whether to display inline and alongside documentation for items in the
+ // completions menu
+ "show_completion_documentation": true,
+ // The debounce delay before re-querying the language server for completion
+ // documentation when not included in the original completion list.
+ "completion_documentation_secondary_query_debounce": 300,
+ // Whether to show wrap guides in the editor. Setting this to true will
+ // show a guide at the 'preferred_line_length' value if 'soft_wrap' is set to
+ // 'preferred_line_length', and will show any additional guides as specified
+ // by the 'wrap_guides' setting.
+ "show_wrap_guides": true,
+ // Character counts at which to show wrap guides in the editor.
+ "wrap_guides": [],
+ // Hide the values of variables from visual display in private files
+ "redact_private_values": false,
+ // Globs to match against file paths to determine if a file is private.
+ "private_files": [
+ "**/.env*",
+ "**/*.pem",
+ "**/*.key",
+ "**/*.cert",
+ "**/*.crt",
+ "**/secrets.yml"
+ ],
+ // Whether to use additional LSP queries to format (and amend) the code after
+ // every "trigger" symbol input, defined by LSP server capabilities.
+ "use_on_type_format": true,
+ // Whether to automatically add matching closing characters when typing
+ // opening parenthesis, bracket, brace, single or double quote characters.
+ // For example, when you type (, Zed will add a closing ) at the correct position.
+ "use_autoclose": true,
+ // Controls how the editor handles autoclosed characters.
+ // When set to `false` (default), skipping over and auto-removing of the closing characters
+ // happens only for auto-inserted characters.
+ // Otherwise (when `true`), the closing characters are always skipped over and auto-removed
+ // no matter how they were inserted.
+ "always_treat_brackets_as_autoclosed": false,
+ // Controls whether copilot provides suggestions immediately
+ // or waits for a `copilot::Toggle`
+ "show_copilot_suggestions": true,
+ // Whether to show tabs and spaces in the editor.
+ // This setting can take three values:
+ //
+ // 1. Draw tabs and spaces only for the selected text (default):
+ // "selection"
+ // 2. Do not draw any tabs or spaces:
+ // "none"
+ // 3. Draw all invisible symbols:
+ // "all"
+ "show_whitespaces": "selection",
+ // Settings related to calls in Zed
+ "calls": {
+ // Join calls with the microphone live by default
+ "mute_on_join": false,
+ // Share your project when you are the first to join a channel
+ "share_on_join": true
+ },
+ // Toolbar related settings
+ "toolbar": {
+ // Whether to show breadcrumbs.
+ "breadcrumbs": true,
+ // Whether to show quick action buttons.
+ "quick_actions": true
+ },
+ // Scrollbar related settings
+ "scrollbar": {
+ // When to show the scrollbar in the editor.
+ // This setting can take four values:
+ //
+ // 1.
Show the scrollbar if there's important information or + // follow the system's configured behavior (default): + // "auto" + // 2. Match the system's configured behavior: + // "system" + // 3. Always show the scrollbar: + // "always" + // 4. Never show the scrollbar: + // "never" + "show": "auto", + // Whether to show cursor positions in the scrollbar. + "cursors": true, + // Whether to show git diff indicators in the scrollbar. + "git_diff": true, + // Whether to show buffer search results in the scrollbar. + "search_results": true, + // Whether to show selected symbol occurrences in the scrollbar. + "selected_symbol": true, + // Whether to show diagnostic indicators in the scrollbar. + "diagnostics": true + }, + // What to do when multibuffer is double clicked in some of its excerpts + // (parts of singleton buffers). + // May take 2 values: + // 1. Behave as a regular buffer and select the whole word (default). + // "double_click_in_multibuffer": "select" + // 2. Open the excerpt clicked as a new buffer in the new tab. + // "double_click_in_multibuffer": "open", + // For the case of "open", regular selection behavior can be achieved by holding `alt` when double clicking. + "double_click_in_multibuffer": "select", + "gutter": { + // Whether to show line numbers in the gutter. + "line_numbers": true, + // Whether to show code action buttons in the gutter. + "code_actions": true, + // Whether to show fold buttons in the gutter. + "folds": true + }, + // The number of lines to keep above/below the cursor when scrolling. + "vertical_scroll_margin": 3, + // Scroll sensitivity multiplier. This multiplier is applied + // to both the horizontal and vertical delta values while scrolling. + "scroll_sensitivity": 1.0, + "relative_line_numbers": false, + // When to populate a new search's query based on the text under the cursor. + // This setting can take the following three values: + // + // 1. Always populate the search query with the word under the cursor (default). + // "always" + // 2. Only populate the search query when there is text selected + // "selection" + // 3. Never populate the search query + // "never" + "seed_search_query_from_cursor": "always", + // Inlay hint related settings + "inlay_hints": { + // Global switch to toggle hints on and off, switched off by default. + "enabled": false, + // Toggle certain types of hints on and off, all switched on by default. + "show_type_hints": true, + "show_parameter_hints": true, + // Corresponds to null/None LSP hint type value. + "show_other_hints": true, + // Time to wait after editing the buffer, before requesting the hints, + // set to 0 to disable debouncing. + "edit_debounce_ms": 700, + // Time to wait after scrolling the buffer, before requesting the hints, + // set to 0 to disable debouncing. + "scroll_debounce_ms": 50 + }, + "project_panel": { + // Whether to show the project panel button in the status bar + "button": true, + // Default width of the project panel. + "default_width": 240, + // Where to dock the project panel. Can be 'left' or 'right'. + "dock": "left", + // Whether to show file icons in the project panel. + "file_icons": true, + // Whether to show folder icons or chevrons for directories in the project panel. + "folder_icons": true, + // Whether to show the git status in the project panel. + "git_status": true, + // Amount of indentation for nested items. + "indent_size": 20, + // Whether to reveal it in the project panel automatically, + // when a corresponding project entry becomes active. 
+ // Gitignored entries are never auto revealed.
+ "auto_reveal_entries": true,
+ // Whether to fold directories automatically
+ // when a directory has only one directory inside.
+ "auto_fold_dirs": false
+ },
+ "collaboration_panel": {
+ // Whether to show the collaboration panel button in the status bar.
+ "button": true,
+ // Where to dock the collaboration panel. Can be 'left' or 'right'.
+ "dock": "left",
+ // Default width of the collaboration panel.
+ "default_width": 240
+ },
+ "chat_panel": {
+ // Whether to show the chat panel button in the status bar.
+ "button": true,
+ // Where to dock the chat panel. Can be 'left' or 'right'.
+ "dock": "right",
+ // Default width of the chat panel.
+ "default_width": 240
+ },
+ "message_editor": {
+ // Whether to automatically replace emoji shortcodes with emoji characters.
+ // For example: typing `:wave:` gets replaced with `👋`.
+ "auto_replace_emoji_shortcode": true
+ },
+ "notification_panel": {
+ // Whether to show the notification panel button in the status bar.
+ "button": true,
+ // Where to dock the notification panel. Can be 'left' or 'right'.
+ "dock": "right",
+ // Default width of the notification panel.
+ "default_width": 380
+ },
+ "assistant": {
+ // Version of this setting.
+ "version": "1",
+ // Whether the assistant is enabled.
+ "enabled": true,
+ // Whether to show the assistant panel button in the status bar.
+ "button": true,
+ // Where to dock the assistant panel. Can be 'left', 'right' or 'bottom'.
+ "dock": "right",
+ // Default width when the assistant is docked to the left or right.
+ "default_width": 640,
+ // Default height when the assistant is docked to the bottom.
+ "default_height": 320,
+ // AI provider.
+ "provider": {
+ "name": "openai",
+ // The default model to use when starting new conversations. This
+ // setting can take four values:
+ //
+ // 1. "gpt-3.5-turbo"
+ // 2. "gpt-4"
+ // 3. "gpt-4-turbo-preview"
+ // 4. "gpt-4o"
+ "default_model": "gpt-4o"
+ }
+ },
+ // Whether the screen sharing icon is shown in the OS status bar.
+ "show_call_status_icon": true,
+ // Whether to use language servers to provide code intelligence.
+ "enable_language_server": true,
+ // The list of language servers to use (or disable) for all languages.
+ //
+ // This is typically customized on a per-language basis.
+ "language_servers": ["..."],
+ // When to automatically save edited buffers. This setting can
+ // take four values.
+ //
+ // 1. Never automatically save:
+ // "autosave": "off",
+ // 2. Save when changing focus away from the Zed window:
+ // "autosave": "on_window_change",
+ // 3. Save when changing focus away from a specific buffer:
+ // "autosave": "on_focus_change",
+ // 4. Save when idle for a certain amount of time:
+ // "autosave": { "after_delay": {"milliseconds": 500} },
+ "autosave": "off",
+ // Settings related to the editor's tab bar.
+ "tab_bar": {
+ // Whether or not to show the tab bar in the editor
+ "show": true,
+ // Whether or not to show the navigation history buttons.
+ "show_nav_history_buttons": true
+ },
+ // Settings related to the editor's tabs
+ "tabs": {
+ // Show git status colors in the editor tabs.
+ "git_status": false,
+ // Position of the close button on the editor tabs.
+ "close_position": "right"
+ },
+ // Settings related to preview tabs.
+ "preview_tabs": {
+ // Whether preview tabs should be enabled.
+ // Preview tabs allow you to open files in preview mode, where they close automatically
+ // when you switch to another file unless you explicitly pin them.
+ // This is useful for quickly viewing files without cluttering your workspace.
+ "enabled": true,
+ // Whether to open tabs in preview mode when selected from the file finder.
+ "enable_preview_from_file_finder": false,
+ // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
+ "enable_preview_from_code_navigation": false
+ },
+ // Whether or not to remove any trailing whitespace from lines of a buffer
+ // before saving it.
+ "remove_trailing_whitespace_on_save": true,
+ // Whether to start a new line with a comment when a previous line is a comment as well.
+ "extend_comment_on_newline": true,
+ // Whether or not to ensure there's a single newline at the end of a buffer
+ // when saving it.
+ "ensure_final_newline_on_save": true,
+ // Whether or not to perform a buffer format before saving.
+ //
+ // Keep in mind that if autosave with a delay is enabled, format_on_save will be ignored.
+ "format_on_save": "on",
+ // How to perform a buffer format. This setting can take 4 values:
+ //
+ // 1. Format code using the current language server:
+ // "formatter": "language_server"
+ // 2. Format code using an external command:
+ // "formatter": {
+ // "external": {
+ // "command": "prettier",
+ // "arguments": ["--stdin-filepath", "{buffer_path}"]
+ // }
+ // }
+ // 3. Format code using Zed's Prettier integration:
+ // "formatter": "prettier"
+ // 4. Default. Format files using Zed's Prettier integration (if applicable),
+ // or falling back to formatting via language server:
+ // "formatter": "auto"
+ "formatter": "auto",
+ // How to soft-wrap long lines of text. This setting can take
+ // four values:
+ //
+ // 1. Do not soft wrap:
+ // "soft_wrap": "none",
+ // 2. Prefer a single line generally, unless an overly long line is encountered:
+ // "soft_wrap": "prefer_line",
+ // 3. Soft wrap lines that overflow the editor:
+ // "soft_wrap": "editor_width",
+ // 4. Soft wrap lines at the preferred line length:
+ // "soft_wrap": "preferred_line_length",
+ "soft_wrap": "prefer_line",
+ // The column at which to soft-wrap lines, for buffers where soft-wrap
+ // is enabled.
+ "preferred_line_length": 80,
+ // Whether to indent lines using tab characters, as opposed to multiple
+ // spaces.
+ "hard_tabs": false,
+ // How many columns a tab should occupy.
+ "tab_size": 4,
+ // Control what info is collected by Zed.
+ "telemetry": {
+ // Send debug info like crash reports.
+ "diagnostics": true,
+ // Send anonymized usage data like what languages you're using Zed with.
+ "metrics": true
+ },
+ // Automatically update Zed
+ "auto_update": true,
+ // Diagnostics configuration.
+ "diagnostics": {
+ // Whether to show warnings or not by default.
+ "include_warnings": true
+ },
+ // Add files or globs of files that will be excluded by Zed entirely:
+ // they will be skipped during FS scan(s), file tree and file search
+ // will lack the corresponding file entries.
+ "file_scan_exclusions": [
+ "**/.git",
+ "**/.svn",
+ "**/.hg",
+ "**/CVS",
+ "**/.DS_Store",
+ "**/Thumbs.db",
+ "**/.classpath",
+ "**/.settings"
+ ],
+ // Git gutter behavior configuration.
+ "git": {
+ // Control whether the git gutter is shown. May take 2 values:
+ // 1. Show the gutter
+ // "git_gutter": "tracked_files"
+ // 2. Hide the gutter
+ // "git_gutter": "hide"
+ "git_gutter": "tracked_files",
+ // Control whether the git blame information is shown inline,
+ // in the currently focused line.
+ "inline_blame": {
+ "enabled": true
+ // Sets a delay after which the inline blame information is shown.
+ // Delay is restarted with every cursor movement.
+ // "delay_ms": 600
+ }
+ },
+ "copilot": {
+ // The set of glob patterns for which copilot should be disabled
+ // in any matching file.
+ "disabled_globs": [".env"]
+ },
+ // Settings specific to journaling
+ "journal": {
+ // The path of the directory where journal entries are stored
+ "path": "~",
+ // What format to display the hours in
+ // May take 2 values:
+ // 1. hour12
+ // 2. hour24
+ "hour_format": "hour12"
+ },
+ // Settings specific to the terminal
+ "terminal": {
+ // What shell to use when opening a terminal. May take 3 values:
+ // 1. Use the system's default terminal configuration in /etc/passwd
+ // "shell": "system"
+ // 2. A program:
+ // "shell": {
+ // "program": "sh"
+ // }
+ // 3. A program with arguments:
+ // "shell": {
+ // "with_arguments": {
+ // "program": "/bin/bash",
+ // "arguments": ["--login"]
+ // }
+ // }
+ "shell": "system",
+ // Where to dock the terminal panel. Can be `left`, `right`, `bottom`.
+ "dock": "bottom",
+ // Default width when the terminal is docked to the left or right.
+ "default_width": 640,
+ // Default height when the terminal is docked to the bottom.
+ "default_height": 320,
+ // What working directory to use when launching the terminal.
+ // May take 4 values:
+ // 1. Use the current file's project directory. Will fall back to the
+ // first project directory strategy if unsuccessful
+ // "working_directory": "current_project_directory"
+ // 2. Use the first project in this workspace's directory
+ // "working_directory": "first_project_directory"
+ // 3. Always use this platform's home directory (if we can find it)
+ // "working_directory": "always_home"
+ // 4. Always use a specific directory. This value will be shell expanded.
+ // If this path is not a valid directory the terminal will default to
+ // this platform's home directory (if we can find it)
+ // "working_directory": {
+ // "always": {
+ // "directory": "~/zed/projects/"
+ // }
+ // }
+ "working_directory": "current_project_directory",
+ // Set the cursor blinking behavior in the terminal.
+ // May take 3 values:
+ // 1. Never blink the cursor, ignoring the terminal mode
+ // "blinking": "off",
+ // 2. Default the cursor blink to off, but allow the terminal to
+ // set blinking
+ // "blinking": "terminal_controlled",
+ // 3. Always blink the cursor, ignoring the terminal mode
+ // "blinking": "on",
+ "blinking": "terminal_controlled",
+ // Set whether Alternate Scroll mode (code: ?1007) is active by default.
+ // Alternate Scroll mode converts mouse scroll events into up / down key
+ // presses when in the alternate screen (e.g. when running applications
+ // like vim or less). The terminal can still set and unset this mode.
+ // May take 2 values:
+ // 1. Default alternate scroll mode to on
+ // "alternate_scroll": "on",
+ // 2. Default alternate scroll mode to off
+ // "alternate_scroll": "off",
+ "alternate_scroll": "off",
+ // Set whether the option key behaves as the meta key.
+ // May take 2 values:
+ // 1. Rely on default platform handling of the option key; on macOS
+ // this means generating certain unicode characters
+ // "option_as_meta": false,
+ // 2. Make the option key behave as a 'meta' key, e.g. for emacs
+ // "option_as_meta": true,
+ "option_as_meta": false,
+ // Whether or not selecting text in the terminal will automatically
+ // copy to the system clipboard.
+ "copy_on_select": false, + // Whether to show the terminal button in the status bar + "button": true, + // Any key-value pairs added to this list will be added to the terminal's + // environment. Use `:` to separate multiple values. + "env": { + // "KEY": "value1:value2" + }, + // Set the terminal's line height. + // May take 3 values: + // 1. Use a line height that's comfortable for reading, 1.618 + // "line_height": "comfortable" + // 2. Use a standard line height, 1.3. This option is useful for TUIs, + // particularly if they use box characters + // "line_height": "standard", + // 3. Use a custom line height. + // "line_height": { + // "custom": 2 + // }, + "line_height": "comfortable", + // Activate the python virtual environment, if one is found, in the + // terminal's working directory (as resolved by the working_directory + // setting). Set this to "off" to disable this behavior. + "detect_venv": { + "on": { + // Default directories to search for virtual environments, relative + // to the current working directory. We recommend overriding this + // in your project's settings, rather than globally. + "directories": [".env", "env", ".venv", "venv"], + // Can also be `csh`, `fish`, and `nushell` + "activate_script": "default" + } + }, + "toolbar": { + // Whether to display the terminal title in its toolbar. + "title": true + } + // Set the terminal's font size. If this option is not included, + // the terminal will default to matching the buffer's font size. + // "font_size": 15, + // Set the terminal's font family. If this option is not included, + // the terminal will default to matching the buffer's font family. + // "font_family": "Zed Mono", + // Sets the maximum number of lines in the terminal's scrollback buffer. + // Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling. + // Existing terminals will not pick up this change until they are recreated. + // "max_scroll_history_lines": 10000, + }, + "code_actions_on_format": {}, + // An object whose keys are language names, and whose values + // are arrays of filenames or extensions of files that should + // use those languages. + // + // For example, to treat files like `foo.notjs` as JavaScript, + // and `Embargo.lock` as TOML: + // + // { + // "JavaScript": ["notjs"], + // "TOML": ["Embargo.lock"] + // } + // + "file_types": {}, + // The extensions that Zed should automatically install on startup. + // + // If you don't want any of these extensions, add this field to your settings + // and change the value to `false`. + "auto_install_extensions": { + "html": true + }, + // Different settings for specific languages. 
+ "languages": { + "Astro": { + "prettier": { + "allowed": true, + "plugins": ["prettier-plugin-astro"] + } + }, + "Blade": { + "prettier": { + "allowed": true + } + }, + "C": { + "format_on_save": "off" + }, + "C++": { + "format_on_save": "off" + }, + "CSS": { + "prettier": { + "allowed": true + } + }, + "Elixir": { + "language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."] + }, + "Gleam": { + "tab_size": 2 + }, + "Go": { + "code_actions_on_format": { + "source.organizeImports": true + } + }, + "GraphQL": { + "prettier": { + "allowed": true + } + }, + "HEEX": { + "language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."] + }, + "HTML": { + "prettier": { + "allowed": true + } + }, + "Java": { + "prettier": { + "allowed": true, + "plugins": ["prettier-plugin-java"] + } + }, + "JavaScript": { + "prettier": { + "allowed": true + } + }, + "JSON": { + "prettier": { + "allowed": true + } + }, + "Make": { + "hard_tabs": true + }, + "Markdown": { + "format_on_save": "off", + "prettier": { + "allowed": true + } + }, + "PHP": { + "prettier": { + "allowed": true, + "plugins": ["@prettier/plugin-php"] + } + }, + "Prisma": { + "tab_size": 2 + }, + "Ruby": { + "language_servers": ["solargraph", "!ruby-lsp", "..."] + }, + "SCSS": { + "prettier": { + "allowed": true + } + }, + "SQL": { + "prettier": { + "allowed": true, + "plugins": ["prettier-plugin-sql"] + } + }, + "Svelte": { + "prettier": { + "allowed": true, + "plugins": ["prettier-plugin-svelte"] + } + }, + "TSX": { + "prettier": { + "allowed": true + } + }, + "Twig": { + "prettier": { + "allowed": true + } + }, + "TypeScript": { + "prettier": { + "allowed": true + } + }, + "Vue.js": { + "prettier": { + "allowed": true + } + }, + "XML": { + "prettier": { + "allowed": true, + "plugins": ["@prettier/plugin-xml"] + } + }, + "YAML": { + "prettier": { + "allowed": true + } + } + }, + // Zed's Prettier integration settings. + // Allows to enable/disable formatting with Prettier + // and configure default Prettier, used when no project-level Prettier installation is found. + "prettier": { + // // Whether to consider prettier formatter or not when attempting to format a file. + // "allowed": false, + // + // // Use regular Prettier json configuration. + // // If Prettier is allowed, Zed will use this for its Prettier instance for any applicable file, if + // // the project has no other Prettier installed. + // "plugins": [], + // + // // Use regular Prettier json configuration. + // // If Prettier is allowed, Zed will use this for its Prettier instance for any applicable file, if + // // the project has no other Prettier installed. + // "trailingComma": "es5", + // "tabWidth": 4, + // "semi": false, + // "singleQuote": true + }, + // LSP Specific settings. + "lsp": { + // Specify the LSP name as a key here. + // "rust-analyzer": { + // // These initialization options are merged into Zed's defaults + // "initialization_options": { + // "check": { + // "command": "clippy" // rust-analyzer.check.command (default: "check") + // } + // } + // } + }, + // Vim settings + "vim": { + "use_system_clipboard": "always", + "use_multiline_find": false, + "use_smartcase_find": false + }, + // The server to connect to. If the environment variable + // ZED_SERVER_URL is set, it will override this setting. + "server_url": "https://zed.dev", + // Settings overrides to use when using Zed Preview. + // Mostly useful for developers who are managing multiple instances of Zed. 
+ "preview": { + // "theme": "Andromeda" + }, + // Settings overrides to use when using Zed Nightly. + // Mostly useful for developers who are managing multiple instances of Zed. + "nightly": { + // "theme": "Andromeda" + }, + // Settings overrides to use when using Zed Stable. + // Mostly useful for developers who are managing multiple instances of Zed. + "stable": { + // "theme": "Andromeda" + }, + // Settings overrides to use when using Zed Dev. + // Mostly useful for developers who are managing multiple instances of Zed. + "dev": { + // "theme": "Andromeda" + }, + // Task-related settings. + "task": { + // Whether to show task status indicator in the status bar. Default: true + "show_status_indicator": true + }, + // Whether to show full labels in line indicator or short ones + // + // Values: + // - `short`: "2 s, 15 l, 32 c" + // - `long`: "2 selections, 15 lines, 32 characters" + // Default: long + "line_indicator_format": "long", + // Set a proxy to use. The proxy protocol is specified by the URI scheme. + // + // Supported URI scheme: `http`, `https`, `socks4`, `socks4a`, `socks5`, + // `socks5h`. `http` will be used when no scheme is specified. + // + // By default no proxy will be used, or Zed will try get proxy settings from + // environment variables. + // + // Examples: + // - "proxy" = "socks5://localhost:10808" + // - "proxy" = "http://127.0.0.1:10809" + "proxy": null +} diff --git a/assets/settings/initial_local_settings.json b/assets/settings/initial_local_settings.json new file mode 100644 index 0000000..2fc9a47 --- /dev/null +++ b/assets/settings/initial_local_settings.json @@ -0,0 +1,5 @@ +// Folder-specific settings +// +// For a full list of overridable settings, and general information on folder-specific settings, +// see the documentation: https://zed.dev/docs/configuring-zed#folder-specific-settings +{} diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json new file mode 100644 index 0000000..45c02eb --- /dev/null +++ b/assets/settings/initial_tasks.json @@ -0,0 +1,22 @@ +// Static tasks configuration. +// +// Example: +[ + { + "label": "Example task", + "command": "for i in {1..5}; do echo \"Hello $i/5\"; sleep 1; done", + //"args": [], + // Env overrides for the command, will be appended to the terminal's environment from the settings. + "env": { "foo": "bar" }, + // Current working directory to spawn the command into, defaults to current project root. + //"cwd": "/path/to/working/directory", + // Whether to use a new terminal tab or reuse the existing one to spawn the process, defaults to `false`. + "use_new_terminal": false, + // Whether to allow multiple instances of the same task to be run, or rather wait for the existing ones to finish, defaults to `false`. 
+ "allow_concurrent_runs": false, + // What to do with the terminal pane and tab, after the command was started: + // * `always` — always show the terminal pane, add and focus the corresponding task's tab in it (default) + // * `never` — avoid changing current terminal pane focus, but still add/reuse the task's tab there + "reveal": "always" + } +] diff --git a/assets/settings/initial_user_settings.json b/assets/settings/initial_user_settings.json new file mode 100644 index 0000000..75d4a02 --- /dev/null +++ b/assets/settings/initial_user_settings.json @@ -0,0 +1,12 @@ +// Zed settings +// +// For information on how to configure Zed, see the Zed +// documentation: https://zed.dev/docs/configuring-zed +// +// To see all of Zed's default settings without changing your +// custom settings, run the `open default settings` command +// from the command palette or from `Zed` application menu. +{ + "ui_font_size": 16, + "buffer_font_size": 16 +} diff --git a/assets/sounds/joined_call.wav b/assets/sounds/joined_call.wav new file mode 100644 index 0000000..cf6e5ba Binary files /dev/null and b/assets/sounds/joined_call.wav differ diff --git a/assets/sounds/leave_call.wav b/assets/sounds/leave_call.wav new file mode 100644 index 0000000..478b282 Binary files /dev/null and b/assets/sounds/leave_call.wav differ diff --git a/assets/sounds/mute.wav b/assets/sounds/mute.wav new file mode 100644 index 0000000..69e8456 Binary files /dev/null and b/assets/sounds/mute.wav differ diff --git a/assets/sounds/start_screenshare.wav b/assets/sounds/start_screenshare.wav new file mode 100644 index 0000000..7b72a90 Binary files /dev/null and b/assets/sounds/start_screenshare.wav differ diff --git a/assets/sounds/stop_screenshare.wav b/assets/sounds/stop_screenshare.wav new file mode 100644 index 0000000..1fe13e2 Binary files /dev/null and b/assets/sounds/stop_screenshare.wav differ diff --git a/assets/sounds/unmute.wav b/assets/sounds/unmute.wav new file mode 100644 index 0000000..f8c90f6 Binary files /dev/null and b/assets/sounds/unmute.wav differ diff --git a/assets/themes/.gitkeep b/assets/themes/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/assets/themes/LICENSES b/assets/themes/LICENSES new file mode 100644 index 0000000..f5d9c04 --- /dev/null +++ b/assets/themes/LICENSES @@ -0,0 +1,1013 @@ +## [Andromeda](https://github.com/EliverLara/Andromeda) + +The MIT License (MIT) + +Copyright (c) 2017 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Atelier Cave Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Cave Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Dune Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Dune Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Estuary Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Atelier Estuary Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Forest Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Forest Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Heath Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Heath Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Atelier Lakeside Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Lakeside Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Plateau Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Plateau Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Savanna Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Atelier Savanna Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Seaside Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Seaside Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Sulphurpool Dark](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Atelier Sulphurpool Light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/cave/) + +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Ayu Dark](https://github.com/dempfi/ayu) + +The MIT License (MIT) + +Copyright (c) 2016 Ike Ku + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Ayu Light](https://github.com/dempfi/ayu) + +The MIT License (MIT) + +Copyright (c) 2016 Ike Ku + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Ayu Mirage](https://github.com/dempfi/ayu) + +The MIT License (MIT) + +Copyright (c) 2016 Ike Ku + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Gruvbox Dark](https://github.com/morhetz/gruvbox) + +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Gruvbox Dark Hard](https://github.com/morhetz/gruvbox) + +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Gruvbox Dark Soft](https://github.com/morhetz/gruvbox) + +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Gruvbox Light](https://github.com/morhetz/gruvbox) + +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Gruvbox Light Hard](https://github.com/morhetz/gruvbox) + +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Gruvbox Light Soft](https://github.com/morhetz/gruvbox) + +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [One Dark](https://github.com/atom/atom/tree/master/packages/one-dark-ui) + +The MIT License (MIT) + +Copyright (c) 2014 GitHub Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [One Light](https://github.com/atom/atom/tree/master/packages/one-light-ui) + +The MIT License (MIT) + +Copyright (c) 2014 GitHub Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Rosé Pine](https://github.com/edunfelt/base16-rose-pine-scheme) + +The MIT License (MIT) + +Copyright (c) 2021 Emilia Dunfelt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Rosé Pine Dawn](https://github.com/edunfelt/base16-rose-pine-scheme) + +The MIT License (MIT) + +Copyright (c) 2021 Emilia Dunfelt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** + +## [Rosé Pine Moon](https://github.com/edunfelt/base16-rose-pine-scheme) + +The MIT License (MIT) + +Copyright (c) 2021 Emilia Dunfelt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Sandcastle](https://github.com/gessig/base16-sandcastle-scheme) + +The MIT License (MIT) + +Copyright (c) 2019 George Essig + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Solarized Dark](https://github.com/altercation/solarized) + +The MIT License (MIT) + +Copyright (c) 2011 Ethan Schoonover + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Solarized Light](https://github.com/altercation/solarized) + +The MIT License (MIT) + +Copyright (c) 2011 Ethan Schoonover + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +******************************************************************************** + +## [Summercamp](https://github.com/zoefiri/base16-sc) + +The MIT License (MIT) + +Copyright (c) 2019 Zoe FiriH + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +******************************************************************************** diff --git a/assets/themes/andromeda/LICENSE b/assets/themes/andromeda/LICENSE new file mode 100644 index 0000000..9422ada --- /dev/null +++ b/assets/themes/andromeda/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/andromeda/andromeda.json b/assets/themes/andromeda/andromeda.json new file mode 100644 index 0000000..389b026 --- /dev/null +++ b/assets/themes/andromeda/andromeda.json @@ -0,0 +1,375 @@ +{ + "name": "Andromeda", + "author": "Zed Industries", + "themes": [ + { + "name": "Andromeda", + "appearance": "dark", + "style": { + "border": "#2b2f38ff", + "border.variant": "#252931ff", + "border.focused": "#183934ff", + "border.selected": "#183934ff", + "border.transparent": "#00000000", + "border.disabled": "#292d37ff", + "elevated_surface.background": "#21242bff", + "surface.background": "#21242bff", + "background": "#262933ff", + "element.background": "#21242bff", + "element.hover": "#252931ff", + "element.active": "#2a2f39ff", + "element.selected": "#2a2f39ff", + "element.disabled": "#21242bff", + "drop_target.background": "#aca8ae80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#252931ff", + "ghost_element.active": "#2a2f39ff", + "ghost_element.selected": "#2a2f39ff", + "ghost_element.disabled": "#21242bff", + "text": "#f7f7f8ff", + "text.muted": "#aca8aeff", + "text.placeholder": "#6b6b73ff", + "text.disabled": "#6b6b73ff", + "text.accent": "#10a793ff", + "icon": "#f7f7f8ff", + "icon.muted": "#aca8aeff", + "icon.disabled": "#6b6b73ff", + "icon.placeholder": "#aca8aeff", + "icon.accent": "#10a793ff", + "status_bar.background": "#262933ff", + "title_bar.background": "#262933ff", + "toolbar.background": "#1e2025ff", + "tab_bar.background": "#21242bff", + "tab.inactive_background": "#21242bff", + "tab.active_background": "#1e2025ff", + "search.match_background": "#11a79366", + "panel.background": "#21242bff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f7f7f84c", + "scrollbar.thumb.hover_background": "#252931ff", + "scrollbar.thumb.border": "#252931ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#21232aff", + "editor.foreground": "#f7f7f8ff", + "editor.background": "#1e2025ff", + "editor.gutter.background": "#1e2025ff", + "editor.subheader.background": "#21242bff", + "editor.active_line.background": "#21242bbf", + 
"editor.highlighted_line.background": "#21242bff", + "editor.line_number": "#f7f7f859", + "editor.active_line_number": "#f7f7f8ff", + "editor.invisible": "#64646dff", + "editor.wrap_guide": "#f7f7f80d", + "editor.active_wrap_guide": "#f7f7f81a", + "editor.document_highlight.read_background": "#10a7931a", + "editor.document_highlight.write_background": "#64646d66", + "terminal.background": "#1e2025ff", + "terminal.foreground": "#f7f7f8ff", + "terminal.bright_foreground": "#f7f7f8ff", + "terminal.dim_foreground": "#1e2025ff", + "terminal.ansi.black": "#1e2025ff", + "terminal.ansi.bright_black": "#40434cff", + "terminal.ansi.dim_black": "#f7f7f8ff", + "terminal.ansi.red": "#f82871ff", + "terminal.ansi.bright_red": "#8e0f3aff", + "terminal.ansi.dim_red": "#ffa3b5ff", + "terminal.ansi.green": "#96df71ff", + "terminal.ansi.bright_green": "#457c38ff", + "terminal.ansi.dim_green": "#cef0b9ff", + "terminal.ansi.yellow": "#fee56cff", + "terminal.ansi.bright_yellow": "#958334ff", + "terminal.ansi.dim_yellow": "#fef1b7ff", + "terminal.ansi.blue": "#10a793ff", + "terminal.ansi.bright_blue": "#1a5148ff", + "terminal.ansi.dim_blue": "#9cd4c7ff", + "terminal.ansi.magenta": "#c74cecff", + "terminal.ansi.bright_magenta": "#682681ff", + "terminal.ansi.dim_magenta": "#e7abf7ff", + "terminal.ansi.cyan": "#08e7c5ff", + "terminal.ansi.bright_cyan": "#008169ff", + "terminal.ansi.dim_cyan": "#a9f4e1ff", + "terminal.ansi.white": "#f7f7f8ff", + "terminal.ansi.bright_white": "#f7f7f8ff", + "terminal.ansi.dim_white": "#87858cff", + "link_text.hover": "#10a793ff", + "conflict": "#fee56cff", + "conflict.background": "#5c5014ff", + "conflict.border": "#796b26ff", + "created": "#96df71ff", + "created.background": "#184618ff", + "created.border": "#306129ff", + "deleted": "#f82871ff", + "deleted.background": "#54051bff", + "deleted.border": "#72092aff", + "error": "#f82871ff", + "error.background": "#54051bff", + "error.border": "#72092aff", + "hidden": "#6b6b73ff", + "hidden.background": "#262933ff", + "hidden.border": "#292d37ff", + "hint": "#618399ff", + "hint.background": "#12231fff", + "hint.border": "#183934ff", + "ignored": "#6b6b73ff", + "ignored.background": "#262933ff", + "ignored.border": "#2b2f38ff", + "info": "#10a793ff", + "info.background": "#12231fff", + "info.border": "#183934ff", + "modified": "#fee56cff", + "modified.background": "#5c5014ff", + "modified.border": "#796b26ff", + "predictive": "#315f70ff", + "predictive.background": "#184618ff", + "predictive.border": "#306129ff", + "renamed": "#10a793ff", + "renamed.background": "#12231fff", + "renamed.border": "#183934ff", + "success": "#96df71ff", + "success.background": "#184618ff", + "success.border": "#306129ff", + "unreachable": "#aca8aeff", + "unreachable.background": "#262933ff", + "unreachable.border": "#2b2f38ff", + "warning": "#fee56cff", + "warning.background": "#5c5014ff", + "warning.border": "#796b26ff", + "players": [ + { + "cursor": "#10a793ff", + "background": "#10a793ff", + "selection": "#10a7933d" + }, + { + "cursor": "#c74cecff", + "background": "#c74cecff", + "selection": "#c74cec3d" + }, + { + "cursor": "#f29c14ff", + "background": "#f29c14ff", + "selection": "#f29c143d" + }, + { + "cursor": "#893ea6ff", + "background": "#893ea6ff", + "selection": "#893ea63d" + }, + { + "cursor": "#08e7c5ff", + "background": "#08e7c5ff", + "selection": "#08e7c53d" + }, + { + "cursor": "#f82871ff", + "background": "#f82871ff", + "selection": "#f828713d" + }, + { + "cursor": "#fee56cff", + "background": "#fee56cff", + "selection": "#fee56c3d" + }, + { 
+ "cursor": "#96df71ff", + "background": "#96df71ff", + "selection": "#96df713d" + } + ], + "syntax": { + "attribute": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#96df71ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#afabb1ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#afabb1ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#96df71ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f7f7f8ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#10a793ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#fee56cff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#618399ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#f29c14ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#96df71ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#96df71ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#315f70ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f7f7f8ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#f7f7f8ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#d8d5dbff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#d8d5dbff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#d8d5dbff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#d8d5dbff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#d8d5dbff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#afabb1ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#f29c14ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f7f7f8ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#08e7c5ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#f7f7f8ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#10a793ff", + "font_style": null, + "font_weight": null 
+ } + } + } + } + ] +} diff --git a/assets/themes/atelier/LICENSE b/assets/themes/atelier/LICENSE new file mode 100644 index 0000000..47c46d0 --- /dev/null +++ b/assets/themes/atelier/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2023 Bram de Haan, http://atelierbramdehaan.nl + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/atelier/atelier.json b/assets/themes/atelier/atelier.json new file mode 100644 index 0000000..d8fae1d --- /dev/null +++ b/assets/themes/atelier/atelier.json @@ -0,0 +1,7686 @@ +{ + "name": "Atelier", + "author": "Zed Industries", + "themes": [ + { + "name": "Atelier Cave Dark", + "appearance": "dark", + "style": { + "border": "#56505eff", + "border.variant": "#332f38ff", + "border.focused": "#222953ff", + "border.selected": "#222953ff", + "border.transparent": "#00000000", + "border.disabled": "#48434fff", + "elevated_surface.background": "#221f26ff", + "surface.background": "#221f26ff", + "background": "#3a353fff", + "element.background": "#221f26ff", + "element.hover": "#332f38ff", + "element.active": "#544f5cff", + "element.selected": "#544f5cff", + "element.disabled": "#221f26ff", + "drop_target.background": "#89859180", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#332f38ff", + "ghost_element.active": "#544f5cff", + "ghost_element.selected": "#544f5cff", + "ghost_element.disabled": "#221f26ff", + "text": "#efecf4ff", + "text.muted": "#898591ff", + "text.placeholder": "#756f7eff", + "text.disabled": "#756f7eff", + "text.accent": "#566ddaff", + "icon": "#efecf4ff", + "icon.muted": "#898591ff", + "icon.disabled": "#756f7eff", + "icon.placeholder": "#898591ff", + "icon.accent": "#566ddaff", + "status_bar.background": "#3a353fff", + "title_bar.background": "#3a353fff", + "toolbar.background": "#19171cff", + "tab_bar.background": "#221f26ff", + "tab.inactive_background": "#221f26ff", + "tab.active_background": "#19171cff", + "search.match_background": "#576dda66", + "panel.background": "#221f26ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#efecf44c", + "scrollbar.thumb.hover_background": "#332f38ff", + "scrollbar.thumb.border": "#332f38ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#201e24ff", + "editor.foreground": "#e2dfe7ff", + "editor.background": "#19171cff", + "editor.gutter.background": "#19171cff", + "editor.subheader.background": "#221f26ff", + "editor.active_line.background": "#221f26bf", + "editor.highlighted_line.background": 
"#221f26ff", + "editor.line_number": "#efecf459", + "editor.active_line_number": "#efecf4ff", + "editor.invisible": "#726c7aff", + "editor.wrap_guide": "#efecf40d", + "editor.active_wrap_guide": "#efecf41a", + "editor.document_highlight.read_background": "#566dda1a", + "editor.document_highlight.write_background": "#726c7a66", + "terminal.background": "#19171cff", + "terminal.foreground": "#efecf4ff", + "terminal.bright_foreground": "#efecf4ff", + "terminal.dim_foreground": "#19171cff", + "terminal.ansi.black": "#19171cff", + "terminal.ansi.bright_black": "#635d6bff", + "terminal.ansi.dim_black": "#efecf4ff", + "terminal.ansi.red": "#be4677ff", + "terminal.ansi.bright_red": "#5c283cff", + "terminal.ansi.dim_red": "#e3a4b9ff", + "terminal.ansi.green": "#2b9292ff", + "terminal.ansi.bright_green": "#1f4747ff", + "terminal.ansi.dim_green": "#9dc8c8ff", + "terminal.ansi.yellow": "#a06d3aff", + "terminal.ansi.bright_yellow": "#4e3821ff", + "terminal.ansi.dim_yellow": "#d4b499ff", + "terminal.ansi.blue": "#566ddaff", + "terminal.ansi.bright_blue": "#2d376fff", + "terminal.ansi.dim_blue": "#b3b3eeff", + "terminal.ansi.magenta": "#bf41bfff", + "terminal.ansi.bright_magenta": "#60255aff", + "terminal.ansi.dim_magenta": "#e3a4dfff", + "terminal.ansi.cyan": "#3a8bc6ff", + "terminal.ansi.bright_cyan": "#26435eff", + "terminal.ansi.dim_cyan": "#a6c4e3ff", + "terminal.ansi.white": "#efecf4ff", + "terminal.ansi.bright_white": "#efecf4ff", + "terminal.ansi.dim_white": "#807b89ff", + "link_text.hover": "#566ddaff", + "conflict": "#a06d3aff", + "conflict.background": "#231a12ff", + "conflict.border": "#392a19ff", + "created": "#2b9292ff", + "created.background": "#132020ff", + "created.border": "#1a3333ff", + "deleted": "#be4677ff", + "deleted.background": "#28151cff", + "deleted.border": "#421e2dff", + "error": "#be4677ff", + "error.background": "#28151cff", + "error.border": "#421e2dff", + "hidden": "#756f7eff", + "hidden.background": "#3a353fff", + "hidden.border": "#48434fff", + "hint": "#706897ff", + "hint.background": "#161a35ff", + "hint.border": "#222953ff", + "ignored": "#756f7eff", + "ignored.background": "#3a353fff", + "ignored.border": "#56505eff", + "info": "#566ddaff", + "info.background": "#161a35ff", + "info.border": "#222953ff", + "modified": "#a06d3aff", + "modified.background": "#231a12ff", + "modified.border": "#392a19ff", + "predictive": "#615787ff", + "predictive.background": "#132020ff", + "predictive.border": "#1a3333ff", + "renamed": "#566ddaff", + "renamed.background": "#161a35ff", + "renamed.border": "#222953ff", + "success": "#2b9292ff", + "success.background": "#132020ff", + "success.border": "#1a3333ff", + "unreachable": "#898591ff", + "unreachable.background": "#3a353fff", + "unreachable.border": "#56505eff", + "warning": "#a06d3aff", + "warning.background": "#231a12ff", + "warning.border": "#392a19ff", + "players": [ + { + "cursor": "#566ddaff", + "background": "#566ddaff", + "selection": "#566dda3d" + }, + { + "cursor": "#bf41bfff", + "background": "#bf41bfff", + "selection": "#bf41bf3d" + }, + { + "cursor": "#aa563bff", + "background": "#aa563bff", + "selection": "#aa563b3d" + }, + { + "cursor": "#955ae6ff", + "background": "#955ae6ff", + "selection": "#955ae63d" + }, + { + "cursor": "#3a8bc6ff", + "background": "#3a8bc6ff", + "selection": "#3a8bc63d" + }, + { + "cursor": "#be4677ff", + "background": "#be4677ff", + "selection": "#be46773d" + }, + { + "cursor": "#a06d3aff", + "background": "#a06d3aff", + "selection": "#a06d3a3d" + }, + { + "cursor": "#2b9292ff", + 
"background": "#2b9292ff", + "selection": "#2b92923d" + } + ], + "syntax": { + "attribute": { + "color": "#566ddaff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#2b9292ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#655f6dff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#8b8792ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#2b9292ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#566ddaff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#efecf4ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#566ddaff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#566ddaff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#aa563bff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#576cdbff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#576cdbff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#706897ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#9559e7ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#566ddaff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#aa563bff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#2b9292ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#aa563bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#8b8792ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#615787ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#efecf4ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e2dfe7ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#be4677ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#e2dfe7ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#8b8792ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#8b8792ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#e2dfe7ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#bf3fbfff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#299292ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#8b8792ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#388bc6ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#bf3fbfff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#299292ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#566ddaff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#aa563bff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#efecf4ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null 
+ }, + "variable": { + "color": "#e2dfe7ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#9559e7ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Cave Light", + "appearance": "light", + "style": { + "border": "#8f8b96ff", + "border.variant": "#cbc8d1ff", + "border.focused": "#c8c7f2ff", + "border.selected": "#c8c7f2ff", + "border.transparent": "#00000000", + "border.disabled": "#a7a3adff", + "elevated_surface.background": "#e6e3ebff", + "surface.background": "#e6e3ebff", + "background": "#bfbcc5ff", + "element.background": "#e6e3ebff", + "element.hover": "#cbc8d1ff", + "element.active": "#918d98ff", + "element.selected": "#918d98ff", + "element.disabled": "#e6e3ebff", + "drop_target.background": "#5a546280", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#cbc8d1ff", + "ghost_element.active": "#918d98ff", + "ghost_element.selected": "#918d98ff", + "ghost_element.disabled": "#e6e3ebff", + "text": "#19171cff", + "text.muted": "#5a5462ff", + "text.placeholder": "#6e6876ff", + "text.disabled": "#6e6876ff", + "text.accent": "#586cdaff", + "icon": "#19171cff", + "icon.muted": "#5a5462ff", + "icon.disabled": "#6e6876ff", + "icon.placeholder": "#5a5462ff", + "icon.accent": "#586cdaff", + "status_bar.background": "#bfbcc5ff", + "title_bar.background": "#bfbcc5ff", + "toolbar.background": "#efecf4ff", + "tab_bar.background": "#e6e3ebff", + "tab.inactive_background": "#e6e3ebff", + "tab.active_background": "#efecf4ff", + "search.match_background": "#586dda66", + "panel.background": "#e6e3ebff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#19171c4c", + "scrollbar.thumb.hover_background": "#cbc8d1ff", + "scrollbar.thumb.border": "#cbc8d1ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#e8e5edff", + "editor.foreground": "#26232aff", + "editor.background": "#efecf4ff", + "editor.gutter.background": "#efecf4ff", + "editor.subheader.background": "#e6e3ebff", + "editor.active_line.background": "#e6e3ebbf", + "editor.highlighted_line.background": "#e6e3ebff", + "editor.line_number": "#19171c59", + "editor.active_line_number": "#19171cff", + "editor.invisible": "#726c7aff", + "editor.wrap_guide": "#19171c0d", + "editor.active_wrap_guide": "#19171c1a", + "editor.document_highlight.read_background": "#586cda1a", + "editor.document_highlight.write_background": "#726c7a66", + "terminal.background": "#efecf4ff", + "terminal.foreground": "#19171cff", + "terminal.bright_foreground": "#19171cff", + "terminal.dim_foreground": "#efecf4ff", + "terminal.ansi.black": "#efecf4ff", + "terminal.ansi.bright_black": "#807b89ff", + "terminal.ansi.dim_black": "#19171cff", + "terminal.ansi.red": "#bd4677ff", + "terminal.ansi.bright_red": "#e3a4b9ff", + "terminal.ansi.dim_red": "#5c283cff", + "terminal.ansi.green": "#2b9292ff", + "terminal.ansi.bright_green": "#9dc8c8ff", + "terminal.ansi.dim_green": "#1f4747ff", + "terminal.ansi.yellow": "#a06e3bff", + "terminal.ansi.bright_yellow": "#d4b499ff", + "terminal.ansi.dim_yellow": "#4e3821ff", + "terminal.ansi.blue": "#586cdaff", + "terminal.ansi.bright_blue": "#b3b3eeff", + "terminal.ansi.dim_blue": "#2d376fff", + "terminal.ansi.magenta": "#bf41bfff", + "terminal.ansi.bright_magenta": "#e3a4dfff", + "terminal.ansi.dim_magenta": "#60255aff", + "terminal.ansi.cyan": "#3a8bc6ff", + "terminal.ansi.bright_cyan": 
"#a6c4e3ff", + "terminal.ansi.dim_cyan": "#26435eff", + "terminal.ansi.white": "#19171cff", + "terminal.ansi.bright_white": "#19171cff", + "terminal.ansi.dim_white": "#635d6bff", + "link_text.hover": "#586cdaff", + "conflict": "#a06e3bff", + "conflict.background": "#eee0d5ff", + "conflict.border": "#e0c9b5ff", + "created": "#2b9292ff", + "created.background": "#d7e9e8ff", + "created.border": "#b9d7d6ff", + "deleted": "#bd4677ff", + "deleted.background": "#f4d9e1ff", + "deleted.border": "#ecbecdff", + "error": "#bd4677ff", + "error.background": "#f4d9e1ff", + "error.border": "#ecbecdff", + "hidden": "#6e6876ff", + "hidden.background": "#bfbcc5ff", + "hidden.border": "#a7a3adff", + "hint": "#776d9dff", + "hint.background": "#e1e0f9ff", + "hint.border": "#c8c7f2ff", + "ignored": "#6e6876ff", + "ignored.background": "#bfbcc5ff", + "ignored.border": "#8f8b96ff", + "info": "#586cdaff", + "info.background": "#e1e0f9ff", + "info.border": "#c8c7f2ff", + "modified": "#a06e3bff", + "modified.background": "#eee0d5ff", + "modified.border": "#e0c9b5ff", + "predictive": "#887fafff", + "predictive.background": "#d7e9e8ff", + "predictive.border": "#b9d7d6ff", + "renamed": "#586cdaff", + "renamed.background": "#e1e0f9ff", + "renamed.border": "#c8c7f2ff", + "success": "#2b9292ff", + "success.background": "#d7e9e8ff", + "success.border": "#b9d7d6ff", + "unreachable": "#5a5462ff", + "unreachable.background": "#bfbcc5ff", + "unreachable.border": "#8f8b96ff", + "warning": "#a06e3bff", + "warning.background": "#eee0d5ff", + "warning.border": "#e0c9b5ff", + "players": [ + { + "cursor": "#586cdaff", + "background": "#586cdaff", + "selection": "#586cda3d" + }, + { + "cursor": "#bf41bfff", + "background": "#bf41bfff", + "selection": "#bf41bf3d" + }, + { + "cursor": "#aa573cff", + "background": "#aa573cff", + "selection": "#aa573c3d" + }, + { + "cursor": "#955ae6ff", + "background": "#955ae6ff", + "selection": "#955ae63d" + }, + { + "cursor": "#3a8bc6ff", + "background": "#3a8bc6ff", + "selection": "#3a8bc63d" + }, + { + "cursor": "#bd4677ff", + "background": "#bd4677ff", + "selection": "#bd46773d" + }, + { + "cursor": "#a06e3bff", + "background": "#a06e3bff", + "selection": "#a06e3b3d" + }, + { + "cursor": "#2b9292ff", + "background": "#2b9292ff", + "selection": "#2b92923d" + } + ], + "syntax": { + "attribute": { + "color": "#586cdaff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#2b9292ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#7d7787ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#585260ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#2b9292ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#586cdaff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#19171cff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#586cdaff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#586cdaff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#aa573cff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#576cdbff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#576cdbff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#776d9dff", + "font_style": 
null, + "font_weight": 700 + }, + "keyword": { + "color": "#9559e7ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#586cdaff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#aa573cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#2b9292ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#aa563bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#585260ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#887fafff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#19171cff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#26232aff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#be4677ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#26232aff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#585260ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#585260ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#26232aff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#bf3fbfff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#299292ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#585260ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#388bc6ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#bf3fbfff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#299292ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#586cdaff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#aa573cff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#19171cff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#26232aff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#9559e7ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Dune Dark", + "appearance": "dark", + "style": { + "border": "#6c695cff", + "border.variant": "#3b3933ff", + "border.focused": "#262f56ff", + "border.selected": "#262f56ff", + "border.transparent": "#00000000", + "border.disabled": "#58564bff", + "elevated_surface.background": "#262622ff", + "surface.background": "#262622ff", + "background": "#45433bff", + "element.background": "#262622ff", + "element.hover": "#3b3933ff", + "element.active": "#6a675aff", + "element.selected": "#6a675aff", + "element.disabled": "#262622ff", + "drop_target.background": "#a4a08b80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#3b3933ff", + "ghost_element.active": "#6a675aff", + "ghost_element.selected": "#6a675aff", + "ghost_element.disabled": "#262622ff", + "text": "#fefbecff", + "text.muted": "#a4a08bff", + "text.placeholder": "#8f8b77ff", + "text.disabled": "#8f8b77ff", + "text.accent": "#6684e0ff", + "icon": "#fefbecff", + "icon.muted": "#a4a08bff", + "icon.disabled": "#8f8b77ff", 
+ "icon.placeholder": "#a4a08bff", + "icon.accent": "#6684e0ff", + "status_bar.background": "#45433bff", + "title_bar.background": "#45433bff", + "toolbar.background": "#20201dff", + "tab_bar.background": "#262622ff", + "tab.inactive_background": "#262622ff", + "tab.active_background": "#20201dff", + "search.match_background": "#6684e066", + "panel.background": "#262622ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#fefbec4c", + "scrollbar.thumb.hover_background": "#3b3933ff", + "scrollbar.thumb.border": "#3b3933ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#252521ff", + "editor.foreground": "#e8e4cfff", + "editor.background": "#20201dff", + "editor.gutter.background": "#20201dff", + "editor.subheader.background": "#262622ff", + "editor.active_line.background": "#262622bf", + "editor.highlighted_line.background": "#262622ff", + "editor.line_number": "#fefbec59", + "editor.active_line_number": "#fefbecff", + "editor.invisible": "#8b8773ff", + "editor.wrap_guide": "#fefbec0d", + "editor.active_wrap_guide": "#fefbec1a", + "editor.document_highlight.read_background": "#6684e01a", + "editor.document_highlight.write_background": "#8b877366", + "terminal.background": "#20201dff", + "terminal.foreground": "#fefbecff", + "terminal.bright_foreground": "#fefbecff", + "terminal.dim_foreground": "#20201dff", + "terminal.ansi.black": "#20201dff", + "terminal.ansi.bright_black": "#7a7766ff", + "terminal.ansi.dim_black": "#fefbecff", + "terminal.ansi.red": "#d73837ff", + "terminal.ansi.bright_red": "#781c1eff", + "terminal.ansi.dim_red": "#f7a195ff", + "terminal.ansi.green": "#5fac39ff", + "terminal.ansi.bright_green": "#325322ff", + "terminal.ansi.dim_green": "#b3d69cff", + "terminal.ansi.yellow": "#ae9414ff", + "terminal.ansi.bright_yellow": "#574814ff", + "terminal.ansi.dim_yellow": "#dcc98eff", + "terminal.ansi.blue": "#6684e0ff", + "terminal.ansi.bright_blue": "#334173ff", + "terminal.ansi.dim_blue": "#b8c0f1ff", + "terminal.ansi.magenta": "#d43651ff", + "terminal.ansi.bright_magenta": "#721d2aff", + "terminal.ansi.dim_magenta": "#f29fa4ff", + "terminal.ansi.cyan": "#20ad83ff", + "terminal.ansi.bright_cyan": "#1d5341ff", + "terminal.ansi.dim_cyan": "#9ed7c0ff", + "terminal.ansi.white": "#fefbecff", + "terminal.ansi.bright_white": "#fefbecff", + "terminal.ansi.dim_white": "#9b9782ff", + "link_text.hover": "#6684e0ff", + "conflict": "#ae9414ff", + "conflict.background": "#2a200dff", + "conflict.border": "#413413ff", + "created": "#5fac39ff", + "created.background": "#192412ff", + "created.border": "#273c1bff", + "deleted": "#d73837ff", + "deleted.background": "#440d11ff", + "deleted.border": "#5e1519ff", + "error": "#d73837ff", + "error.background": "#440d11ff", + "error.border": "#5e1519ff", + "hidden": "#8f8b77ff", + "hidden.background": "#45433bff", + "hidden.border": "#58564bff", + "hint": "#b17272ff", + "hint.background": "#171e38ff", + "hint.border": "#262f56ff", + "ignored": "#8f8b77ff", + "ignored.background": "#45433bff", + "ignored.border": "#6c695cff", + "info": "#6684e0ff", + "info.background": "#171e38ff", + "info.border": "#262f56ff", + "modified": "#ae9414ff", + "modified.background": "#2a200dff", + "modified.border": "#413413ff", + "predictive": "#9c6262ff", + "predictive.background": "#192412ff", + "predictive.border": "#273c1bff", + "renamed": "#6684e0ff", + "renamed.background": "#171e38ff", + "renamed.border": "#262f56ff", + "success": "#5fac39ff", + "success.background": "#192412ff", + 
"success.border": "#273c1bff", + "unreachable": "#a4a08bff", + "unreachable.background": "#45433bff", + "unreachable.border": "#6c695cff", + "warning": "#ae9414ff", + "warning.background": "#2a200dff", + "warning.border": "#413413ff", + "players": [ + { + "cursor": "#6684e0ff", + "background": "#6684e0ff", + "selection": "#6684e03d" + }, + { + "cursor": "#d43651ff", + "background": "#d43651ff", + "selection": "#d436513d" + }, + { + "cursor": "#b65611ff", + "background": "#b65611ff", + "selection": "#b656113d" + }, + { + "cursor": "#b854d3ff", + "background": "#b854d3ff", + "selection": "#b854d33d" + }, + { + "cursor": "#20ad83ff", + "background": "#20ad83ff", + "selection": "#20ad833d" + }, + { + "cursor": "#d73837ff", + "background": "#d73837ff", + "selection": "#d738373d" + }, + { + "cursor": "#ae9414ff", + "background": "#ae9414ff", + "selection": "#ae94143d" + }, + { + "cursor": "#5fac39ff", + "background": "#5fac39ff", + "selection": "#5fac393d" + } + ], + "syntax": { + "attribute": { + "color": "#6684e0ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#5fac39ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#7d7a68ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#a6a28cff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#5fac39ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#6684e0ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#fefbecff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#6684e0ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#6684e0ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#b65611ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#6583e1ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#6583e1ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#ae9512ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#b17272ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#b854d4ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#6684e0ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#b65611ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#5fac39ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#b65610ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#a6a28cff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#9c6262ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#fefbecff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e8e4cfff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#d73737ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#e8e4cfff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#a6a28cff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#a6a28cff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#e8e4cfff", + "font_style": null, + "font_weight": null + }, + 
"punctuation.special": { + "color": "#d43451ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#5fac38ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#a6a28cff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#1ead82ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#d43451ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#5fac38ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#6684e0ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#b65611ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#fefbecff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#ae9512ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#e8e4cfff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#b854d4ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#ae9512ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Dune Light", + "appearance": "light", + "style": { + "border": "#a8a48eff", + "border.variant": "#d7d3beff", + "border.focused": "#cdd1f5ff", + "border.selected": "#cdd1f5ff", + "border.transparent": "#00000000", + "border.disabled": "#bbb7a1ff", + "elevated_surface.background": "#eeebd7ff", + "surface.background": "#eeebd7ff", + "background": "#cecab4ff", + "element.background": "#eeebd7ff", + "element.hover": "#d7d3beff", + "element.active": "#aaa690ff", + "element.selected": "#aaa690ff", + "element.disabled": "#eeebd7ff", + "drop_target.background": "#706d5f80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#d7d3beff", + "ghost_element.active": "#aaa690ff", + "ghost_element.selected": "#aaa690ff", + "ghost_element.disabled": "#eeebd7ff", + "text": "#20201dff", + "text.muted": "#706d5fff", + "text.placeholder": "#878471ff", + "text.disabled": "#878471ff", + "text.accent": "#6684dfff", + "icon": "#20201dff", + "icon.muted": "#706d5fff", + "icon.disabled": "#878471ff", + "icon.placeholder": "#706d5fff", + "icon.accent": "#6684dfff", + "status_bar.background": "#cecab4ff", + "title_bar.background": "#cecab4ff", + "toolbar.background": "#fefbecff", + "tab_bar.background": "#eeebd7ff", + "tab.inactive_background": "#eeebd7ff", + "tab.active_background": "#fefbecff", + "search.match_background": "#6784e066", + "panel.background": "#eeebd7ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#20201d4c", + "scrollbar.thumb.hover_background": "#d7d3beff", + "scrollbar.thumb.border": "#d7d3beff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#f2eedcff", + "editor.foreground": "#292824ff", + "editor.background": "#fefbecff", + "editor.gutter.background": "#fefbecff", + "editor.subheader.background": "#eeebd7ff", + "editor.active_line.background": "#eeebd7bf", + "editor.highlighted_line.background": "#eeebd7ff", + "editor.line_number": "#20201d59", + "editor.active_line_number": "#20201dff", + "editor.invisible": "#8b8773ff", + "editor.wrap_guide": "#20201d0d", + "editor.active_wrap_guide": "#20201d1a", + "editor.document_highlight.read_background": "#6684df1a", + "editor.document_highlight.write_background": "#8b877366", + "terminal.background": "#fefbecff", + "terminal.foreground": "#20201dff", + 
"terminal.bright_foreground": "#20201dff", + "terminal.dim_foreground": "#fefbecff", + "terminal.ansi.black": "#fefbecff", + "terminal.ansi.bright_black": "#9b9782ff", + "terminal.ansi.dim_black": "#20201dff", + "terminal.ansi.red": "#d73737ff", + "terminal.ansi.bright_red": "#f7a195ff", + "terminal.ansi.dim_red": "#781c1eff", + "terminal.ansi.green": "#61ac39ff", + "terminal.ansi.bright_green": "#b3d69cff", + "terminal.ansi.dim_green": "#325322ff", + "terminal.ansi.yellow": "#ae9414ff", + "terminal.ansi.bright_yellow": "#dcc98eff", + "terminal.ansi.dim_yellow": "#574814ff", + "terminal.ansi.blue": "#6684dfff", + "terminal.ansi.bright_blue": "#b8c0f1ff", + "terminal.ansi.dim_blue": "#334173ff", + "terminal.ansi.magenta": "#d43652ff", + "terminal.ansi.bright_magenta": "#f29fa4ff", + "terminal.ansi.dim_magenta": "#721d2aff", + "terminal.ansi.cyan": "#21ad82ff", + "terminal.ansi.bright_cyan": "#9ed7c0ff", + "terminal.ansi.dim_cyan": "#1d5341ff", + "terminal.ansi.white": "#20201dff", + "terminal.ansi.bright_white": "#20201dff", + "terminal.ansi.dim_white": "#7a7766ff", + "link_text.hover": "#6684dfff", + "conflict": "#ae9414ff", + "conflict.background": "#f2e8d1ff", + "conflict.border": "#e7d7aeff", + "created": "#61ac39ff", + "created.background": "#e0eed6ff", + "created.border": "#c9e1b7ff", + "deleted": "#d73737ff", + "deleted.background": "#fed8d3ff", + "deleted.border": "#fcbcb2ff", + "error": "#d73737ff", + "error.background": "#fed8d3ff", + "error.border": "#fcbcb2ff", + "hidden": "#878471ff", + "hidden.background": "#cecab4ff", + "hidden.border": "#bbb7a1ff", + "hint": "#b37979ff", + "hint.background": "#e3e5faff", + "hint.border": "#cdd1f5ff", + "ignored": "#878471ff", + "ignored.background": "#cecab4ff", + "ignored.border": "#a8a48eff", + "info": "#6684dfff", + "info.background": "#e3e5faff", + "info.border": "#cdd1f5ff", + "modified": "#ae9414ff", + "modified.background": "#f2e8d1ff", + "modified.border": "#e7d7aeff", + "predictive": "#c88a8aff", + "predictive.background": "#e0eed6ff", + "predictive.border": "#c9e1b7ff", + "renamed": "#6684dfff", + "renamed.background": "#e3e5faff", + "renamed.border": "#cdd1f5ff", + "success": "#61ac39ff", + "success.background": "#e0eed6ff", + "success.border": "#c9e1b7ff", + "unreachable": "#706d5fff", + "unreachable.background": "#cecab4ff", + "unreachable.border": "#a8a48eff", + "warning": "#ae9414ff", + "warning.background": "#f2e8d1ff", + "warning.border": "#e7d7aeff", + "players": [ + { + "cursor": "#6684dfff", + "background": "#6684dfff", + "selection": "#6684df3d" + }, + { + "cursor": "#d43652ff", + "background": "#d43652ff", + "selection": "#d436523d" + }, + { + "cursor": "#b65712ff", + "background": "#b65712ff", + "selection": "#b657123d" + }, + { + "cursor": "#b755d3ff", + "background": "#b755d3ff", + "selection": "#b755d33d" + }, + { + "cursor": "#21ad82ff", + "background": "#21ad82ff", + "selection": "#21ad823d" + }, + { + "cursor": "#d73737ff", + "background": "#d73737ff", + "selection": "#d737373d" + }, + { + "cursor": "#ae9414ff", + "background": "#ae9414ff", + "selection": "#ae94143d" + }, + { + "cursor": "#61ac39ff", + "background": "#61ac39ff", + "selection": "#61ac393d" + } + ], + "syntax": { + "attribute": { + "color": "#6684dfff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#61ac39ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#999580ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#6e6b5eff", + "font_style": null, + 
"font_weight": null + }, + "constant": { + "color": "#61ac39ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#6684dfff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#20201dff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#6684dfff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#6684dfff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#b65712ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#6583e1ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#6583e1ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#ae9512ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#b37979ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#b854d4ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#6684dfff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#b65712ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#61ac39ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#b65610ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#6e6b5eff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#c88a8aff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#20201dff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#292824ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#d73737ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#292824ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#6e6b5eff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#6e6b5eff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#292824ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#d43451ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#5fac38ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#6e6b5eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#1ead82ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#d43451ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#5fac38ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#6684dfff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#b65712ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#20201dff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#ae9512ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#292824ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#b854d4ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#ae9512ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Estuary Dark", + "appearance": "dark", + "style": { + "border": "#5d5c4cff", + 
"border.variant": "#3c3b31ff", + "border.focused": "#1c3927ff", + "border.selected": "#1c3927ff", + "border.transparent": "#00000000", + "border.disabled": "#504f41ff", + "elevated_surface.background": "#2c2b23ff", + "surface.background": "#2c2b23ff", + "background": "#424136ff", + "element.background": "#2c2b23ff", + "element.hover": "#3c3b31ff", + "element.active": "#5c5b4bff", + "element.selected": "#5c5b4bff", + "element.disabled": "#2c2b23ff", + "drop_target.background": "#91907f80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#3c3b31ff", + "ghost_element.active": "#5c5b4bff", + "ghost_element.selected": "#5c5b4bff", + "ghost_element.disabled": "#2c2b23ff", + "text": "#f4f3ecff", + "text.muted": "#91907fff", + "text.placeholder": "#7d7c6aff", + "text.disabled": "#7d7c6aff", + "text.accent": "#36a165ff", + "icon": "#f4f3ecff", + "icon.muted": "#91907fff", + "icon.disabled": "#7d7c6aff", + "icon.placeholder": "#91907fff", + "icon.accent": "#36a165ff", + "status_bar.background": "#424136ff", + "title_bar.background": "#424136ff", + "toolbar.background": "#22221bff", + "tab_bar.background": "#2c2b23ff", + "tab.inactive_background": "#2c2b23ff", + "tab.active_background": "#22221bff", + "search.match_background": "#37a16666", + "panel.background": "#2c2b23ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f4f3ec4c", + "scrollbar.thumb.hover_background": "#3c3b31ff", + "scrollbar.thumb.border": "#3c3b31ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#2a2922ff", + "editor.foreground": "#e7e6dfff", + "editor.background": "#22221bff", + "editor.gutter.background": "#22221bff", + "editor.subheader.background": "#2c2b23ff", + "editor.active_line.background": "#2c2b23bf", + "editor.highlighted_line.background": "#2c2b23ff", + "editor.line_number": "#f4f3ec59", + "editor.active_line_number": "#f4f3ecff", + "editor.invisible": "#7a7867ff", + "editor.wrap_guide": "#f4f3ec0d", + "editor.active_wrap_guide": "#f4f3ec1a", + "editor.document_highlight.read_background": "#36a1651a", + "editor.document_highlight.write_background": "#7a786766", + "terminal.background": "#22221bff", + "terminal.foreground": "#f4f3ecff", + "terminal.bright_foreground": "#f4f3ecff", + "terminal.dim_foreground": "#22221bff", + "terminal.ansi.black": "#22221bff", + "terminal.ansi.bright_black": "#6a6958ff", + "terminal.ansi.dim_black": "#f4f3ecff", + "terminal.ansi.red": "#ba6136ff", + "terminal.ansi.bright_red": "#5c321eff", + "terminal.ansi.dim_red": "#e4af96ff", + "terminal.ansi.green": "#7d9726ff", + "terminal.ansi.bright_green": "#3e4919ff", + "terminal.ansi.dim_green": "#c0ca93ff", + "terminal.ansi.yellow": "#a5980fff", + "terminal.ansi.bright_yellow": "#514a13ff", + "terminal.ansi.dim_yellow": "#d7ca8dff", + "terminal.ansi.blue": "#36a165ff", + "terminal.ansi.bright_blue": "#234e34ff", + "terminal.ansi.dim_blue": "#a0d1b0ff", + "terminal.ansi.magenta": "#9d6b7bff", + "terminal.ansi.bright_magenta": "#4c373eff", + "terminal.ansi.dim_magenta": "#ceb3bbff", + "terminal.ansi.cyan": "#5a9d47ff", + "terminal.ansi.bright_cyan": "#314c27ff", + "terminal.ansi.dim_cyan": "#aecea1ff", + "terminal.ansi.white": "#f4f3ecff", + "terminal.ansi.bright_white": "#f4f3ecff", + "terminal.ansi.dim_white": "#898775ff", + "link_text.hover": "#36a165ff", + "conflict": "#a5980fff", + "conflict.background": "#25210dff", + "conflict.border": "#3b3612ff", + "created": "#7d9726ff", + "created.background": "#1e2110ff", + "created.border": 
"#2f3515ff", + "deleted": "#ba6136ff", + "deleted.background": "#2a1811ff", + "deleted.border": "#442618ff", + "error": "#ba6136ff", + "error.background": "#2a1811ff", + "error.border": "#442618ff", + "hidden": "#7d7c6aff", + "hidden.background": "#424136ff", + "hidden.border": "#504f41ff", + "hint": "#6f815aff", + "hint.background": "#142319ff", + "hint.border": "#1c3927ff", + "ignored": "#7d7c6aff", + "ignored.background": "#424136ff", + "ignored.border": "#5d5c4cff", + "info": "#36a165ff", + "info.background": "#142319ff", + "info.border": "#1c3927ff", + "modified": "#a5980fff", + "modified.background": "#25210dff", + "modified.border": "#3b3612ff", + "predictive": "#5f724cff", + "predictive.background": "#1e2110ff", + "predictive.border": "#2f3515ff", + "renamed": "#36a165ff", + "renamed.background": "#142319ff", + "renamed.border": "#1c3927ff", + "success": "#7d9726ff", + "success.background": "#1e2110ff", + "success.border": "#2f3515ff", + "unreachable": "#91907fff", + "unreachable.background": "#424136ff", + "unreachable.border": "#5d5c4cff", + "warning": "#a5980fff", + "warning.background": "#25210dff", + "warning.border": "#3b3612ff", + "players": [ + { + "cursor": "#36a165ff", + "background": "#36a165ff", + "selection": "#36a1653d" + }, + { + "cursor": "#9d6b7bff", + "background": "#9d6b7bff", + "selection": "#9d6b7b3d" + }, + { + "cursor": "#ae7214ff", + "background": "#ae7214ff", + "selection": "#ae72143d" + }, + { + "cursor": "#5f9182ff", + "background": "#5f9182ff", + "selection": "#5f91823d" + }, + { + "cursor": "#5a9d47ff", + "background": "#5a9d47ff", + "selection": "#5a9d473d" + }, + { + "cursor": "#ba6136ff", + "background": "#ba6136ff", + "selection": "#ba61363d" + }, + { + "cursor": "#a5980fff", + "background": "#a5980fff", + "selection": "#a5980f3d" + }, + { + "cursor": "#7d9726ff", + "background": "#7d9726ff", + "selection": "#7d97263d" + } + ], + "syntax": { + "attribute": { + "color": "#36a165ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#7d9726ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#6c6b5aff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#929181ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#7d9726ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#36a165ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f4f3ecff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#36a165ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#36a165ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#ae7214ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#35a166ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#35a166ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a5980cff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#6f815aff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#5f9182ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#36a165ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#ae7214ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#7d9726ff", + "font_style": null, + "font_weight": null + }, + 
"number": { + "color": "#ae7312ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#929181ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#5f724cff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f4f3ecff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e7e6dfff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ba6135ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#e7e6dfff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#929181ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#929181ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#e7e6dfff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#9d6b7bff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#7c9725ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#929181ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#5a9d47ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#9d6b7bff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#7c9725ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#36a165ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#ae7214ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f4f3ecff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a5980cff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#e7e6dfff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#5f9182ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a5980cff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Estuary Light", + "appearance": "light", + "style": { + "border": "#969585ff", + "border.variant": "#d0cfc5ff", + "border.focused": "#bbddc6ff", + "border.selected": "#bbddc6ff", + "border.transparent": "#00000000", + "border.disabled": "#adac9fff", + "elevated_surface.background": "#ebeae3ff", + "surface.background": "#ebeae3ff", + "background": "#c5c4b9ff", + "element.background": "#ebeae3ff", + "element.hover": "#d0cfc5ff", + "element.active": "#989788ff", + "element.selected": "#989788ff", + "element.disabled": "#ebeae3ff", + "drop_target.background": "#61604f80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#d0cfc5ff", + "ghost_element.active": "#989788ff", + "ghost_element.selected": "#989788ff", + "ghost_element.disabled": "#ebeae3ff", + "text": "#22221bff", + "text.muted": "#61604fff", + "text.placeholder": "#767463ff", + "text.disabled": "#767463ff", + "text.accent": "#37a165ff", + "icon": "#22221bff", + "icon.muted": "#61604fff", + "icon.disabled": "#767463ff", + "icon.placeholder": "#61604fff", + "icon.accent": "#37a165ff", + "status_bar.background": "#c5c4b9ff", + "title_bar.background": "#c5c4b9ff", + "toolbar.background": "#f4f3ecff", + "tab_bar.background": "#ebeae3ff", + "tab.inactive_background": "#ebeae3ff", + "tab.active_background": "#f4f3ecff", + "search.match_background": "#38a16666", + "panel.background": "#ebeae3ff", + 
"panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#22221b4c", + "scrollbar.thumb.hover_background": "#d0cfc5ff", + "scrollbar.thumb.border": "#d0cfc5ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#edece5ff", + "editor.foreground": "#302f27ff", + "editor.background": "#f4f3ecff", + "editor.gutter.background": "#f4f3ecff", + "editor.subheader.background": "#ebeae3ff", + "editor.active_line.background": "#ebeae3bf", + "editor.highlighted_line.background": "#ebeae3ff", + "editor.line_number": "#22221b59", + "editor.active_line_number": "#22221bff", + "editor.invisible": "#7a7867ff", + "editor.wrap_guide": "#22221b0d", + "editor.active_wrap_guide": "#22221b1a", + "editor.document_highlight.read_background": "#37a1651a", + "editor.document_highlight.write_background": "#7a786766", + "terminal.background": "#f4f3ecff", + "terminal.foreground": "#22221bff", + "terminal.bright_foreground": "#22221bff", + "terminal.dim_foreground": "#f4f3ecff", + "terminal.ansi.black": "#f4f3ecff", + "terminal.ansi.bright_black": "#898775ff", + "terminal.ansi.dim_black": "#22221bff", + "terminal.ansi.red": "#ba6336ff", + "terminal.ansi.bright_red": "#e4af96ff", + "terminal.ansi.dim_red": "#5c321eff", + "terminal.ansi.green": "#7c9728ff", + "terminal.ansi.bright_green": "#c0ca93ff", + "terminal.ansi.dim_green": "#3e4919ff", + "terminal.ansi.yellow": "#a5980fff", + "terminal.ansi.bright_yellow": "#d7ca8dff", + "terminal.ansi.dim_yellow": "#514a13ff", + "terminal.ansi.blue": "#37a165ff", + "terminal.ansi.bright_blue": "#a0d1b0ff", + "terminal.ansi.dim_blue": "#234e34ff", + "terminal.ansi.magenta": "#9d6b7bff", + "terminal.ansi.bright_magenta": "#ceb3bbff", + "terminal.ansi.dim_magenta": "#4c373eff", + "terminal.ansi.cyan": "#5c9d49ff", + "terminal.ansi.bright_cyan": "#aecea1ff", + "terminal.ansi.dim_cyan": "#314c27ff", + "terminal.ansi.white": "#22221bff", + "terminal.ansi.bright_white": "#22221bff", + "terminal.ansi.dim_white": "#6a6958ff", + "link_text.hover": "#37a165ff", + "conflict": "#a5980fff", + "conflict.background": "#f0e9d1ff", + "conflict.border": "#e3d8adff", + "created": "#7c9728ff", + "created.background": "#e6e9d3ff", + "created.border": "#d2d8b1ff", + "deleted": "#ba6336ff", + "deleted.background": "#f6ded4ff", + "deleted.border": "#edc5b3ff", + "error": "#ba6336ff", + "error.background": "#f6ded4ff", + "error.border": "#edc5b3ff", + "hidden": "#767463ff", + "hidden.background": "#c5c4b9ff", + "hidden.border": "#adac9fff", + "hint": "#758961ff", + "hint.background": "#d9ecdfff", + "hint.border": "#bbddc6ff", + "ignored": "#767463ff", + "ignored.background": "#c5c4b9ff", + "ignored.border": "#969585ff", + "info": "#37a165ff", + "info.background": "#d9ecdfff", + "info.border": "#bbddc6ff", + "modified": "#a5980fff", + "modified.background": "#f0e9d1ff", + "modified.border": "#e3d8adff", + "predictive": "#879a72ff", + "predictive.background": "#e6e9d3ff", + "predictive.border": "#d2d8b1ff", + "renamed": "#37a165ff", + "renamed.background": "#d9ecdfff", + "renamed.border": "#bbddc6ff", + "success": "#7c9728ff", + "success.background": "#e6e9d3ff", + "success.border": "#d2d8b1ff", + "unreachable": "#61604fff", + "unreachable.background": "#c5c4b9ff", + "unreachable.border": "#969585ff", + "warning": "#a5980fff", + "warning.background": "#f0e9d1ff", + "warning.border": "#e3d8adff", + "players": [ + { + "cursor": "#37a165ff", + "background": "#37a165ff", + "selection": "#37a1653d" + }, + { + "cursor": "#9d6b7bff", + "background": 
"#9d6b7bff", + "selection": "#9d6b7b3d" + }, + { + "cursor": "#ae7214ff", + "background": "#ae7214ff", + "selection": "#ae72143d" + }, + { + "cursor": "#5f9182ff", + "background": "#5f9182ff", + "selection": "#5f91823d" + }, + { + "cursor": "#5c9d49ff", + "background": "#5c9d49ff", + "selection": "#5c9d493d" + }, + { + "cursor": "#ba6336ff", + "background": "#ba6336ff", + "selection": "#ba63363d" + }, + { + "cursor": "#a5980fff", + "background": "#a5980fff", + "selection": "#a5980f3d" + }, + { + "cursor": "#7c9728ff", + "background": "#7c9728ff", + "selection": "#7c97283d" + } + ], + "syntax": { + "attribute": { + "color": "#37a165ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#7c9728ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#878573ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#5f5e4eff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#7c9728ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#37a165ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#22221bff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#37a165ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#37a165ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#ae7214ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#35a166ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#35a166ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a5980cff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#758961ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#5f9182ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#37a165ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#ae7214ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#7c9728ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#ae7312ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#5f5e4eff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#879a72ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#22221bff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#302f27ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ba6135ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#302f27ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#5f5e4eff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#5f5e4eff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#302f27ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#9d6b7bff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#7c9725ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#5f5e4eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#5a9d47ff", + "font_style": null, + "font_weight": null + }, + 
"string.special": { + "color": "#9d6b7bff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#7c9725ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#37a165ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#ae7214ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#22221bff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a5980cff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#302f27ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#5f9182ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a5980cff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Forest Dark", + "appearance": "dark", + "style": { + "border": "#665f5cff", + "border.variant": "#3b3431ff", + "border.focused": "#182d5bff", + "border.selected": "#182d5bff", + "border.transparent": "#00000000", + "border.disabled": "#554e4bff", + "elevated_surface.background": "#27211eff", + "surface.background": "#27211eff", + "background": "#443c39ff", + "element.background": "#27211eff", + "element.hover": "#3b3431ff", + "element.active": "#645d5aff", + "element.selected": "#645d5aff", + "element.disabled": "#27211eff", + "drop_target.background": "#a79f9d80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#3b3431ff", + "ghost_element.active": "#645d5aff", + "ghost_element.selected": "#645d5aff", + "ghost_element.disabled": "#27211eff", + "text": "#f0eeedff", + "text.muted": "#a79f9dff", + "text.placeholder": "#8e8683ff", + "text.disabled": "#8e8683ff", + "text.accent": "#407ee6ff", + "icon": "#f0eeedff", + "icon.muted": "#a79f9dff", + "icon.disabled": "#8e8683ff", + "icon.placeholder": "#a79f9dff", + "icon.accent": "#407ee6ff", + "status_bar.background": "#443c39ff", + "title_bar.background": "#443c39ff", + "toolbar.background": "#1b1918ff", + "tab_bar.background": "#27211eff", + "tab.inactive_background": "#27211eff", + "tab.active_background": "#1b1918ff", + "search.match_background": "#417ee666", + "panel.background": "#27211eff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f0eeed4c", + "scrollbar.thumb.hover_background": "#3b3431ff", + "scrollbar.thumb.border": "#3b3431ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#251f1dff", + "editor.foreground": "#e6e2e0ff", + "editor.background": "#1b1918ff", + "editor.gutter.background": "#1b1918ff", + "editor.subheader.background": "#27211eff", + "editor.active_line.background": "#27211ebf", + "editor.highlighted_line.background": "#27211eff", + "editor.line_number": "#f0eeed59", + "editor.active_line_number": "#f0eeedff", + "editor.invisible": "#89817dff", + "editor.wrap_guide": "#f0eeed0d", + "editor.active_wrap_guide": "#f0eeed1a", + "editor.document_highlight.read_background": "#407ee61a", + "editor.document_highlight.write_background": "#89817d66", + "terminal.background": "#1b1918ff", + "terminal.foreground": "#f0eeedff", + "terminal.bright_foreground": "#f0eeedff", + "terminal.dim_foreground": "#1b1918ff", + "terminal.ansi.black": "#1b1918ff", + "terminal.ansi.bright_black": "#746c69ff", + "terminal.ansi.dim_black": "#f0eeedff", + "terminal.ansi.red": "#f22c3fff", + "terminal.ansi.bright_red": "#8c1122ff", + "terminal.ansi.dim_red": "#ffa29aff", + "terminal.ansi.green": "#7a9726ff", + 
"terminal.ansi.bright_green": "#3d4919ff", + "terminal.ansi.dim_green": "#bfca93ff", + "terminal.ansi.yellow": "#c38418ff", + "terminal.ansi.bright_yellow": "#664115ff", + "terminal.ansi.dim_yellow": "#e8bf8dff", + "terminal.ansi.blue": "#407ee6ff", + "terminal.ansi.bright_blue": "#213f78ff", + "terminal.ansi.dim_blue": "#aebcf4ff", + "terminal.ansi.magenta": "#c340f2ff", + "terminal.ansi.bright_magenta": "#662086ff", + "terminal.ansi.dim_magenta": "#e7a6fbff", + "terminal.ansi.cyan": "#3d97b8ff", + "terminal.ansi.bright_cyan": "#264858ff", + "terminal.ansi.dim_cyan": "#a6cadbff", + "terminal.ansi.white": "#f0eeedff", + "terminal.ansi.bright_white": "#f0eeedff", + "terminal.ansi.dim_white": "#9e9693ff", + "link_text.hover": "#407ee6ff", + "conflict": "#c38418ff", + "conflict.background": "#371d0dff", + "conflict.border": "#4f2e11ff", + "created": "#7a9726ff", + "created.background": "#1d2110ff", + "created.border": "#2e3515ff", + "deleted": "#f22c3fff", + "deleted.background": "#540511ff", + "deleted.border": "#710b1aff", + "error": "#f22c3fff", + "error.background": "#540511ff", + "error.border": "#710b1aff", + "hidden": "#8e8683ff", + "hidden.background": "#443c39ff", + "hidden.border": "#554e4bff", + "hint": "#a77087ff", + "hint.background": "#0f1c3dff", + "hint.border": "#182d5bff", + "ignored": "#8e8683ff", + "ignored.background": "#443c39ff", + "ignored.border": "#665f5cff", + "info": "#407ee6ff", + "info.background": "#0f1c3dff", + "info.border": "#182d5bff", + "modified": "#c38418ff", + "modified.background": "#371d0dff", + "modified.border": "#4f2e11ff", + "predictive": "#8f5b70ff", + "predictive.background": "#1d2110ff", + "predictive.border": "#2e3515ff", + "renamed": "#407ee6ff", + "renamed.background": "#0f1c3dff", + "renamed.border": "#182d5bff", + "success": "#7a9726ff", + "success.background": "#1d2110ff", + "success.border": "#2e3515ff", + "unreachable": "#a79f9dff", + "unreachable.background": "#443c39ff", + "unreachable.border": "#665f5cff", + "warning": "#c38418ff", + "warning.background": "#371d0dff", + "warning.border": "#4f2e11ff", + "players": [ + { + "cursor": "#407ee6ff", + "background": "#407ee6ff", + "selection": "#407ee63d" + }, + { + "cursor": "#c340f2ff", + "background": "#c340f2ff", + "selection": "#c340f23d" + }, + { + "cursor": "#df5321ff", + "background": "#df5321ff", + "selection": "#df53213d" + }, + { + "cursor": "#6565e9ff", + "background": "#6565e9ff", + "selection": "#6565e93d" + }, + { + "cursor": "#3d97b8ff", + "background": "#3d97b8ff", + "selection": "#3d97b83d" + }, + { + "cursor": "#f22c3fff", + "background": "#f22c3fff", + "selection": "#f22c3f3d" + }, + { + "cursor": "#c38418ff", + "background": "#c38418ff", + "selection": "#c384183d" + }, + { + "cursor": "#7a9726ff", + "background": "#7a9726ff", + "selection": "#7a97263d" + } + ], + "syntax": { + "attribute": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#7a9726ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#766e6bff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#a8a19fff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#7a9726ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f0eeedff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": 
null + }, + "emphasis.strong": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#df5321ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#3f7ee7ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#3f7ee7ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#c38417ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#a77087ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#6666eaff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#df5321ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#7a9726ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#df521fff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#a8a19fff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#8f5b70ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f0eeedff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e6e2e0ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#f22c40ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#e6e2e0ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#a8a19fff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#a8a19fff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#e6e2e0ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#c33ff3ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#7a9725ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#a8a19fff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#3c96b8ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#c33ff3ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#7a9725ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#df5321ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f0eeedff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#c38417ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#e6e2e0ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#6666eaff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#c38417ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Forest Light", + "appearance": "light", + "style": { + "border": "#aaa3a1ff", + "border.variant": "#d6d1cfff", + "border.focused": "#c6cef7ff", + "border.selected": "#c6cef7ff", + "border.transparent": "#00000000", + "border.disabled": "#bcb6b4ff", + "elevated_surface.background": "#e9e6e4ff", + "surface.background": "#e9e6e4ff", + "background": "#ccc7c5ff", + "element.background": "#e9e6e4ff", + "element.hover": "#d6d1cfff", + "element.active": "#aca5a3ff", + 
"element.selected": "#aca5a3ff", + "element.disabled": "#e9e6e4ff", + "drop_target.background": "#6a636080", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#d6d1cfff", + "ghost_element.active": "#aca5a3ff", + "ghost_element.selected": "#aca5a3ff", + "ghost_element.disabled": "#e9e6e4ff", + "text": "#1b1918ff", + "text.muted": "#6a6360ff", + "text.placeholder": "#837b78ff", + "text.disabled": "#837b78ff", + "text.accent": "#407ee6ff", + "icon": "#1b1918ff", + "icon.muted": "#6a6360ff", + "icon.disabled": "#837b78ff", + "icon.placeholder": "#6a6360ff", + "icon.accent": "#407ee6ff", + "status_bar.background": "#ccc7c5ff", + "title_bar.background": "#ccc7c5ff", + "toolbar.background": "#f0eeedff", + "tab_bar.background": "#e9e6e4ff", + "tab.inactive_background": "#e9e6e4ff", + "tab.active_background": "#f0eeedff", + "search.match_background": "#417ee666", + "panel.background": "#e9e6e4ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#1b19184c", + "scrollbar.thumb.hover_background": "#d6d1cfff", + "scrollbar.thumb.border": "#d6d1cfff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#ebe8e6ff", + "editor.foreground": "#2c2421ff", + "editor.background": "#f0eeedff", + "editor.gutter.background": "#f0eeedff", + "editor.subheader.background": "#e9e6e4ff", + "editor.active_line.background": "#e9e6e4bf", + "editor.highlighted_line.background": "#e9e6e4ff", + "editor.line_number": "#1b191859", + "editor.active_line_number": "#1b1918ff", + "editor.invisible": "#89817dff", + "editor.wrap_guide": "#1b19180d", + "editor.active_wrap_guide": "#1b19181a", + "editor.document_highlight.read_background": "#407ee61a", + "editor.document_highlight.write_background": "#89817d66", + "terminal.background": "#f0eeedff", + "terminal.foreground": "#1b1918ff", + "terminal.bright_foreground": "#1b1918ff", + "terminal.dim_foreground": "#f0eeedff", + "terminal.ansi.black": "#f0eeedff", + "terminal.ansi.bright_black": "#9e9693ff", + "terminal.ansi.dim_black": "#1b1918ff", + "terminal.ansi.red": "#f22e40ff", + "terminal.ansi.bright_red": "#ffa29aff", + "terminal.ansi.dim_red": "#8c1122ff", + "terminal.ansi.green": "#7a9728ff", + "terminal.ansi.bright_green": "#bfca93ff", + "terminal.ansi.dim_green": "#3d4919ff", + "terminal.ansi.yellow": "#c38419ff", + "terminal.ansi.bright_yellow": "#e8bf8dff", + "terminal.ansi.dim_yellow": "#664115ff", + "terminal.ansi.blue": "#407ee6ff", + "terminal.ansi.bright_blue": "#aebcf4ff", + "terminal.ansi.dim_blue": "#213f78ff", + "terminal.ansi.magenta": "#c340f2ff", + "terminal.ansi.bright_magenta": "#e7a6fbff", + "terminal.ansi.dim_magenta": "#662086ff", + "terminal.ansi.cyan": "#3e96b8ff", + "terminal.ansi.bright_cyan": "#a6cadbff", + "terminal.ansi.dim_cyan": "#264858ff", + "terminal.ansi.white": "#1b1918ff", + "terminal.ansi.bright_white": "#1b1918ff", + "terminal.ansi.dim_white": "#746c69ff", + "link_text.hover": "#407ee6ff", + "conflict": "#c38419ff", + "conflict.background": "#f8e5d1ff", + "conflict.border": "#f0d1adff", + "created": "#7a9728ff", + "created.background": "#e5e9d3ff", + "created.border": "#d1d8b1ff", + "deleted": "#f22e40ff", + "deleted.background": "#ffdad5ff", + "deleted.border": "#ffbdb6ff", + "error": "#f22e40ff", + "error.background": "#ffdad5ff", + "error.border": "#ffbdb6ff", + "hidden": "#837b78ff", + "hidden.background": "#ccc7c5ff", + "hidden.border": "#bcb6b4ff", + "hint": "#a67287ff", + "hint.background": "#dfe3fbff", + "hint.border": "#c6cef7ff", + "ignored": 
"#837b78ff", + "ignored.background": "#ccc7c5ff", + "ignored.border": "#aaa3a1ff", + "info": "#407ee6ff", + "info.background": "#dfe3fbff", + "info.border": "#c6cef7ff", + "modified": "#c38419ff", + "modified.background": "#f8e5d1ff", + "modified.border": "#f0d1adff", + "predictive": "#be899eff", + "predictive.background": "#e5e9d3ff", + "predictive.border": "#d1d8b1ff", + "renamed": "#407ee6ff", + "renamed.background": "#dfe3fbff", + "renamed.border": "#c6cef7ff", + "success": "#7a9728ff", + "success.background": "#e5e9d3ff", + "success.border": "#d1d8b1ff", + "unreachable": "#6a6360ff", + "unreachable.background": "#ccc7c5ff", + "unreachable.border": "#aaa3a1ff", + "warning": "#c38419ff", + "warning.background": "#f8e5d1ff", + "warning.border": "#f0d1adff", + "players": [ + { + "cursor": "#407ee6ff", + "background": "#407ee6ff", + "selection": "#407ee63d" + }, + { + "cursor": "#c340f2ff", + "background": "#c340f2ff", + "selection": "#c340f23d" + }, + { + "cursor": "#df5421ff", + "background": "#df5421ff", + "selection": "#df54213d" + }, + { + "cursor": "#6765e9ff", + "background": "#6765e9ff", + "selection": "#6765e93d" + }, + { + "cursor": "#3e96b8ff", + "background": "#3e96b8ff", + "selection": "#3e96b83d" + }, + { + "cursor": "#f22e40ff", + "background": "#f22e40ff", + "selection": "#f22e403d" + }, + { + "cursor": "#c38419ff", + "background": "#c38419ff", + "selection": "#c384193d" + }, + { + "cursor": "#7a9728ff", + "background": "#7a9728ff", + "selection": "#7a97283d" + } + ], + "syntax": { + "attribute": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#7a9728ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#9c9491ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#68615eff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#7a9728ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#1b1918ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#df5421ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#3f7ee7ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#3f7ee7ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#c38417ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#a67287ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#6666eaff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#df5421ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#7a9728ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#df521fff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#68615eff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#be899eff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#1b1918ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#2c2421ff", + 
"font_style": null, + "font_weight": null + }, + "property": { + "color": "#f22c40ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#2c2421ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#68615eff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#68615eff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#2c2421ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#c33ff3ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#7a9725ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#68615eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#3c96b8ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#c33ff3ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#7a9725ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#407ee6ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#df5421ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#1b1918ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#c38417ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#2c2421ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#6666eaff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#c38417ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Heath Dark", + "appearance": "dark", + "style": { + "border": "#675b67ff", + "border.variant": "#393239ff", + "border.focused": "#192961ff", + "border.selected": "#192961ff", + "border.transparent": "#00000000", + "border.disabled": "#554a55ff", + "elevated_surface.background": "#252025ff", + "surface.background": "#252025ff", + "background": "#433a43ff", + "element.background": "#252025ff", + "element.hover": "#393239ff", + "element.active": "#655965ff", + "element.selected": "#655965ff", + "element.disabled": "#252025ff", + "drop_target.background": "#a899a880", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#393239ff", + "ghost_element.active": "#655965ff", + "ghost_element.selected": "#655965ff", + "ghost_element.disabled": "#252025ff", + "text": "#f7f3f7ff", + "text.muted": "#a899a8ff", + "text.placeholder": "#908190ff", + "text.disabled": "#908190ff", + "text.accent": "#5169ebff", + "icon": "#f7f3f7ff", + "icon.muted": "#a899a8ff", + "icon.disabled": "#908190ff", + "icon.placeholder": "#a899a8ff", + "icon.accent": "#5169ebff", + "status_bar.background": "#433a43ff", + "title_bar.background": "#433a43ff", + "toolbar.background": "#1b181bff", + "tab_bar.background": "#252025ff", + "tab.inactive_background": "#252025ff", + "tab.active_background": "#1b181bff", + "search.match_background": "#526aeb66", + "panel.background": "#252025ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f7f3f74c", + "scrollbar.thumb.hover_background": "#393239ff", + "scrollbar.thumb.border": "#393239ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#231e23ff", + "editor.foreground": "#d8cad8ff", + "editor.background": "#1b181bff", + "editor.gutter.background": "#1b181bff", + 
"editor.subheader.background": "#252025ff", + "editor.active_line.background": "#252025bf", + "editor.highlighted_line.background": "#252025ff", + "editor.line_number": "#f7f3f759", + "editor.active_line_number": "#f7f3f7ff", + "editor.invisible": "#8b7b8bff", + "editor.wrap_guide": "#f7f3f70d", + "editor.active_wrap_guide": "#f7f3f71a", + "editor.document_highlight.read_background": "#5169eb1a", + "editor.document_highlight.write_background": "#8b7b8b66", + "terminal.background": "#1b181bff", + "terminal.foreground": "#f7f3f7ff", + "terminal.bright_foreground": "#f7f3f7ff", + "terminal.dim_foreground": "#1b181bff", + "terminal.ansi.black": "#1b181bff", + "terminal.ansi.bright_black": "#756775ff", + "terminal.ansi.dim_black": "#f7f3f7ff", + "terminal.ansi.red": "#ca3f2bff", + "terminal.ansi.bright_red": "#6d221aff", + "terminal.ansi.dim_red": "#f0a28fff", + "terminal.ansi.green": "#918b3aff", + "terminal.ansi.bright_green": "#474422ff", + "terminal.ansi.dim_green": "#cac49aff", + "terminal.ansi.yellow": "#bb8a35ff", + "terminal.ansi.bright_yellow": "#5e441eff", + "terminal.ansi.dim_yellow": "#e2c398ff", + "terminal.ansi.blue": "#5169ebff", + "terminal.ansi.bright_blue": "#26367eff", + "terminal.ansi.dim_blue": "#b4b2f7ff", + "terminal.ansi.magenta": "#cc34ccff", + "terminal.ansi.bright_magenta": "#6c1e66ff", + "terminal.ansi.dim_magenta": "#eba2e6ff", + "terminal.ansi.cyan": "#189393ff", + "terminal.ansi.bright_cyan": "#1a4848ff", + "terminal.ansi.dim_cyan": "#99c8c7ff", + "terminal.ansi.white": "#f7f3f7ff", + "terminal.ansi.bright_white": "#f7f3f7ff", + "terminal.ansi.dim_white": "#a091a0ff", + "link_text.hover": "#5169ebff", + "conflict": "#bb8a35ff", + "conflict.background": "#2d1e11ff", + "conflict.border": "#463218ff", + "created": "#918b3aff", + "created.background": "#211f12ff", + "created.border": "#34321bff", + "deleted": "#ca3f2bff", + "deleted.background": "#3c110eff", + "deleted.border": "#541a15ff", + "error": "#ca3f2bff", + "error.background": "#3c110eff", + "error.border": "#541a15ff", + "hidden": "#908190ff", + "hidden.background": "#433a43ff", + "hidden.border": "#554a55ff", + "hint": "#8d70a8ff", + "hint.background": "#0d1a43ff", + "hint.border": "#192961ff", + "ignored": "#908190ff", + "ignored.background": "#433a43ff", + "ignored.border": "#675b67ff", + "info": "#5169ebff", + "info.background": "#0d1a43ff", + "info.border": "#192961ff", + "modified": "#bb8a35ff", + "modified.background": "#2d1e11ff", + "modified.border": "#463218ff", + "predictive": "#75588fff", + "predictive.background": "#211f12ff", + "predictive.border": "#34321bff", + "renamed": "#5169ebff", + "renamed.background": "#0d1a43ff", + "renamed.border": "#192961ff", + "success": "#918b3aff", + "success.background": "#211f12ff", + "success.border": "#34321bff", + "unreachable": "#a899a8ff", + "unreachable.background": "#433a43ff", + "unreachable.border": "#675b67ff", + "warning": "#bb8a35ff", + "warning.background": "#2d1e11ff", + "warning.border": "#463218ff", + "players": [ + { + "cursor": "#5169ebff", + "background": "#5169ebff", + "selection": "#5169eb3d" + }, + { + "cursor": "#cc34ccff", + "background": "#cc34ccff", + "selection": "#cc34cc3d" + }, + { + "cursor": "#a65827ff", + "background": "#a65827ff", + "selection": "#a658273d" + }, + { + "cursor": "#7b58bfff", + "background": "#7b58bfff", + "selection": "#7b58bf3d" + }, + { + "cursor": "#189393ff", + "background": "#189393ff", + "selection": "#1893933d" + }, + { + "cursor": "#ca3f2bff", + "background": "#ca3f2bff", + "selection": "#ca3f2b3d" + }, 
+ { + "cursor": "#bb8a35ff", + "background": "#bb8a35ff", + "selection": "#bb8a353d" + }, + { + "cursor": "#918b3aff", + "background": "#918b3aff", + "selection": "#918b3a3d" + } + ], + "syntax": { + "attribute": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#776977ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#ab9babff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f7f3f7ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#5169ebff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#a65827ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#506aecff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#506aecff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#bb8a34ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8d70a8ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#7b58bfff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#a65827ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#a65825ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#ab9babff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#75588fff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f7f3f7ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#d8cad8ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ca3f2aff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#d8cad8ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#ab9babff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#ab9babff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#d8cad8ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#cc32ccff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#ab9babff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#149393ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#cc32ccff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#a65827ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f7f3f7ff", + 
"font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#bb8a34ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#d8cad8ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#7b58bfff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#bb8a34ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Heath Light", + "appearance": "light", + "style": { + "border": "#ad9dadff", + "border.variant": "#ccbdccff", + "border.focused": "#cac7faff", + "border.selected": "#cac7faff", + "border.transparent": "#00000000", + "border.disabled": "#baaabaff", + "elevated_surface.background": "#e0d5e0ff", + "surface.background": "#e0d5e0ff", + "background": "#c6b8c6ff", + "element.background": "#e0d5e0ff", + "element.hover": "#ccbdccff", + "element.active": "#ae9eaeff", + "element.selected": "#ae9eaeff", + "element.disabled": "#e0d5e0ff", + "drop_target.background": "#6b5e6b80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#ccbdccff", + "ghost_element.active": "#ae9eaeff", + "ghost_element.selected": "#ae9eaeff", + "ghost_element.disabled": "#e0d5e0ff", + "text": "#1b181bff", + "text.muted": "#6b5e6bff", + "text.placeholder": "#857785ff", + "text.disabled": "#857785ff", + "text.accent": "#5169ebff", + "icon": "#1b181bff", + "icon.muted": "#6b5e6bff", + "icon.disabled": "#857785ff", + "icon.placeholder": "#6b5e6bff", + "icon.accent": "#5169ebff", + "status_bar.background": "#c6b8c6ff", + "title_bar.background": "#c6b8c6ff", + "toolbar.background": "#f7f3f7ff", + "tab_bar.background": "#e0d5e0ff", + "tab.inactive_background": "#e0d5e0ff", + "tab.active_background": "#f7f3f7ff", + "search.match_background": "#526aeb66", + "panel.background": "#e0d5e0ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#1b181b4c", + "scrollbar.thumb.hover_background": "#ccbdccff", + "scrollbar.thumb.border": "#ccbdccff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#e5dce5ff", + "editor.foreground": "#292329ff", + "editor.background": "#f7f3f7ff", + "editor.gutter.background": "#f7f3f7ff", + "editor.subheader.background": "#e0d5e0ff", + "editor.active_line.background": "#e0d5e0bf", + "editor.highlighted_line.background": "#e0d5e0ff", + "editor.line_number": "#1b181b59", + "editor.active_line_number": "#1b181bff", + "editor.invisible": "#8b7b8bff", + "editor.wrap_guide": "#1b181b0d", + "editor.active_wrap_guide": "#1b181b1a", + "editor.document_highlight.read_background": "#5169eb1a", + "editor.document_highlight.write_background": "#8b7b8b66", + "terminal.background": "#f7f3f7ff", + "terminal.foreground": "#1b181bff", + "terminal.bright_foreground": "#1b181bff", + "terminal.dim_foreground": "#f7f3f7ff", + "terminal.ansi.black": "#f7f3f7ff", + "terminal.ansi.bright_black": "#a091a0ff", + "terminal.ansi.dim_black": "#1b181bff", + "terminal.ansi.red": "#ca402bff", + "terminal.ansi.bright_red": "#f0a28fff", + "terminal.ansi.dim_red": "#6d221aff", + "terminal.ansi.green": "#918b3bff", + "terminal.ansi.bright_green": "#cac49aff", + "terminal.ansi.dim_green": "#474422ff", + "terminal.ansi.yellow": "#bb8a35ff", + "terminal.ansi.bright_yellow": "#e2c398ff", + "terminal.ansi.dim_yellow": "#5e441eff", + "terminal.ansi.blue": "#5169ebff", + "terminal.ansi.bright_blue": "#b4b2f7ff", + "terminal.ansi.dim_blue": "#26367eff", + "terminal.ansi.magenta": "#cc34ccff", + "terminal.ansi.bright_magenta": 
"#eba2e6ff", + "terminal.ansi.dim_magenta": "#6c1e66ff", + "terminal.ansi.cyan": "#189393ff", + "terminal.ansi.bright_cyan": "#99c8c7ff", + "terminal.ansi.dim_cyan": "#1a4848ff", + "terminal.ansi.white": "#1b181bff", + "terminal.ansi.bright_white": "#1b181bff", + "terminal.ansi.dim_white": "#756775ff", + "link_text.hover": "#5169ebff", + "conflict": "#bb8a35ff", + "conflict.background": "#f5e6d5ff", + "conflict.border": "#ebd3b5ff", + "created": "#918b3bff", + "created.background": "#eae6d6ff", + "created.border": "#d8d3b5ff", + "deleted": "#ca402bff", + "deleted.background": "#fcd9d1ff", + "deleted.border": "#f7bcaeff", + "error": "#ca402bff", + "error.background": "#fcd9d1ff", + "error.border": "#f7bcaeff", + "hidden": "#857785ff", + "hidden.background": "#c6b8c6ff", + "hidden.border": "#baaabaff", + "hint": "#8c70a6ff", + "hint.background": "#e2dffcff", + "hint.border": "#cac7faff", + "ignored": "#857785ff", + "ignored.background": "#c6b8c6ff", + "ignored.border": "#ad9dadff", + "info": "#5169ebff", + "info.background": "#e2dffcff", + "info.border": "#cac7faff", + "modified": "#bb8a35ff", + "modified.background": "#f5e6d5ff", + "modified.border": "#ebd3b5ff", + "predictive": "#a487bfff", + "predictive.background": "#eae6d6ff", + "predictive.border": "#d8d3b5ff", + "renamed": "#5169ebff", + "renamed.background": "#e2dffcff", + "renamed.border": "#cac7faff", + "success": "#918b3bff", + "success.background": "#eae6d6ff", + "success.border": "#d8d3b5ff", + "unreachable": "#6b5e6bff", + "unreachable.background": "#c6b8c6ff", + "unreachable.border": "#ad9dadff", + "warning": "#bb8a35ff", + "warning.background": "#f5e6d5ff", + "warning.border": "#ebd3b5ff", + "players": [ + { + "cursor": "#5169ebff", + "background": "#5169ebff", + "selection": "#5169eb3d" + }, + { + "cursor": "#cc34ccff", + "background": "#cc34ccff", + "selection": "#cc34cc3d" + }, + { + "cursor": "#a65927ff", + "background": "#a65927ff", + "selection": "#a659273d" + }, + { + "cursor": "#7a5ac0ff", + "background": "#7a5ac0ff", + "selection": "#7a5ac03d" + }, + { + "cursor": "#189393ff", + "background": "#189393ff", + "selection": "#1893933d" + }, + { + "cursor": "#ca402bff", + "background": "#ca402bff", + "selection": "#ca402b3d" + }, + { + "cursor": "#bb8a35ff", + "background": "#bb8a35ff", + "selection": "#bb8a353d" + }, + { + "cursor": "#918b3bff", + "background": "#918b3bff", + "selection": "#918b3b3d" + } + ], + "syntax": { + "attribute": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#918b3bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#9e8f9eff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#695d69ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#918b3bff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#1b181bff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#5169ebff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#a65927ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#506aecff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#506aecff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + 
"color": "#bb8a34ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8c70a6ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#7b58bfff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#a65927ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#918b3bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#a65825ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#695d69ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#a487bfff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#1b181bff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#292329ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ca3f2aff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#292329ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#695d69ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#695d69ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#292329ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#cc32ccff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#695d69ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#149393ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#cc32ccff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#918b3aff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#5169ebff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#a65927ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#1b181bff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#bb8a34ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#292329ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#7b58bfff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#bb8a34ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Lakeside Dark", + "appearance": "dark", + "style": { + "border": "#4f6a78ff", + "border.variant": "#2c3b42ff", + "border.focused": "#1a2f3cff", + "border.selected": "#1a2f3cff", + "border.transparent": "#00000000", + "border.disabled": "#415763ff", + "elevated_surface.background": "#1c2529ff", + "surface.background": "#1c2529ff", + "background": "#33444dff", + "element.background": "#1c2529ff", + "element.hover": "#2c3b42ff", + "element.active": "#4d6876ff", + "element.selected": "#4d6876ff", + "element.disabled": "#1c2529ff", + "drop_target.background": "#7c9fb380", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#2c3b42ff", + "ghost_element.active": "#4d6876ff", + "ghost_element.selected": "#4d6876ff", + "ghost_element.disabled": "#1c2529ff", + "text": "#ebf8ffff", + "text.muted": "#7c9fb3ff", + "text.placeholder": "#688c9dff", + "text.disabled": 
"#688c9dff", + "text.accent": "#267eadff", + "icon": "#ebf8ffff", + "icon.muted": "#7c9fb3ff", + "icon.disabled": "#688c9dff", + "icon.placeholder": "#7c9fb3ff", + "icon.accent": "#267eadff", + "status_bar.background": "#33444dff", + "title_bar.background": "#33444dff", + "toolbar.background": "#161b1dff", + "tab_bar.background": "#1c2529ff", + "tab.inactive_background": "#1c2529ff", + "tab.active_background": "#161b1dff", + "search.match_background": "#277fad66", + "panel.background": "#1c2529ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#ebf8ff4c", + "scrollbar.thumb.hover_background": "#2c3b42ff", + "scrollbar.thumb.border": "#2c3b42ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#1b2327ff", + "editor.foreground": "#c1e4f6ff", + "editor.background": "#161b1dff", + "editor.gutter.background": "#161b1dff", + "editor.subheader.background": "#1c2529ff", + "editor.active_line.background": "#1c2529bf", + "editor.highlighted_line.background": "#1c2529ff", + "editor.line_number": "#ebf8ff59", + "editor.active_line_number": "#ebf8ffff", + "editor.invisible": "#66889aff", + "editor.wrap_guide": "#ebf8ff0d", + "editor.active_wrap_guide": "#ebf8ff1a", + "editor.document_highlight.read_background": "#267ead1a", + "editor.document_highlight.write_background": "#66889a66", + "terminal.background": "#161b1dff", + "terminal.foreground": "#ebf8ffff", + "terminal.bright_foreground": "#ebf8ffff", + "terminal.dim_foreground": "#161b1dff", + "terminal.ansi.black": "#161b1dff", + "terminal.ansi.bright_black": "#577889ff", + "terminal.ansi.dim_black": "#ebf8ffff", + "terminal.ansi.red": "#d22e72ff", + "terminal.ansi.bright_red": "#6f1c3aff", + "terminal.ansi.dim_red": "#f09fb6ff", + "terminal.ansi.green": "#558c3aff", + "terminal.ansi.bright_green": "#2e4522ff", + "terminal.ansi.dim_green": "#aac499ff", + "terminal.ansi.yellow": "#8a8a10ff", + "terminal.ansi.bright_yellow": "#454413ff", + "terminal.ansi.dim_yellow": "#c7c28aff", + "terminal.ansi.blue": "#267eadff", + "terminal.ansi.bright_blue": "#1d3f53ff", + "terminal.ansi.dim_blue": "#9ebcd6ff", + "terminal.ansi.magenta": "#b72ed2ff", + "terminal.ansi.bright_magenta": "#5c1e6bff", + "terminal.ansi.dim_magenta": "#e09fe9ff", + "terminal.ansi.cyan": "#2d8f6fff", + "terminal.ansi.bright_cyan": "#1e4637ff", + "terminal.ansi.dim_cyan": "#9bc7b5ff", + "terminal.ansi.white": "#ebf8ffff", + "terminal.ansi.bright_white": "#ebf8ffff", + "terminal.ansi.dim_white": "#7296a9ff", + "link_text.hover": "#267eadff", + "conflict": "#8a8a10ff", + "conflict.background": "#201e0cff", + "conflict.border": "#323111ff", + "created": "#558c3aff", + "created.background": "#171f12ff", + "created.border": "#23321bff", + "deleted": "#d22e72ff", + "deleted.background": "#39101bff", + "deleted.border": "#55152bff", + "error": "#d22e72ff", + "error.background": "#39101bff", + "error.border": "#55152bff", + "hidden": "#688c9dff", + "hidden.background": "#33444dff", + "hidden.border": "#415763ff", + "hint": "#52809aff", + "hint.background": "#121c24ff", + "hint.border": "#1a2f3cff", + "ignored": "#688c9dff", + "ignored.background": "#33444dff", + "ignored.border": "#4f6a78ff", + "info": "#267eadff", + "info.background": "#121c24ff", + "info.border": "#1a2f3cff", + "modified": "#8a8a10ff", + "modified.background": "#201e0cff", + "modified.border": "#323111ff", + "predictive": "#426f88ff", + "predictive.background": "#171f12ff", + "predictive.border": "#23321bff", + "renamed": "#267eadff", + 
"renamed.background": "#121c24ff", + "renamed.border": "#1a2f3cff", + "success": "#558c3aff", + "success.background": "#171f12ff", + "success.border": "#23321bff", + "unreachable": "#7c9fb3ff", + "unreachable.background": "#33444dff", + "unreachable.border": "#4f6a78ff", + "warning": "#8a8a10ff", + "warning.background": "#201e0cff", + "warning.border": "#323111ff", + "players": [ + { + "cursor": "#267eadff", + "background": "#267eadff", + "selection": "#267ead3d" + }, + { + "cursor": "#b72ed2ff", + "background": "#b72ed2ff", + "selection": "#b72ed23d" + }, + { + "cursor": "#935b25ff", + "background": "#935b25ff", + "selection": "#935b253d" + }, + { + "cursor": "#6a6ab7ff", + "background": "#6a6ab7ff", + "selection": "#6a6ab73d" + }, + { + "cursor": "#2d8f6fff", + "background": "#2d8f6fff", + "selection": "#2d8f6f3d" + }, + { + "cursor": "#d22e72ff", + "background": "#d22e72ff", + "selection": "#d22e723d" + }, + { + "cursor": "#8a8a10ff", + "background": "#8a8a10ff", + "selection": "#8a8a103d" + }, + { + "cursor": "#558c3aff", + "background": "#558c3aff", + "selection": "#558c3a3d" + } + ], + "syntax": { + "attribute": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#5a7b8cff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#7ea2b4ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#ebf8ffff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#267eadff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#935b25ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#247eadff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#247eadff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#8a8a0eff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#52809aff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#6a6ab7ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#935b25ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#935c24ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#7ea2b4ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#426f88ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#ebf8ffff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#c1e4f6ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#d22c72ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#c1e4f6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#7ea2b4ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#7ea2b4ff", + "font_style": null, 
+ "font_weight": null + }, + "punctuation.list_marker": { + "color": "#c1e4f6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#b72cd2ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#7ea2b4ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#2c8f6eff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#b72cd2ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#935b25ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#ebf8ffff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#8a8a0eff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#c1e4f6ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#6a6ab7ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#8a8a0eff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Lakeside Light", + "appearance": "light", + "style": { + "border": "#80a4b6ff", + "border.variant": "#b0d3e5ff", + "border.focused": "#b9cee0ff", + "border.selected": "#b9cee0ff", + "border.transparent": "#00000000", + "border.disabled": "#93b7c9ff", + "elevated_surface.background": "#cdeaf9ff", + "surface.background": "#cdeaf9ff", + "background": "#a6cadcff", + "element.background": "#cdeaf9ff", + "element.hover": "#b0d3e5ff", + "element.active": "#82a6b8ff", + "element.selected": "#82a6b8ff", + "element.disabled": "#cdeaf9ff", + "drop_target.background": "#526f7d80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#b0d3e5ff", + "ghost_element.active": "#82a6b8ff", + "ghost_element.selected": "#82a6b8ff", + "ghost_element.disabled": "#cdeaf9ff", + "text": "#161b1dff", + "text.muted": "#526f7dff", + "text.placeholder": "#628496ff", + "text.disabled": "#628496ff", + "text.accent": "#267eadff", + "icon": "#161b1dff", + "icon.muted": "#526f7dff", + "icon.disabled": "#628496ff", + "icon.placeholder": "#526f7dff", + "icon.accent": "#267eadff", + "status_bar.background": "#a6cadcff", + "title_bar.background": "#a6cadcff", + "toolbar.background": "#ebf8ffff", + "tab_bar.background": "#cdeaf9ff", + "tab.inactive_background": "#cdeaf9ff", + "tab.active_background": "#ebf8ffff", + "search.match_background": "#277fad66", + "panel.background": "#cdeaf9ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#161b1d4c", + "scrollbar.thumb.hover_background": "#b0d3e5ff", + "scrollbar.thumb.border": "#b0d3e5ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#d3edfaff", + "editor.foreground": "#1f292eff", + "editor.background": "#ebf8ffff", + "editor.gutter.background": "#ebf8ffff", + "editor.subheader.background": "#cdeaf9ff", + "editor.active_line.background": "#cdeaf9bf", + "editor.highlighted_line.background": "#cdeaf9ff", + "editor.line_number": "#161b1d59", + "editor.active_line_number": "#161b1dff", + "editor.invisible": "#66889aff", + "editor.wrap_guide": "#161b1d0d", + "editor.active_wrap_guide": "#161b1d1a", + "editor.document_highlight.read_background": "#267ead1a", + 
"editor.document_highlight.write_background": "#66889a66", + "terminal.background": "#ebf8ffff", + "terminal.foreground": "#161b1dff", + "terminal.bright_foreground": "#161b1dff", + "terminal.dim_foreground": "#ebf8ffff", + "terminal.ansi.black": "#ebf8ffff", + "terminal.ansi.bright_black": "#7296a9ff", + "terminal.ansi.dim_black": "#161b1dff", + "terminal.ansi.red": "#d22e71ff", + "terminal.ansi.bright_red": "#f09fb6ff", + "terminal.ansi.dim_red": "#6f1c3aff", + "terminal.ansi.green": "#568c3bff", + "terminal.ansi.bright_green": "#aac499ff", + "terminal.ansi.dim_green": "#2e4522ff", + "terminal.ansi.yellow": "#8a8a10ff", + "terminal.ansi.bright_yellow": "#c7c28aff", + "terminal.ansi.dim_yellow": "#454413ff", + "terminal.ansi.blue": "#267eadff", + "terminal.ansi.bright_blue": "#9ebcd6ff", + "terminal.ansi.dim_blue": "#1d3f53ff", + "terminal.ansi.magenta": "#b72ed2ff", + "terminal.ansi.bright_magenta": "#e09fe9ff", + "terminal.ansi.dim_magenta": "#5c1e6bff", + "terminal.ansi.cyan": "#2e8f6eff", + "terminal.ansi.bright_cyan": "#9bc7b5ff", + "terminal.ansi.dim_cyan": "#1e4637ff", + "terminal.ansi.white": "#161b1dff", + "terminal.ansi.bright_white": "#161b1dff", + "terminal.ansi.dim_white": "#577889ff", + "link_text.hover": "#267eadff", + "conflict": "#8a8a10ff", + "conflict.background": "#eae6d0ff", + "conflict.border": "#d8d3abff", + "created": "#568c3bff", + "created.background": "#dde7d5ff", + "created.border": "#c1d4b5ff", + "deleted": "#d22e71ff", + "deleted.background": "#fad7e0ff", + "deleted.border": "#f6bacaff", + "error": "#d22e71ff", + "error.background": "#fad7e0ff", + "error.border": "#f6bacaff", + "hidden": "#628496ff", + "hidden.background": "#a6cadcff", + "hidden.border": "#93b7c9ff", + "hint": "#5a87a0ff", + "hint.background": "#d8e4eeff", + "hint.border": "#b9cee0ff", + "ignored": "#628496ff", + "ignored.background": "#a6cadcff", + "ignored.border": "#80a4b6ff", + "info": "#267eadff", + "info.background": "#d8e4eeff", + "info.border": "#b9cee0ff", + "modified": "#8a8a10ff", + "modified.background": "#eae6d0ff", + "modified.border": "#d8d3abff", + "predictive": "#6a97b2ff", + "predictive.background": "#dde7d5ff", + "predictive.border": "#c1d4b5ff", + "renamed": "#267eadff", + "renamed.background": "#d8e4eeff", + "renamed.border": "#b9cee0ff", + "success": "#568c3bff", + "success.background": "#dde7d5ff", + "success.border": "#c1d4b5ff", + "unreachable": "#526f7dff", + "unreachable.background": "#a6cadcff", + "unreachable.border": "#80a4b6ff", + "warning": "#8a8a10ff", + "warning.background": "#eae6d0ff", + "warning.border": "#d8d3abff", + "players": [ + { + "cursor": "#267eadff", + "background": "#267eadff", + "selection": "#267ead3d" + }, + { + "cursor": "#b72ed2ff", + "background": "#b72ed2ff", + "selection": "#b72ed23d" + }, + { + "cursor": "#935c25ff", + "background": "#935c25ff", + "selection": "#935c253d" + }, + { + "cursor": "#6c6ab7ff", + "background": "#6c6ab7ff", + "selection": "#6c6ab73d" + }, + { + "cursor": "#2e8f6eff", + "background": "#2e8f6eff", + "selection": "#2e8f6e3d" + }, + { + "cursor": "#d22e71ff", + "background": "#d22e71ff", + "selection": "#d22e713d" + }, + { + "cursor": "#8a8a10ff", + "background": "#8a8a10ff", + "selection": "#8a8a103d" + }, + { + "cursor": "#568c3bff", + "background": "#568c3bff", + "selection": "#568c3b3d" + } + ], + "syntax": { + "attribute": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#568c3bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": 
"#7094a7ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#516d7bff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#568c3bff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#161b1dff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#267eadff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#935c25ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#247eadff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#247eadff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#8a8a0eff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#5a87a0ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#6a6ab7ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#935c25ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#568c3bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#935c24ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#516d7bff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#6a97b2ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#161b1dff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#1f292eff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#d22c72ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#1f292eff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#516d7bff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#516d7bff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#1f292eff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#b72cd2ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#516d7bff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#2c8f6eff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#b72cd2ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#558c3aff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#267eadff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#935c25ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#161b1dff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#8a8a0eff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#1f292eff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#6a6ab7ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#8a8a0eff", + "font_style": null, + "font_weight": null + } + 
} + } + }, + { + "name": "Atelier Plateau Dark", + "appearance": "dark", + "style": { + "border": "#564e4eff", + "border.variant": "#352f2fff", + "border.focused": "#2c2b45ff", + "border.selected": "#2c2b45ff", + "border.transparent": "#00000000", + "border.disabled": "#494242ff", + "elevated_surface.background": "#252020ff", + "surface.background": "#252020ff", + "background": "#3b3535ff", + "element.background": "#252020ff", + "element.hover": "#352f2fff", + "element.active": "#554d4dff", + "element.selected": "#554d4dff", + "element.disabled": "#252020ff", + "drop_target.background": "#89838380", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#352f2fff", + "ghost_element.active": "#554d4dff", + "ghost_element.selected": "#554d4dff", + "ghost_element.disabled": "#252020ff", + "text": "#f4ececff", + "text.muted": "#898383ff", + "text.placeholder": "#756e6eff", + "text.disabled": "#756e6eff", + "text.accent": "#7272caff", + "icon": "#f4ececff", + "icon.muted": "#898383ff", + "icon.disabled": "#756e6eff", + "icon.placeholder": "#898383ff", + "icon.accent": "#7272caff", + "status_bar.background": "#3b3535ff", + "title_bar.background": "#3b3535ff", + "toolbar.background": "#1b1818ff", + "tab_bar.background": "#252020ff", + "tab.inactive_background": "#252020ff", + "tab.active_background": "#1b1818ff", + "search.match_background": "#7272ca66", + "panel.background": "#252020ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f4ecec4c", + "scrollbar.thumb.hover_background": "#352f2fff", + "scrollbar.thumb.border": "#352f2fff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#231f1fff", + "editor.foreground": "#e7dfdfff", + "editor.background": "#1b1818ff", + "editor.gutter.background": "#1b1818ff", + "editor.subheader.background": "#252020ff", + "editor.active_line.background": "#252020bf", + "editor.highlighted_line.background": "#252020ff", + "editor.line_number": "#f4ecec59", + "editor.active_line_number": "#f4ececff", + "editor.invisible": "#726a6aff", + "editor.wrap_guide": "#f4ecec0d", + "editor.active_wrap_guide": "#f4ecec1a", + "editor.document_highlight.read_background": "#7272ca1a", + "editor.document_highlight.write_background": "#726a6a66", + "terminal.background": "#1b1818ff", + "terminal.foreground": "#f4ececff", + "terminal.bright_foreground": "#f4ececff", + "terminal.dim_foreground": "#1b1818ff", + "terminal.ansi.black": "#1b1818ff", + "terminal.ansi.bright_black": "#635b5bff", + "terminal.ansi.dim_black": "#f4ececff", + "terminal.ansi.red": "#ca4848ff", + "terminal.ansi.bright_red": "#692727ff", + "terminal.ansi.dim_red": "#eda69fff", + "terminal.ansi.green": "#4b8b8bff", + "terminal.ansi.bright_green": "#2a4444ff", + "terminal.ansi.dim_green": "#a6c4c4ff", + "terminal.ansi.yellow": "#a06d3aff", + "terminal.ansi.bright_yellow": "#4e3821ff", + "terminal.ansi.dim_yellow": "#d4b499ff", + "terminal.ansi.blue": "#7272caff", + "terminal.ansi.bright_blue": "#3b3960ff", + "terminal.ansi.dim_blue": "#bab5e4ff", + "terminal.ansi.magenta": "#bd5187ff", + "terminal.ansi.bright_magenta": "#5b2c42ff", + "terminal.ansi.dim_magenta": "#e2a9c2ff", + "terminal.ansi.cyan": "#5485b6ff", + "terminal.ansi.bright_cyan": "#2e4257ff", + "terminal.ansi.dim_cyan": "#acc0daff", + "terminal.ansi.white": "#f4ececff", + "terminal.ansi.bright_white": "#f4ececff", + "terminal.ansi.dim_white": "#7f7878ff", + "link_text.hover": "#7272caff", + "conflict": "#a06d3aff", + "conflict.background": "#231a12ff", + 
"conflict.border": "#392a19ff", + "created": "#4b8b8bff", + "created.background": "#161e1eff", + "created.border": "#203131ff", + "deleted": "#ca4848ff", + "deleted.background": "#351414ff", + "deleted.border": "#501e1eff", + "error": "#ca4848ff", + "error.background": "#351414ff", + "error.border": "#501e1eff", + "hidden": "#756e6eff", + "hidden.background": "#3b3535ff", + "hidden.border": "#494242ff", + "hint": "#8a647aff", + "hint.background": "#1c1b29ff", + "hint.border": "#2c2b45ff", + "ignored": "#756e6eff", + "ignored.background": "#3b3535ff", + "ignored.border": "#564e4eff", + "info": "#7272caff", + "info.background": "#1c1b29ff", + "info.border": "#2c2b45ff", + "modified": "#a06d3aff", + "modified.background": "#231a12ff", + "modified.border": "#392a19ff", + "predictive": "#795369ff", + "predictive.background": "#161e1eff", + "predictive.border": "#203131ff", + "renamed": "#7272caff", + "renamed.background": "#1c1b29ff", + "renamed.border": "#2c2b45ff", + "success": "#4b8b8bff", + "success.background": "#161e1eff", + "success.border": "#203131ff", + "unreachable": "#898383ff", + "unreachable.background": "#3b3535ff", + "unreachable.border": "#564e4eff", + "warning": "#a06d3aff", + "warning.background": "#231a12ff", + "warning.border": "#392a19ff", + "players": [ + { + "cursor": "#7272caff", + "background": "#7272caff", + "selection": "#7272ca3d" + }, + { + "cursor": "#bd5187ff", + "background": "#bd5187ff", + "selection": "#bd51873d" + }, + { + "cursor": "#b4593bff", + "background": "#b4593bff", + "selection": "#b4593b3d" + }, + { + "cursor": "#8464c4ff", + "background": "#8464c4ff", + "selection": "#8464c43d" + }, + { + "cursor": "#5485b6ff", + "background": "#5485b6ff", + "selection": "#5485b63d" + }, + { + "cursor": "#ca4848ff", + "background": "#ca4848ff", + "selection": "#ca48483d" + }, + { + "cursor": "#a06d3aff", + "background": "#a06d3aff", + "selection": "#a06d3a3d" + }, + { + "cursor": "#4b8b8bff", + "background": "#4b8b8bff", + "selection": "#4b8b8b3d" + } + ], + "syntax": { + "attribute": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#655d5dff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#8a8585ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f4ececff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#7272caff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#b4593bff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8a647aff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#8464c4ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#b4593bff", + "font_style": "italic", + 
"font_weight": null + }, + "link_uri": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#b4593bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#8a8585ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#795369ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f4ececff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e7dfdfff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ca4848ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#e7dfdfff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#8a8585ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#8a8585ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#e7dfdfff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#bd5187ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#8a8585ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#5485b6ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#bd5187ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#b4593bff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f4ececff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#e7dfdfff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#8464c4ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Plateau Light", + "appearance": "light", + "style": { + "border": "#8e8989ff", + "border.variant": "#cfc7c7ff", + "border.focused": "#cecaecff", + "border.selected": "#cecaecff", + "border.transparent": "#00000000", + "border.disabled": "#a8a2a2ff", + "elevated_surface.background": "#ebe3e3ff", + "surface.background": "#ebe3e3ff", + "background": "#c1bbbbff", + "element.background": "#ebe3e3ff", + "element.hover": "#cfc7c7ff", + "element.active": "#908b8bff", + "element.selected": "#908b8bff", + "element.disabled": "#ebe3e3ff", + "drop_target.background": "#5a525280", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#cfc7c7ff", + "ghost_element.active": "#908b8bff", + "ghost_element.selected": "#908b8bff", + "ghost_element.disabled": "#ebe3e3ff", + "text": "#1b1818ff", + "text.muted": "#5a5252ff", + "text.placeholder": "#6e6666ff", + "text.disabled": "#6e6666ff", + "text.accent": "#7272caff", + "icon": "#1b1818ff", + "icon.muted": "#5a5252ff", + "icon.disabled": "#6e6666ff", + "icon.placeholder": "#5a5252ff", + "icon.accent": "#7272caff", + "status_bar.background": "#c1bbbbff", + "title_bar.background": "#c1bbbbff", + "toolbar.background": "#f4ececff", + "tab_bar.background": "#ebe3e3ff", + "tab.inactive_background": "#ebe3e3ff", + 
"tab.active_background": "#f4ececff", + "search.match_background": "#7372ca66", + "panel.background": "#ebe3e3ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#1b18184c", + "scrollbar.thumb.hover_background": "#cfc7c7ff", + "scrollbar.thumb.border": "#cfc7c7ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#ede5e5ff", + "editor.foreground": "#292424ff", + "editor.background": "#f4ececff", + "editor.gutter.background": "#f4ececff", + "editor.subheader.background": "#ebe3e3ff", + "editor.active_line.background": "#ebe3e3bf", + "editor.highlighted_line.background": "#ebe3e3ff", + "editor.line_number": "#1b181859", + "editor.active_line_number": "#1b1818ff", + "editor.invisible": "#726a6aff", + "editor.wrap_guide": "#1b18180d", + "editor.active_wrap_guide": "#1b18181a", + "editor.document_highlight.read_background": "#7272ca1a", + "editor.document_highlight.write_background": "#726a6a66", + "terminal.background": "#f4ececff", + "terminal.foreground": "#1b1818ff", + "terminal.bright_foreground": "#1b1818ff", + "terminal.dim_foreground": "#f4ececff", + "terminal.ansi.black": "#f4ececff", + "terminal.ansi.bright_black": "#7f7878ff", + "terminal.ansi.dim_black": "#1b1818ff", + "terminal.ansi.red": "#ca4a4aff", + "terminal.ansi.bright_red": "#eda69fff", + "terminal.ansi.dim_red": "#692727ff", + "terminal.ansi.green": "#4c8b8bff", + "terminal.ansi.bright_green": "#a6c4c4ff", + "terminal.ansi.dim_green": "#2a4444ff", + "terminal.ansi.yellow": "#a06e3bff", + "terminal.ansi.bright_yellow": "#d4b499ff", + "terminal.ansi.dim_yellow": "#4e3821ff", + "terminal.ansi.blue": "#7272caff", + "terminal.ansi.bright_blue": "#bab5e4ff", + "terminal.ansi.dim_blue": "#3b3960ff", + "terminal.ansi.magenta": "#bd5186ff", + "terminal.ansi.bright_magenta": "#e2a9c2ff", + "terminal.ansi.dim_magenta": "#5b2c42ff", + "terminal.ansi.cyan": "#5485b5ff", + "terminal.ansi.bright_cyan": "#acc0daff", + "terminal.ansi.dim_cyan": "#2e4257ff", + "terminal.ansi.white": "#1b1818ff", + "terminal.ansi.bright_white": "#1b1818ff", + "terminal.ansi.dim_white": "#635b5bff", + "link_text.hover": "#7272caff", + "conflict": "#a06e3bff", + "conflict.background": "#eee0d5ff", + "conflict.border": "#e0c9b5ff", + "created": "#4c8b8bff", + "created.background": "#dae7e7ff", + "created.border": "#bfd4d4ff", + "deleted": "#ca4a4aff", + "deleted.background": "#fadbd7ff", + "deleted.border": "#f4bfbaff", + "error": "#ca4a4aff", + "error.background": "#fadbd7ff", + "error.border": "#f4bfbaff", + "hidden": "#6e6666ff", + "hidden.background": "#c1bbbbff", + "hidden.border": "#a8a2a2ff", + "hint": "#91697fff", + "hint.background": "#e4e1f5ff", + "hint.border": "#cecaecff", + "ignored": "#6e6666ff", + "ignored.background": "#c1bbbbff", + "ignored.border": "#8e8989ff", + "info": "#7272caff", + "info.background": "#e4e1f5ff", + "info.border": "#cecaecff", + "modified": "#a06e3bff", + "modified.background": "#eee0d5ff", + "modified.border": "#e0c9b5ff", + "predictive": "#a27a91ff", + "predictive.background": "#dae7e7ff", + "predictive.border": "#bfd4d4ff", + "renamed": "#7272caff", + "renamed.background": "#e4e1f5ff", + "renamed.border": "#cecaecff", + "success": "#4c8b8bff", + "success.background": "#dae7e7ff", + "success.border": "#bfd4d4ff", + "unreachable": "#5a5252ff", + "unreachable.background": "#c1bbbbff", + "unreachable.border": "#8e8989ff", + "warning": "#a06e3bff", + "warning.background": "#eee0d5ff", + "warning.border": "#e0c9b5ff", + "players": [ + { + "cursor": 
"#7272caff", + "background": "#7272caff", + "selection": "#7272ca3d" + }, + { + "cursor": "#bd5186ff", + "background": "#bd5186ff", + "selection": "#bd51863d" + }, + { + "cursor": "#b45a3cff", + "background": "#b45a3cff", + "selection": "#b45a3c3d" + }, + { + "cursor": "#8464c4ff", + "background": "#8464c4ff", + "selection": "#8464c43d" + }, + { + "cursor": "#5485b5ff", + "background": "#5485b5ff", + "selection": "#5485b53d" + }, + { + "cursor": "#ca4a4aff", + "background": "#ca4a4aff", + "selection": "#ca4a4a3d" + }, + { + "cursor": "#a06e3bff", + "background": "#a06e3bff", + "selection": "#a06e3b3d" + }, + { + "cursor": "#4c8b8bff", + "background": "#4c8b8bff", + "selection": "#4c8b8b3d" + } + ], + "syntax": { + "attribute": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#4c8b8bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#7e7777ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#585050ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#4c8b8bff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#1b1818ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#7272caff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#b45a3cff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#91697fff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#8464c4ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#b45a3cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#4c8b8bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#b4593bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#585050ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#a27a91ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#1b1818ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#292424ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ca4848ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#292424ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#585050ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#585050ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#292424ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#bd5187ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#585050ff", + "font_style": null, + 
"font_weight": null + }, + "string.regex": { + "color": "#5485b6ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#bd5187ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#4b8b8bff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#7272caff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#b45a3cff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#1b1818ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#292424ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#8464c4ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a06d3aff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Savanna Dark", + "appearance": "dark", + "style": { + "border": "#505e55ff", + "border.variant": "#2f3832ff", + "border.focused": "#1f3233ff", + "border.selected": "#1f3233ff", + "border.transparent": "#00000000", + "border.disabled": "#434f47ff", + "elevated_surface.background": "#1f2621ff", + "surface.background": "#1f2621ff", + "background": "#353f39ff", + "element.background": "#1f2621ff", + "element.hover": "#2f3832ff", + "element.active": "#4f5c53ff", + "element.selected": "#4f5c53ff", + "element.disabled": "#1f2621ff", + "drop_target.background": "#85918880", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#2f3832ff", + "ghost_element.active": "#4f5c53ff", + "ghost_element.selected": "#4f5c53ff", + "ghost_element.disabled": "#1f2621ff", + "text": "#ecf4eeff", + "text.muted": "#859188ff", + "text.placeholder": "#6f7e74ff", + "text.disabled": "#6f7e74ff", + "text.accent": "#468b8fff", + "icon": "#ecf4eeff", + "icon.muted": "#859188ff", + "icon.disabled": "#6f7e74ff", + "icon.placeholder": "#859188ff", + "icon.accent": "#468b8fff", + "status_bar.background": "#353f39ff", + "title_bar.background": "#353f39ff", + "toolbar.background": "#171c19ff", + "tab_bar.background": "#1f2621ff", + "tab.inactive_background": "#1f2621ff", + "tab.active_background": "#171c19ff", + "search.match_background": "#478c9066", + "panel.background": "#1f2621ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#ecf4ee4c", + "scrollbar.thumb.hover_background": "#2f3832ff", + "scrollbar.thumb.border": "#2f3832ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#1e2420ff", + "editor.foreground": "#dfe7e2ff", + "editor.background": "#171c19ff", + "editor.gutter.background": "#171c19ff", + "editor.subheader.background": "#1f2621ff", + "editor.active_line.background": "#1f2621bf", + "editor.highlighted_line.background": "#1f2621ff", + "editor.line_number": "#ecf4ee59", + "editor.active_line_number": "#ecf4eeff", + "editor.invisible": "#6c7a71ff", + "editor.wrap_guide": "#ecf4ee0d", + "editor.active_wrap_guide": "#ecf4ee1a", + "editor.document_highlight.read_background": "#468b8f1a", + "editor.document_highlight.write_background": "#6c7a7166", + "terminal.background": "#171c19ff", + "terminal.foreground": "#ecf4eeff", + "terminal.bright_foreground": "#ecf4eeff", + "terminal.dim_foreground": "#171c19ff", + "terminal.ansi.black": "#171c19ff", + "terminal.ansi.bright_black": "#5d6b62ff", + "terminal.ansi.dim_black": "#ecf4eeff", + "terminal.ansi.red": "#b16038ff", + 
"terminal.ansi.bright_red": "#563220ff", + "terminal.ansi.dim_red": "#deae97ff", + "terminal.ansi.green": "#479962ff", + "terminal.ansi.bright_green": "#294a33ff", + "terminal.ansi.dim_green": "#a5ccafff", + "terminal.ansi.yellow": "#a07d3aff", + "terminal.ansi.bright_yellow": "#4e3f22ff", + "terminal.ansi.dim_yellow": "#d3bd9aff", + "terminal.ansi.blue": "#468b8fff", + "terminal.ansi.bright_blue": "#284546ff", + "terminal.ansi.dim_blue": "#a5c5c6ff", + "terminal.ansi.magenta": "#857368ff", + "terminal.ansi.bright_magenta": "#423a36ff", + "terminal.ansi.dim_magenta": "#c2b7b1ff", + "terminal.ansi.cyan": "#1d9aa0ff", + "terminal.ansi.bright_cyan": "#1d4a4dff", + "terminal.ansi.dim_cyan": "#9dcdcfff", + "terminal.ansi.white": "#ecf4eeff", + "terminal.ansi.bright_white": "#ecf4eeff", + "terminal.ansi.dim_white": "#7b897fff", + "link_text.hover": "#468b8fff", + "conflict": "#a07d3aff", + "conflict.background": "#231d12ff", + "conflict.border": "#392e19ff", + "created": "#479962ff", + "created.background": "#162119ff", + "created.border": "#203526ff", + "deleted": "#b16038ff", + "deleted.background": "#261810ff", + "deleted.border": "#3e2619ff", + "error": "#b16038ff", + "error.background": "#261810ff", + "error.border": "#3e2619ff", + "hidden": "#6f7e74ff", + "hidden.background": "#353f39ff", + "hidden.border": "#434f47ff", + "hint": "#607e76ff", + "hint.background": "#151e20ff", + "hint.border": "#1f3233ff", + "ignored": "#6f7e74ff", + "ignored.background": "#353f39ff", + "ignored.border": "#505e55ff", + "info": "#468b8fff", + "info.background": "#151e20ff", + "info.border": "#1f3233ff", + "modified": "#a07d3aff", + "modified.background": "#231d12ff", + "modified.border": "#392e19ff", + "predictive": "#506d66ff", + "predictive.background": "#162119ff", + "predictive.border": "#203526ff", + "renamed": "#468b8fff", + "renamed.background": "#151e20ff", + "renamed.border": "#1f3233ff", + "success": "#479962ff", + "success.background": "#162119ff", + "success.border": "#203526ff", + "unreachable": "#859188ff", + "unreachable.background": "#353f39ff", + "unreachable.border": "#505e55ff", + "warning": "#a07d3aff", + "warning.background": "#231d12ff", + "warning.border": "#392e19ff", + "players": [ + { + "cursor": "#468b8fff", + "background": "#468b8fff", + "selection": "#468b8f3d" + }, + { + "cursor": "#857368ff", + "background": "#857368ff", + "selection": "#8573683d" + }, + { + "cursor": "#9f703bff", + "background": "#9f703bff", + "selection": "#9f703b3d" + }, + { + "cursor": "#55859bff", + "background": "#55859bff", + "selection": "#55859b3d" + }, + { + "cursor": "#1d9aa0ff", + "background": "#1d9aa0ff", + "selection": "#1d9aa03d" + }, + { + "cursor": "#b16038ff", + "background": "#b16038ff", + "selection": "#b160383d" + }, + { + "cursor": "#a07d3aff", + "background": "#a07d3aff", + "selection": "#a07d3a3d" + }, + { + "cursor": "#479962ff", + "background": "#479962ff", + "selection": "#4799623d" + } + ], + "syntax": { + "attribute": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#5f6d64ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#87928aff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#ecf4eeff", + 
"font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#468b8fff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#9f703bff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#607e76ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#55859bff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#9f703bff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#9f703bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#87928aff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#506d66ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#ecf4eeff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#dfe7e2ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#b16038ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#dfe7e2ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#87928aff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#87928aff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#dfe7e2ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#857368ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#87928aff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#1b9aa0ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#857368ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#9f703bff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#ecf4eeff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#dfe7e2ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#55859bff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Savanna Light", + "appearance": "light", + "style": { + "border": "#8b968eff", + "border.variant": "#c8d1cbff", + "border.focused": "#bed4d6ff", + "border.selected": "#bed4d6ff", + "border.transparent": "#00000000", + "border.disabled": "#a3ada6ff", + "elevated_surface.background": "#e3ebe6ff", + "surface.background": "#e3ebe6ff", + 
"background": "#bcc5bfff", + "element.background": "#e3ebe6ff", + "element.hover": "#c8d1cbff", + "element.active": "#8d9890ff", + "element.selected": "#8d9890ff", + "element.disabled": "#e3ebe6ff", + "drop_target.background": "#54625980", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#c8d1cbff", + "ghost_element.active": "#8d9890ff", + "ghost_element.selected": "#8d9890ff", + "ghost_element.disabled": "#e3ebe6ff", + "text": "#171c19ff", + "text.muted": "#546259ff", + "text.placeholder": "#68766dff", + "text.disabled": "#68766dff", + "text.accent": "#488b90ff", + "icon": "#171c19ff", + "icon.muted": "#546259ff", + "icon.disabled": "#68766dff", + "icon.placeholder": "#546259ff", + "icon.accent": "#488b90ff", + "status_bar.background": "#bcc5bfff", + "title_bar.background": "#bcc5bfff", + "toolbar.background": "#ecf4eeff", + "tab_bar.background": "#e3ebe6ff", + "tab.inactive_background": "#e3ebe6ff", + "tab.active_background": "#ecf4eeff", + "search.match_background": "#488c9066", + "panel.background": "#e3ebe6ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#171c194c", + "scrollbar.thumb.hover_background": "#c8d1cbff", + "scrollbar.thumb.border": "#c8d1cbff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#e5ede7ff", + "editor.foreground": "#232a25ff", + "editor.background": "#ecf4eeff", + "editor.gutter.background": "#ecf4eeff", + "editor.subheader.background": "#e3ebe6ff", + "editor.active_line.background": "#e3ebe6bf", + "editor.highlighted_line.background": "#e3ebe6ff", + "editor.line_number": "#171c1959", + "editor.active_line_number": "#171c19ff", + "editor.invisible": "#6c7a71ff", + "editor.wrap_guide": "#171c190d", + "editor.active_wrap_guide": "#171c191a", + "editor.document_highlight.read_background": "#488b901a", + "editor.document_highlight.write_background": "#6c7a7166", + "terminal.background": "#ecf4eeff", + "terminal.foreground": "#171c19ff", + "terminal.bright_foreground": "#171c19ff", + "terminal.dim_foreground": "#ecf4eeff", + "terminal.ansi.black": "#ecf4eeff", + "terminal.ansi.bright_black": "#7b897fff", + "terminal.ansi.dim_black": "#171c19ff", + "terminal.ansi.red": "#b16139ff", + "terminal.ansi.bright_red": "#deae97ff", + "terminal.ansi.dim_red": "#563220ff", + "terminal.ansi.green": "#499963ff", + "terminal.ansi.bright_green": "#a5ccafff", + "terminal.ansi.dim_green": "#294a33ff", + "terminal.ansi.yellow": "#a07d3bff", + "terminal.ansi.bright_yellow": "#d3bd9aff", + "terminal.ansi.dim_yellow": "#4e3f22ff", + "terminal.ansi.blue": "#488b90ff", + "terminal.ansi.bright_blue": "#a5c5c6ff", + "terminal.ansi.dim_blue": "#284546ff", + "terminal.ansi.magenta": "#857368ff", + "terminal.ansi.bright_magenta": "#c2b7b1ff", + "terminal.ansi.dim_magenta": "#423a36ff", + "terminal.ansi.cyan": "#1e9aa0ff", + "terminal.ansi.bright_cyan": "#9dcdcfff", + "terminal.ansi.dim_cyan": "#1d4a4dff", + "terminal.ansi.white": "#171c19ff", + "terminal.ansi.bright_white": "#171c19ff", + "terminal.ansi.dim_white": "#5d6b62ff", + "link_text.hover": "#488b90ff", + "conflict": "#a07d3bff", + "conflict.background": "#eee4d5ff", + "conflict.border": "#dfcfb6ff", + "created": "#499963ff", + "created.background": "#daeadeff", + "created.border": "#bedac5ff", + "deleted": "#b16139ff", + "deleted.background": "#f3ded4ff", + "deleted.border": "#e8c5b4ff", + "error": "#b16139ff", + "error.background": "#f3ded4ff", + "error.border": "#e8c5b4ff", + "hidden": "#68766dff", + "hidden.background": "#bcc5bfff", + 
"hidden.border": "#a3ada6ff", + "hint": "#66847cff", + "hint.background": "#dae7e8ff", + "hint.border": "#bed4d6ff", + "ignored": "#68766dff", + "ignored.background": "#bcc5bfff", + "ignored.border": "#8b968eff", + "info": "#488b90ff", + "info.background": "#dae7e8ff", + "info.border": "#bed4d6ff", + "modified": "#a07d3bff", + "modified.background": "#eee4d5ff", + "modified.border": "#dfcfb6ff", + "predictive": "#75958bff", + "predictive.background": "#daeadeff", + "predictive.border": "#bedac5ff", + "renamed": "#488b90ff", + "renamed.background": "#dae7e8ff", + "renamed.border": "#bed4d6ff", + "success": "#499963ff", + "success.background": "#daeadeff", + "success.border": "#bedac5ff", + "unreachable": "#546259ff", + "unreachable.background": "#bcc5bfff", + "unreachable.border": "#8b968eff", + "warning": "#a07d3bff", + "warning.background": "#eee4d5ff", + "warning.border": "#dfcfb6ff", + "players": [ + { + "cursor": "#488b90ff", + "background": "#488b90ff", + "selection": "#488b903d" + }, + { + "cursor": "#857368ff", + "background": "#857368ff", + "selection": "#8573683d" + }, + { + "cursor": "#9f713cff", + "background": "#9f713cff", + "selection": "#9f713c3d" + }, + { + "cursor": "#55859bff", + "background": "#55859bff", + "selection": "#55859b3d" + }, + { + "cursor": "#1e9aa0ff", + "background": "#1e9aa0ff", + "selection": "#1e9aa03d" + }, + { + "cursor": "#b16139ff", + "background": "#b16139ff", + "selection": "#b161393d" + }, + { + "cursor": "#a07d3bff", + "background": "#a07d3bff", + "selection": "#a07d3b3d" + }, + { + "cursor": "#499963ff", + "background": "#499963ff", + "selection": "#4999633d" + } + ], + "syntax": { + "attribute": { + "color": "#488b90ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#499963ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#77877cff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#526057ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#499963ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#488b90ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#171c19ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#488b90ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#488b90ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#9f713cff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#468b8fff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#66847cff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#55859bff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#488b90ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#9f713cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#499963ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#9f703bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#526057ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#75958bff", + "font_style": "italic", + "font_weight": null + }, 
+ "preproc": { + "color": "#171c19ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#232a25ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#b16038ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#232a25ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#526057ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#526057ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#232a25ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#857368ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#526057ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#1b9aa0ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#857368ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#479962ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#488b90ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#9f713cff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#171c19ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#232a25ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#55859bff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Seaside Dark", + "appearance": "dark", + "style": { + "border": "#5c6c5cff", + "border.variant": "#333b33ff", + "border.focused": "#102667ff", + "border.selected": "#102667ff", + "border.transparent": "#00000000", + "border.disabled": "#4b584bff", + "elevated_surface.background": "#1f231fff", + "surface.background": "#1f231fff", + "background": "#3b453bff", + "element.background": "#1f231fff", + "element.hover": "#333b33ff", + "element.active": "#5a6a5aff", + "element.selected": "#5a6a5aff", + "element.disabled": "#1f231fff", + "drop_target.background": "#8ba48b80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#333b33ff", + "ghost_element.active": "#5a6a5aff", + "ghost_element.selected": "#5a6a5aff", + "ghost_element.disabled": "#1f231fff", + "text": "#f3faf3ff", + "text.muted": "#8ba48bff", + "text.placeholder": "#778f77ff", + "text.disabled": "#778f77ff", + "text.accent": "#3e62f4ff", + "icon": "#f3faf3ff", + "icon.muted": "#8ba48bff", + "icon.disabled": "#778f77ff", + "icon.placeholder": "#8ba48bff", + "icon.accent": "#3e62f4ff", + "status_bar.background": "#3b453bff", + "title_bar.background": "#3b453bff", + "toolbar.background": "#131513ff", + "tab_bar.background": "#1f231fff", + "tab.inactive_background": "#1f231fff", + "tab.active_background": "#131513ff", + "search.match_background": "#3e62f466", + "panel.background": "#1f231fff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f3faf34c", + "scrollbar.thumb.hover_background": "#333b33ff", + "scrollbar.thumb.border": "#333b33ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": 
"#1d201dff", + "editor.foreground": "#cfe8cfff", + "editor.background": "#131513ff", + "editor.gutter.background": "#131513ff", + "editor.subheader.background": "#1f231fff", + "editor.active_line.background": "#1f231fbf", + "editor.highlighted_line.background": "#1f231fff", + "editor.line_number": "#f3faf359", + "editor.active_line_number": "#f3faf3ff", + "editor.invisible": "#738b73ff", + "editor.wrap_guide": "#f3faf30d", + "editor.active_wrap_guide": "#f3faf31a", + "editor.document_highlight.read_background": "#3e62f41a", + "editor.document_highlight.write_background": "#738b7366", + "terminal.background": "#131513ff", + "terminal.foreground": "#f3faf3ff", + "terminal.bright_foreground": "#f3faf3ff", + "terminal.dim_foreground": "#131513ff", + "terminal.ansi.black": "#131513ff", + "terminal.ansi.bright_black": "#667a66ff", + "terminal.ansi.dim_black": "#f3faf3ff", + "terminal.ansi.red": "#e61c3bff", + "terminal.ansi.bright_red": "#840a20ff", + "terminal.ansi.dim_red": "#fe9c97ff", + "terminal.ansi.green": "#2aa329ff", + "terminal.ansi.bright_green": "#204f1bff", + "terminal.ansi.dim_green": "#a0d294ff", + "terminal.ansi.yellow": "#98981bff", + "terminal.ansi.bright_yellow": "#4b4a17ff", + "terminal.ansi.dim_yellow": "#d0ca90ff", + "terminal.ansi.blue": "#3e62f4ff", + "terminal.ansi.bright_blue": "#183385ff", + "terminal.ansi.dim_blue": "#b1adfcff", + "terminal.ansi.magenta": "#e61cc3ff", + "terminal.ansi.bright_magenta": "#810d5fff", + "terminal.ansi.dim_magenta": "#f9a1e1ff", + "terminal.ansi.cyan": "#1b99b3ff", + "terminal.ansi.bright_cyan": "#1c4a56ff", + "terminal.ansi.dim_cyan": "#9fccd9ff", + "terminal.ansi.white": "#f3faf3ff", + "terminal.ansi.bright_white": "#f3faf3ff", + "terminal.ansi.dim_white": "#829b82ff", + "link_text.hover": "#3e62f4ff", + "conflict": "#98981bff", + "conflict.background": "#22210fff", + "conflict.border": "#373614ff", + "created": "#2aa329ff", + "created.background": "#132310ff", + "created.border": "#1a3817ff", + "deleted": "#e61c3bff", + "deleted.background": "#4f0412ff", + "deleted.border": "#6a071aff", + "error": "#e61c3bff", + "error.background": "#4f0412ff", + "error.border": "#6a071aff", + "hidden": "#778f77ff", + "hidden.background": "#3b453bff", + "hidden.border": "#4b584bff", + "hint": "#008b9fff", + "hint.background": "#051949ff", + "hint.border": "#102667ff", + "ignored": "#778f77ff", + "ignored.background": "#3b453bff", + "ignored.border": "#5c6c5cff", + "info": "#3e62f4ff", + "info.background": "#051949ff", + "info.border": "#102667ff", + "modified": "#98981bff", + "modified.background": "#22210fff", + "modified.border": "#373614ff", + "predictive": "#00788bff", + "predictive.background": "#132310ff", + "predictive.border": "#1a3817ff", + "renamed": "#3e62f4ff", + "renamed.background": "#051949ff", + "renamed.border": "#102667ff", + "success": "#2aa329ff", + "success.background": "#132310ff", + "success.border": "#1a3817ff", + "unreachable": "#8ba48bff", + "unreachable.background": "#3b453bff", + "unreachable.border": "#5c6c5cff", + "warning": "#98981bff", + "warning.background": "#22210fff", + "warning.border": "#373614ff", + "players": [ + { + "cursor": "#3e62f4ff", + "background": "#3e62f4ff", + "selection": "#3e62f43d" + }, + { + "cursor": "#e61cc3ff", + "background": "#e61cc3ff", + "selection": "#e61cc33d" + }, + { + "cursor": "#87711dff", + "background": "#87711dff", + "selection": "#87711d3d" + }, + { + "cursor": "#ac2dedff", + "background": "#ac2dedff", + "selection": "#ac2ded3d" + }, + { + "cursor": "#1b99b3ff", + "background": 
"#1b99b3ff", + "selection": "#1b99b33d" + }, + { + "cursor": "#e61c3bff", + "background": "#e61c3bff", + "selection": "#e61c3b3d" + }, + { + "cursor": "#98981bff", + "background": "#98981bff", + "selection": "#98981b3d" + }, + { + "cursor": "#2aa329ff", + "background": "#2aa329ff", + "selection": "#2aa3293d" + } + ], + "syntax": { + "attribute": { + "color": "#3e62f4ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#2aa329ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#687d68ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#8ca68cff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#2aa329ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#3e62f4ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f3faf3ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#3e62f4ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#3e62f4ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#87711dff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#3d62f5ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#3d62f5ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#98981bff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#008b9fff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#ac2aeeff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#3e62f4ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#87711dff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#2aa329ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#87711cff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#8ca68cff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#00788bff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f3faf3ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#cfe8cfff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#e6183bff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#cfe8cfff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#8ca68cff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#8ca68cff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#cfe8cfff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#e618c3ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#28a328ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#8ca68cff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#1899b3ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#e618c3ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#28a328ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#3e62f4ff", + "font_style": null, + "font_weight": null + }, + 
"text.literal": { + "color": "#87711dff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f3faf3ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#98981bff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#cfe8cfff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#ac2aeeff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#98981bff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Seaside Light", + "appearance": "light", + "style": { + "border": "#8ea88eff", + "border.variant": "#bed7beff", + "border.focused": "#c9c4fdff", + "border.selected": "#c9c4fdff", + "border.transparent": "#00000000", + "border.disabled": "#a1bba1ff", + "elevated_surface.background": "#daeedaff", + "surface.background": "#daeedaff", + "background": "#b4ceb4ff", + "element.background": "#daeedaff", + "element.hover": "#bed7beff", + "element.active": "#90aa90ff", + "element.selected": "#90aa90ff", + "element.disabled": "#daeedaff", + "drop_target.background": "#5f705f80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#bed7beff", + "ghost_element.active": "#90aa90ff", + "ghost_element.selected": "#90aa90ff", + "ghost_element.disabled": "#daeedaff", + "text": "#131513ff", + "text.muted": "#5f705fff", + "text.placeholder": "#718771ff", + "text.disabled": "#718771ff", + "text.accent": "#3e61f4ff", + "icon": "#131513ff", + "icon.muted": "#5f705fff", + "icon.disabled": "#718771ff", + "icon.placeholder": "#5f705fff", + "icon.accent": "#3e61f4ff", + "status_bar.background": "#b4ceb4ff", + "title_bar.background": "#b4ceb4ff", + "toolbar.background": "#f3faf3ff", + "tab_bar.background": "#daeedaff", + "tab.inactive_background": "#daeedaff", + "tab.active_background": "#f3faf3ff", + "search.match_background": "#3f62f466", + "panel.background": "#daeedaff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#1315134c", + "scrollbar.thumb.hover_background": "#bed7beff", + "scrollbar.thumb.border": "#bed7beff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#dff0dfff", + "editor.foreground": "#242924ff", + "editor.background": "#f3faf3ff", + "editor.gutter.background": "#f3faf3ff", + "editor.subheader.background": "#daeedaff", + "editor.active_line.background": "#daeedabf", + "editor.highlighted_line.background": "#daeedaff", + "editor.line_number": "#13151359", + "editor.active_line_number": "#131513ff", + "editor.invisible": "#738b73ff", + "editor.wrap_guide": "#1315130d", + "editor.active_wrap_guide": "#1315131a", + "editor.document_highlight.read_background": "#3e61f41a", + "editor.document_highlight.write_background": "#738b7366", + "terminal.background": "#f3faf3ff", + "terminal.foreground": "#131513ff", + "terminal.bright_foreground": "#131513ff", + "terminal.dim_foreground": "#f3faf3ff", + "terminal.ansi.black": "#f3faf3ff", + "terminal.ansi.bright_black": "#829b82ff", + "terminal.ansi.dim_black": "#131513ff", + "terminal.ansi.red": "#e61c3dff", + "terminal.ansi.bright_red": "#fe9c97ff", + "terminal.ansi.dim_red": "#840a20ff", + "terminal.ansi.green": "#2aa32aff", + "terminal.ansi.bright_green": "#a0d294ff", + "terminal.ansi.dim_green": "#204f1bff", + "terminal.ansi.yellow": "#98981cff", + "terminal.ansi.bright_yellow": "#d0ca90ff", + "terminal.ansi.dim_yellow": "#4b4a17ff", + "terminal.ansi.blue": "#3e61f4ff", + "terminal.ansi.bright_blue": 
"#b1adfcff", + "terminal.ansi.dim_blue": "#183385ff", + "terminal.ansi.magenta": "#e61cc2ff", + "terminal.ansi.bright_magenta": "#f9a1e1ff", + "terminal.ansi.dim_magenta": "#810d5fff", + "terminal.ansi.cyan": "#1c99b3ff", + "terminal.ansi.bright_cyan": "#9fccd9ff", + "terminal.ansi.dim_cyan": "#1c4a56ff", + "terminal.ansi.white": "#131513ff", + "terminal.ansi.bright_white": "#131513ff", + "terminal.ansi.dim_white": "#667a66ff", + "link_text.hover": "#3e61f4ff", + "conflict": "#98981cff", + "conflict.background": "#ece8d1ff", + "conflict.border": "#ddd8afff", + "created": "#2aa32aff", + "created.background": "#d9edd4ff", + "created.border": "#bbdeb2ff", + "deleted": "#e61c3dff", + "deleted.background": "#fed7d3ff", + "deleted.border": "#feb8b3ff", + "error": "#e61c3dff", + "error.background": "#fed7d3ff", + "error.border": "#feb8b3ff", + "hidden": "#718771ff", + "hidden.background": "#b4ceb4ff", + "hidden.border": "#a1bba1ff", + "hint": "#008fa1ff", + "hint.background": "#e1ddfeff", + "hint.border": "#c9c4fdff", + "ignored": "#718771ff", + "ignored.background": "#b4ceb4ff", + "ignored.border": "#8ea88eff", + "info": "#3e61f4ff", + "info.background": "#e1ddfeff", + "info.border": "#c9c4fdff", + "modified": "#98981cff", + "modified.background": "#ece8d1ff", + "modified.border": "#ddd8afff", + "predictive": "#00a2b5ff", + "predictive.background": "#d9edd4ff", + "predictive.border": "#bbdeb2ff", + "renamed": "#3e61f4ff", + "renamed.background": "#e1ddfeff", + "renamed.border": "#c9c4fdff", + "success": "#2aa32aff", + "success.background": "#d9edd4ff", + "success.border": "#bbdeb2ff", + "unreachable": "#5f705fff", + "unreachable.background": "#b4ceb4ff", + "unreachable.border": "#8ea88eff", + "warning": "#98981cff", + "warning.background": "#ece8d1ff", + "warning.border": "#ddd8afff", + "players": [ + { + "cursor": "#3e61f4ff", + "background": "#3e61f4ff", + "selection": "#3e61f43d" + }, + { + "cursor": "#e61cc2ff", + "background": "#e61cc2ff", + "selection": "#e61cc23d" + }, + { + "cursor": "#87711fff", + "background": "#87711fff", + "selection": "#87711f3d" + }, + { + "cursor": "#ac2dedff", + "background": "#ac2dedff", + "selection": "#ac2ded3d" + }, + { + "cursor": "#1c99b3ff", + "background": "#1c99b3ff", + "selection": "#1c99b33d" + }, + { + "cursor": "#e61c3dff", + "background": "#e61c3dff", + "selection": "#e61c3d3d" + }, + { + "cursor": "#98981cff", + "background": "#98981cff", + "selection": "#98981c3d" + }, + { + "cursor": "#2aa32aff", + "background": "#2aa32aff", + "selection": "#2aa32a3d" + } + ], + "syntax": { + "attribute": { + "color": "#3e61f4ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#2aa32aff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#809980ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#5e6e5eff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#2aa32aff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#3e61f4ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#131513ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#3e61f4ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#3e61f4ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#87711fff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#3d62f5ff", + "font_style": null, + "font_weight": null + }, + 
"function.method": { + "color": "#3d62f5ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#98981bff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#008fa1ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#ac2aeeff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#3e61f4ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#87711fff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#2aa32aff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#87711cff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#5e6e5eff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#00a2b5ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#131513ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#242924ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#e6183bff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#242924ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#5e6e5eff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#5e6e5eff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#242924ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#e618c3ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#28a328ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#5e6e5eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#1899b3ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#e618c3ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#28a328ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#3e61f4ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#87711fff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#131513ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#98981bff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#242924ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#ac2aeeff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#98981bff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Sulphurpool Dark", + "appearance": "dark", + "style": { + "border": "#5b6385ff", + "border.variant": "#363f62ff", + "border.focused": "#203348ff", + "border.selected": "#203348ff", + "border.transparent": "#00000000", + "border.disabled": "#4d5477ff", + "elevated_surface.background": "#262f51ff", + "surface.background": "#262f51ff", + "background": "#3e4769ff", + "element.background": "#262f51ff", + "element.hover": "#363f62ff", + "element.active": "#5a6284ff", + "element.selected": "#5a6284ff", + "element.disabled": "#262f51ff", + "drop_target.background": "#959bb280", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#363f62ff", + "ghost_element.active": "#5a6284ff", + "ghost_element.selected": "#5a6284ff", + 
"ghost_element.disabled": "#262f51ff", + "text": "#f5f7ffff", + "text.muted": "#959bb2ff", + "text.placeholder": "#7e849eff", + "text.disabled": "#7e849eff", + "text.accent": "#3e8ed0ff", + "icon": "#f5f7ffff", + "icon.muted": "#959bb2ff", + "icon.disabled": "#7e849eff", + "icon.placeholder": "#959bb2ff", + "icon.accent": "#3e8ed0ff", + "status_bar.background": "#3e4769ff", + "title_bar.background": "#3e4769ff", + "toolbar.background": "#202646ff", + "tab_bar.background": "#262f51ff", + "tab.inactive_background": "#262f51ff", + "tab.active_background": "#202646ff", + "search.match_background": "#3e8fd066", + "panel.background": "#262f51ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f5f7ff4c", + "scrollbar.thumb.hover_background": "#363f62ff", + "scrollbar.thumb.border": "#363f62ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#252c4fff", + "editor.foreground": "#dfe2f1ff", + "editor.background": "#202646ff", + "editor.gutter.background": "#202646ff", + "editor.subheader.background": "#262f51ff", + "editor.active_line.background": "#262f51bf", + "editor.highlighted_line.background": "#262f51ff", + "editor.line_number": "#f5f7ff59", + "editor.active_line_number": "#f5f7ffff", + "editor.invisible": "#7a819cff", + "editor.wrap_guide": "#f5f7ff0d", + "editor.active_wrap_guide": "#f5f7ff1a", + "editor.document_highlight.read_background": "#3e8ed01a", + "editor.document_highlight.write_background": "#7a819c66", + "terminal.background": "#202646ff", + "terminal.foreground": "#f5f7ffff", + "terminal.bright_foreground": "#f5f7ffff", + "terminal.dim_foreground": "#202646ff", + "terminal.ansi.black": "#202646ff", + "terminal.ansi.bright_black": "#687091ff", + "terminal.ansi.dim_black": "#f5f7ffff", + "terminal.ansi.red": "#c94922ff", + "terminal.ansi.bright_red": "#6d2616ff", + "terminal.ansi.dim_red": "#eea48bff", + "terminal.ansi.green": "#ac9739ff", + "terminal.ansi.bright_green": "#534921ff", + "terminal.ansi.dim_green": "#d9ca9bff", + "terminal.ansi.yellow": "#c08b30ff", + "terminal.ansi.bright_yellow": "#63441eff", + "terminal.ansi.dim_yellow": "#e5c497ff", + "terminal.ansi.blue": "#3e8ed0ff", + "terminal.ansi.bright_blue": "#274664ff", + "terminal.ansi.dim_blue": "#a9c6e8ff", + "terminal.ansi.magenta": "#9b6279ff", + "terminal.ansi.bright_magenta": "#4c333dff", + "terminal.ansi.dim_magenta": "#cfafbbff", + "terminal.ansi.cyan": "#24a1c9ff", + "terminal.ansi.bright_cyan": "#214d5fff", + "terminal.ansi.dim_cyan": "#a4d0e4ff", + "terminal.ansi.white": "#f5f7ffff", + "terminal.ansi.bright_white": "#f5f7ffff", + "terminal.ansi.dim_white": "#8b90a7ff", + "link_text.hover": "#3e8ed0ff", + "conflict": "#c08b30ff", + "conflict.background": "#301e11ff", + "conflict.border": "#4b3117ff", + "created": "#ac9739ff", + "created.background": "#252113ff", + "created.border": "#3c351bff", + "deleted": "#c94922ff", + "deleted.background": "#3c110cff", + "deleted.border": "#551b12ff", + "error": "#c94922ff", + "error.background": "#3c110cff", + "error.border": "#551b12ff", + "hidden": "#7e849eff", + "hidden.background": "#3e4769ff", + "hidden.border": "#4d5477ff", + "hint": "#6c81a5ff", + "hint.background": "#161f2bff", + "hint.border": "#203348ff", + "ignored": "#7e849eff", + "ignored.background": "#3e4769ff", + "ignored.border": "#5b6385ff", + "info": "#3e8ed0ff", + "info.background": "#161f2bff", + "info.border": "#203348ff", + "modified": "#c08b30ff", + "modified.background": "#301e11ff", + "modified.border": "#4b3117ff", + 
"predictive": "#58709aff", + "predictive.background": "#252113ff", + "predictive.border": "#3c351bff", + "renamed": "#3e8ed0ff", + "renamed.background": "#161f2bff", + "renamed.border": "#203348ff", + "success": "#ac9739ff", + "success.background": "#252113ff", + "success.border": "#3c351bff", + "unreachable": "#959bb2ff", + "unreachable.background": "#3e4769ff", + "unreachable.border": "#5b6385ff", + "warning": "#c08b30ff", + "warning.background": "#301e11ff", + "warning.border": "#4b3117ff", + "players": [ + { + "cursor": "#3e8ed0ff", + "background": "#3e8ed0ff", + "selection": "#3e8ed03d" + }, + { + "cursor": "#9b6279ff", + "background": "#9b6279ff", + "selection": "#9b62793d" + }, + { + "cursor": "#c76a29ff", + "background": "#c76a29ff", + "selection": "#c76a293d" + }, + { + "cursor": "#6679ccff", + "background": "#6679ccff", + "selection": "#6679cc3d" + }, + { + "cursor": "#24a1c9ff", + "background": "#24a1c9ff", + "selection": "#24a1c93d" + }, + { + "cursor": "#c94922ff", + "background": "#c94922ff", + "selection": "#c949223d" + }, + { + "cursor": "#c08b30ff", + "background": "#c08b30ff", + "selection": "#c08b303d" + }, + { + "cursor": "#ac9739ff", + "background": "#ac9739ff", + "selection": "#ac97393d" + } + ], + "syntax": { + "attribute": { + "color": "#3e8ed0ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#ac9739ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#6a7293ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#979db4ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#ac9739ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#3e8ed0ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f5f7ffff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#3e8ed0ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#3e8ed0ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#c76a29ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#3d8fd1ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#3d8fd1ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#c08b2fff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#6c81a5ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#6679ccff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#3e8ed0ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#c76a29ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#ac9739ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#c76a28ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#979db4ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#58709aff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f5f7ffff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#dfe2f1ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#c94821ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#dfe2f1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": 
"#979db4ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#979db4ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#dfe2f1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#9b6279ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#ac9738ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#979db4ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#21a2c9ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#9b6279ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#ac9738ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#3e8ed0ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#c76a29ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f5f7ffff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#c08b2fff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#dfe2f1ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#6679ccff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#c08b2fff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Atelier Sulphurpool Light", + "appearance": "light", + "style": { + "border": "#9a9fb6ff", + "border.variant": "#ccd0e1ff", + "border.focused": "#c2d5efff", + "border.selected": "#c2d5efff", + "border.transparent": "#00000000", + "border.disabled": "#aeb3c7ff", + "elevated_surface.background": "#e5e8f5ff", + "surface.background": "#e5e8f5ff", + "background": "#c1c5d8ff", + "element.background": "#e5e8f5ff", + "element.hover": "#ccd0e1ff", + "element.active": "#9ca1b8ff", + "element.selected": "#9ca1b8ff", + "element.disabled": "#e5e8f5ff", + "drop_target.background": "#5f678980", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#ccd0e1ff", + "ghost_element.active": "#9ca1b8ff", + "ghost_element.selected": "#9ca1b8ff", + "ghost_element.disabled": "#e5e8f5ff", + "text": "#202646ff", + "text.muted": "#5f6789ff", + "text.placeholder": "#767d9aff", + "text.disabled": "#767d9aff", + "text.accent": "#3e8fd0ff", + "icon": "#202646ff", + "icon.muted": "#5f6789ff", + "icon.disabled": "#767d9aff", + "icon.placeholder": "#5f6789ff", + "icon.accent": "#3e8fd0ff", + "status_bar.background": "#c1c5d8ff", + "title_bar.background": "#c1c5d8ff", + "toolbar.background": "#f5f7ffff", + "tab_bar.background": "#e5e8f5ff", + "tab.inactive_background": "#e5e8f5ff", + "tab.active_background": "#f5f7ffff", + "search.match_background": "#3f8fd066", + "panel.background": "#e5e8f5ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#2026464c", + "scrollbar.thumb.hover_background": "#ccd0e1ff", + "scrollbar.thumb.border": "#ccd0e1ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#e9ebf7ff", + "editor.foreground": "#293256ff", + "editor.background": "#f5f7ffff", + "editor.gutter.background": "#f5f7ffff", + "editor.subheader.background": "#e5e8f5ff", + "editor.active_line.background": "#e5e8f5bf", + "editor.highlighted_line.background": "#e5e8f5ff", + "editor.line_number": "#20264659", + "editor.active_line_number": "#202646ff", + "editor.invisible": "#7a819cff", + 
"editor.wrap_guide": "#2026460d", + "editor.active_wrap_guide": "#2026461a", + "editor.document_highlight.read_background": "#3e8fd01a", + "editor.document_highlight.write_background": "#7a819c66", + "terminal.background": "#f5f7ffff", + "terminal.foreground": "#202646ff", + "terminal.bright_foreground": "#202646ff", + "terminal.dim_foreground": "#f5f7ffff", + "terminal.ansi.black": "#f5f7ffff", + "terminal.ansi.bright_black": "#8b90a7ff", + "terminal.ansi.dim_black": "#202646ff", + "terminal.ansi.red": "#c94922ff", + "terminal.ansi.bright_red": "#eea48bff", + "terminal.ansi.dim_red": "#6d2616ff", + "terminal.ansi.green": "#ac9739ff", + "terminal.ansi.bright_green": "#d9ca9bff", + "terminal.ansi.dim_green": "#534921ff", + "terminal.ansi.yellow": "#c08b30ff", + "terminal.ansi.bright_yellow": "#e5c497ff", + "terminal.ansi.dim_yellow": "#63441eff", + "terminal.ansi.blue": "#3e8fd0ff", + "terminal.ansi.bright_blue": "#a9c6e8ff", + "terminal.ansi.dim_blue": "#274664ff", + "terminal.ansi.magenta": "#9b6279ff", + "terminal.ansi.bright_magenta": "#cfafbbff", + "terminal.ansi.dim_magenta": "#4c333dff", + "terminal.ansi.cyan": "#24a1c9ff", + "terminal.ansi.bright_cyan": "#a4d0e4ff", + "terminal.ansi.dim_cyan": "#214d5fff", + "terminal.ansi.white": "#202646ff", + "terminal.ansi.bright_white": "#202646ff", + "terminal.ansi.dim_white": "#687091ff", + "link_text.hover": "#3e8fd0ff", + "conflict": "#c08b30ff", + "conflict.background": "#f6e6d4ff", + "conflict.border": "#eed4b3ff", + "created": "#ac9739ff", + "created.background": "#f0e8d5ff", + "created.border": "#e4d8b7ff", + "deleted": "#c94922ff", + "deleted.background": "#fcdad0ff", + "deleted.border": "#f6beabff", + "error": "#c94922ff", + "error.background": "#fcdad0ff", + "error.border": "#f6beabff", + "hidden": "#767d9aff", + "hidden.background": "#c1c5d8ff", + "hidden.border": "#aeb3c7ff", + "hint": "#7087b2ff", + "hint.background": "#dde7f6ff", + "hint.border": "#c2d5efff", + "ignored": "#767d9aff", + "ignored.background": "#c1c5d8ff", + "ignored.border": "#9a9fb6ff", + "info": "#3e8fd0ff", + "info.background": "#dde7f6ff", + "info.border": "#c2d5efff", + "modified": "#c08b30ff", + "modified.background": "#f6e6d4ff", + "modified.border": "#eed4b3ff", + "predictive": "#8599beff", + "predictive.background": "#f0e8d5ff", + "predictive.border": "#e4d8b7ff", + "renamed": "#3e8fd0ff", + "renamed.background": "#dde7f6ff", + "renamed.border": "#c2d5efff", + "success": "#ac9739ff", + "success.background": "#f0e8d5ff", + "success.border": "#e4d8b7ff", + "unreachable": "#5f6789ff", + "unreachable.background": "#c1c5d8ff", + "unreachable.border": "#9a9fb6ff", + "warning": "#c08b30ff", + "warning.background": "#f6e6d4ff", + "warning.border": "#eed4b3ff", + "players": [ + { + "cursor": "#3e8fd0ff", + "background": "#3e8fd0ff", + "selection": "#3e8fd03d" + }, + { + "cursor": "#9b6279ff", + "background": "#9b6279ff", + "selection": "#9b62793d" + }, + { + "cursor": "#c76a29ff", + "background": "#c76a29ff", + "selection": "#c76a293d" + }, + { + "cursor": "#6679cbff", + "background": "#6679cbff", + "selection": "#6679cb3d" + }, + { + "cursor": "#24a1c9ff", + "background": "#24a1c9ff", + "selection": "#24a1c93d" + }, + { + "cursor": "#c94922ff", + "background": "#c94922ff", + "selection": "#c949223d" + }, + { + "cursor": "#c08b30ff", + "background": "#c08b30ff", + "selection": "#c08b303d" + }, + { + "cursor": "#ac9739ff", + "background": "#ac9739ff", + "selection": "#ac97393d" + } + ], + "syntax": { + "attribute": { + "color": "#3e8fd0ff", + "font_style": null, + 
"font_weight": null + }, + "boolean": { + "color": "#ac9739ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#898ea4ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#5d6587ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#ac9739ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#3e8fd0ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#202646ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#3e8fd0ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#3e8fd0ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#c76a29ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#3d8fd1ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#3d8fd1ff", + "font_style": null, + "font_weight": null + }, + "function.special.definition": { + "color": "#c08b2fff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#7087b2ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#6679ccff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#3e8fd0ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#c76a29ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#ac9739ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#c76a28ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#5d6587ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#8599beff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#202646ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#293256ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#c94821ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#293256ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#5d6587ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#5d6587ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#293256ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#9b6279ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#ac9738ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#5d6587ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#21a2c9ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#9b6279ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#ac9738ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#3e8fd0ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#c76a29ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#202646ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#c08b2fff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#293256ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#6679ccff", 
+ "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#c08b2fff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/ayu/LICENSE b/assets/themes/ayu/LICENSE new file mode 100644 index 0000000..37a9229 --- /dev/null +++ b/assets/themes/ayu/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Ike Ku + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json new file mode 100644 index 0000000..e83d35f --- /dev/null +++ b/assets/themes/ayu/ayu.json @@ -0,0 +1,1113 @@ +{ + "name": "Ayu", + "author": "Zed Industries", + "themes": [ + { + "name": "Ayu Dark", + "appearance": "dark", + "style": { + "border": "#3f4043ff", + "border.variant": "#2d2f34ff", + "border.focused": "#1b4a6eff", + "border.selected": "#1b4a6eff", + "border.transparent": "#00000000", + "border.disabled": "#383a3eff", + "elevated_surface.background": "#1f2127ff", + "surface.background": "#1f2127ff", + "background": "#313337ff", + "element.background": "#1f2127ff", + "element.hover": "#2d2f34ff", + "element.active": "#3e4043ff", + "element.selected": "#3e4043ff", + "element.disabled": "#1f2127ff", + "drop_target.background": "#8a898680", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#2d2f34ff", + "ghost_element.active": "#3e4043ff", + "ghost_element.selected": "#3e4043ff", + "ghost_element.disabled": "#1f2127ff", + "text": "#bfbdb6ff", + "text.muted": "#8a8986ff", + "text.placeholder": "#696a6aff", + "text.disabled": "#696a6aff", + "text.accent": "#5ac1feff", + "icon": "#bfbdb6ff", + "icon.muted": "#8a8986ff", + "icon.disabled": "#696a6aff", + "icon.placeholder": "#8a8986ff", + "icon.accent": "#5ac1feff", + "status_bar.background": "#313337ff", + "title_bar.background": "#313337ff", + "toolbar.background": "#0d1016ff", + "tab_bar.background": "#1f2127ff", + "tab.inactive_background": "#1f2127ff", + "tab.active_background": "#0d1016ff", + "search.match_background": "#5ac2fe66", + "panel.background": "#1f2127ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#bfbdb64c", + "scrollbar.thumb.hover_background": "#2d2f34ff", + "scrollbar.thumb.border": "#2d2f34ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#1b1e24ff", + "editor.foreground": "#bfbdb6ff", + "editor.background": "#0d1016ff", + "editor.gutter.background": "#0d1016ff", + "editor.subheader.background": "#1f2127ff", + "editor.active_line.background": 
"#1f2127bf", + "editor.highlighted_line.background": "#1f2127ff", + "editor.line_number": "#bfbdb659", + "editor.active_line_number": "#bfbdb6ff", + "editor.invisible": "#666767ff", + "editor.wrap_guide": "#bfbdb60d", + "editor.active_wrap_guide": "#bfbdb61a", + "editor.document_highlight.read_background": "#5ac1fe1a", + "editor.document_highlight.write_background": "#66676766", + "terminal.background": "#0d1016ff", + "terminal.foreground": "#bfbdb6ff", + "terminal.bright_foreground": "#bfbdb6ff", + "terminal.dim_foreground": "#0d1016ff", + "terminal.ansi.black": "#0d1016ff", + "terminal.ansi.bright_black": "#545557ff", + "terminal.ansi.dim_black": "#bfbdb6ff", + "terminal.ansi.red": "#ef7177ff", + "terminal.ansi.bright_red": "#83353bff", + "terminal.ansi.dim_red": "#febab9ff", + "terminal.ansi.green": "#aad84cff", + "terminal.ansi.bright_green": "#567627ff", + "terminal.ansi.dim_green": "#d8eca8ff", + "terminal.ansi.yellow": "#feb454ff", + "terminal.ansi.bright_yellow": "#92582bff", + "terminal.ansi.dim_yellow": "#ffd9aaff", + "terminal.ansi.blue": "#5ac1feff", + "terminal.ansi.bright_blue": "#27618cff", + "terminal.ansi.dim_blue": "#b7dffeff", + "terminal.ansi.magenta": "#39bae5ff", + "terminal.ansi.bright_magenta": "#205a78ff", + "terminal.ansi.dim_magenta": "#addcf3ff", + "terminal.ansi.cyan": "#95e5cbff", + "terminal.ansi.bright_cyan": "#4c806fff", + "terminal.ansi.dim_cyan": "#cbf2e4ff", + "terminal.ansi.white": "#bfbdb6ff", + "terminal.ansi.bright_white": "#bfbdb6ff", + "terminal.ansi.dim_white": "#787876ff", + "link_text.hover": "#5ac1feff", + "conflict": "#feb454ff", + "conflict.background": "#572815ff", + "conflict.border": "#754221ff", + "created": "#aad84cff", + "created.background": "#294113ff", + "created.border": "#405c1cff", + "deleted": "#ef7177ff", + "deleted.background": "#48161bff", + "deleted.border": "#66272dff", + "error": "#ef7177ff", + "error.background": "#48161bff", + "error.border": "#66272dff", + "hidden": "#696a6aff", + "hidden.background": "#313337ff", + "hidden.border": "#383a3eff", + "hint": "#628b80ff", + "hint.background": "#0d2f4eff", + "hint.border": "#1b4a6eff", + "ignored": "#696a6aff", + "ignored.background": "#313337ff", + "ignored.border": "#3f4043ff", + "info": "#5ac1feff", + "info.background": "#0d2f4eff", + "info.border": "#1b4a6eff", + "modified": "#feb454ff", + "modified.background": "#572815ff", + "modified.border": "#754221ff", + "predictive": "#5a728bff", + "predictive.background": "#294113ff", + "predictive.border": "#405c1cff", + "renamed": "#5ac1feff", + "renamed.background": "#0d2f4eff", + "renamed.border": "#1b4a6eff", + "success": "#aad84cff", + "success.background": "#294113ff", + "success.border": "#405c1cff", + "unreachable": "#8a8986ff", + "unreachable.background": "#313337ff", + "unreachable.border": "#3f4043ff", + "warning": "#feb454ff", + "warning.background": "#572815ff", + "warning.border": "#754221ff", + "players": [ + { + "cursor": "#5ac1feff", + "background": "#5ac1feff", + "selection": "#5ac1fe3d" + }, + { + "cursor": "#39bae5ff", + "background": "#39bae5ff", + "selection": "#39bae53d" + }, + { + "cursor": "#fe8f40ff", + "background": "#fe8f40ff", + "selection": "#fe8f403d" + }, + { + "cursor": "#d2a6feff", + "background": "#d2a6feff", + "selection": "#d2a6fe3d" + }, + { + "cursor": "#95e5cbff", + "background": "#95e5cbff", + "selection": "#95e5cb3d" + }, + { + "cursor": "#ef7177ff", + "background": "#ef7177ff", + "selection": "#ef71773d" + }, + { + "cursor": "#feb454ff", + "background": "#feb454ff", + "selection": 
"#feb4543d" + }, + { + "cursor": "#aad84cff", + "background": "#aad84cff", + "selection": "#aad84c3d" + } + ], + "syntax": { + "attribute": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#d2a6ffff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#abb5be8c", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#8c8b88ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#d2a6ffff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#bfbdb6ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#fe8f40ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#ffb353ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#628b80ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#ff8f3fff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#fe8f40ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#aad84cff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#d2a6ffff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#f29668ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#5a728bff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#bfbdb6ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#bfbdb6ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#a6a5a0ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#a6a5a0ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#a6a5a0ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#a6a5a0ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#d2a6ffff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#a9d94bff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#8c8b88ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#95e6cbff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#e5b572ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#fe8f40ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#5ac1feff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#fe8f40ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#bfbdb6ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#59c2ffff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#bfbdb6ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#5ac1feff", + "font_style": null, 
+ "font_weight": null + } + } + } + }, + { + "name": "Ayu Light", + "appearance": "light", + "style": { + "border": "#cfd1d2ff", + "border.variant": "#dfe0e1ff", + "border.focused": "#c4daf6ff", + "border.selected": "#c4daf6ff", + "border.transparent": "#00000000", + "border.disabled": "#d5d6d8ff", + "elevated_surface.background": "#ececedff", + "surface.background": "#ececedff", + "background": "#dcdddeff", + "element.background": "#ececedff", + "element.hover": "#dfe0e1ff", + "element.active": "#cfd0d2ff", + "element.selected": "#cfd0d2ff", + "element.disabled": "#ececedff", + "drop_target.background": "#8b8e9280", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#dfe0e1ff", + "ghost_element.active": "#cfd0d2ff", + "ghost_element.selected": "#cfd0d2ff", + "ghost_element.disabled": "#ececedff", + "text": "#5c6166ff", + "text.muted": "#8b8e92ff", + "text.placeholder": "#a9acaeff", + "text.disabled": "#a9acaeff", + "text.accent": "#3b9ee5ff", + "icon": "#5c6166ff", + "icon.muted": "#8b8e92ff", + "icon.disabled": "#a9acaeff", + "icon.placeholder": "#8b8e92ff", + "icon.accent": "#3b9ee5ff", + "status_bar.background": "#dcdddeff", + "title_bar.background": "#dcdddeff", + "toolbar.background": "#fcfcfcff", + "tab_bar.background": "#ececedff", + "tab.inactive_background": "#ececedff", + "tab.active_background": "#fcfcfcff", + "search.match_background": "#3b9ee566", + "panel.background": "#ececedff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#5c61664c", + "scrollbar.thumb.hover_background": "#dfe0e1ff", + "scrollbar.thumb.border": "#dfe0e1ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#efeff0ff", + "editor.foreground": "#5c6166ff", + "editor.background": "#fcfcfcff", + "editor.gutter.background": "#fcfcfcff", + "editor.subheader.background": "#ececedff", + "editor.active_line.background": "#ececedbf", + "editor.highlighted_line.background": "#ececedff", + "editor.line_number": "#5c616659", + "editor.active_line_number": "#5c6166ff", + "editor.invisible": "#acafb1ff", + "editor.wrap_guide": "#5c61660d", + "editor.active_wrap_guide": "#5c61661a", + "editor.document_highlight.read_background": "#3b9ee51a", + "editor.document_highlight.write_background": "#acafb166", + "terminal.background": "#fcfcfcff", + "terminal.foreground": "#5c6166ff", + "terminal.bright_foreground": "#5c6166ff", + "terminal.dim_foreground": "#fcfcfcff", + "terminal.ansi.black": "#fcfcfcff", + "terminal.ansi.bright_black": "#bcbec0ff", + "terminal.ansi.dim_black": "#5c6166ff", + "terminal.ansi.red": "#ef7271ff", + "terminal.ansi.bright_red": "#febab6ff", + "terminal.ansi.dim_red": "#833538ff", + "terminal.ansi.green": "#85b304ff", + "terminal.ansi.bright_green": "#c7d98fff", + "terminal.ansi.dim_green": "#445613ff", + "terminal.ansi.yellow": "#f1ad49ff", + "terminal.ansi.bright_yellow": "#fed5a3ff", + "terminal.ansi.dim_yellow": "#8a5227ff", + "terminal.ansi.blue": "#3b9ee5ff", + "terminal.ansi.bright_blue": "#abcdf2ff", + "terminal.ansi.dim_blue": "#214c76ff", + "terminal.ansi.magenta": "#55b4d3ff", + "terminal.ansi.bright_magenta": "#b1d8e8ff", + "terminal.ansi.dim_magenta": "#2f5669ff", + "terminal.ansi.cyan": "#4dbf99ff", + "terminal.ansi.bright_cyan": "#ace0cbff", + "terminal.ansi.dim_cyan": "#2a5f4aff", + "terminal.ansi.white": "#5c6166ff", + "terminal.ansi.bright_white": "#5c6166ff", + "terminal.ansi.dim_white": "#9c9fa2ff", + "link_text.hover": "#3b9ee5ff", + "conflict": "#f1ad49ff", + "conflict.background": 
"#ffeedaff", + "conflict.border": "#ffe1beff", + "created": "#85b304ff", + "created.background": "#e9efd2ff", + "created.border": "#d7e3aeff", + "deleted": "#ef7271ff", + "deleted.background": "#ffe3e1ff", + "deleted.border": "#ffcdcaff", + "error": "#ef7271ff", + "error.background": "#ffe3e1ff", + "error.border": "#ffcdcaff", + "hidden": "#a9acaeff", + "hidden.background": "#dcdddeff", + "hidden.border": "#d5d6d8ff", + "hint": "#8ca7c2ff", + "hint.background": "#deebfaff", + "hint.border": "#c4daf6ff", + "ignored": "#a9acaeff", + "ignored.background": "#dcdddeff", + "ignored.border": "#cfd1d2ff", + "info": "#3b9ee5ff", + "info.background": "#deebfaff", + "info.border": "#c4daf6ff", + "modified": "#f1ad49ff", + "modified.background": "#ffeedaff", + "modified.border": "#ffe1beff", + "predictive": "#9eb9d3ff", + "predictive.background": "#e9efd2ff", + "predictive.border": "#d7e3aeff", + "renamed": "#3b9ee5ff", + "renamed.background": "#deebfaff", + "renamed.border": "#c4daf6ff", + "success": "#85b304ff", + "success.background": "#e9efd2ff", + "success.border": "#d7e3aeff", + "unreachable": "#8b8e92ff", + "unreachable.background": "#dcdddeff", + "unreachable.border": "#cfd1d2ff", + "warning": "#f1ad49ff", + "warning.background": "#ffeedaff", + "warning.border": "#ffe1beff", + "players": [ + { + "cursor": "#3b9ee5ff", + "background": "#3b9ee5ff", + "selection": "#3b9ee53d" + }, + { + "cursor": "#55b4d3ff", + "background": "#55b4d3ff", + "selection": "#55b4d33d" + }, + { + "cursor": "#f98d3fff", + "background": "#f98d3fff", + "selection": "#f98d3f3d" + }, + { + "cursor": "#a37accff", + "background": "#a37accff", + "selection": "#a37acc3d" + }, + { + "cursor": "#4dbf99ff", + "background": "#4dbf99ff", + "selection": "#4dbf993d" + }, + { + "cursor": "#ef7271ff", + "background": "#ef7271ff", + "selection": "#ef72713d" + }, + { + "cursor": "#f1ad49ff", + "background": "#f1ad49ff", + "selection": "#f1ad493d" + }, + { + "cursor": "#85b304ff", + "background": "#85b304ff", + "selection": "#85b3043d" + } + ], + "syntax": { + "attribute": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#a37accff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#787b8099", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#898d90ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#a37accff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#5c6166ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#f98d3fff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#f2ad48ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8ca7c2ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#fa8d3eff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#f98d3fff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#85b304ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#a37accff", + "font_style": null, + "font_weight": null 
+ }, + "operator": { + "color": "#ed9365ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#9eb9d3ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#5c6166ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#5c6166ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#73777bff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#73777bff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#73777bff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#73777bff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#a37accff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#86b300ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#898d90ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#4bbf98ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#e6ba7eff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#f98d3fff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#f98d3fff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#5c6166ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#389ee6ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#5c6166ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#3b9ee5ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Ayu Mirage", + "appearance": "dark", + "style": { + "border": "#53565dff", + "border.variant": "#43464fff", + "border.focused": "#24556fff", + "border.selected": "#24556fff", + "border.transparent": "#00000000", + "border.disabled": "#4d5058ff", + "elevated_surface.background": "#353944ff", + "surface.background": "#353944ff", + "background": "#464a52ff", + "element.background": "#353944ff", + "element.hover": "#43464fff", + "element.active": "#53565dff", + "element.selected": "#53565dff", + "element.disabled": "#353944ff", + "drop_target.background": "#9a9a9880", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#43464fff", + "ghost_element.active": "#53565dff", + "ghost_element.selected": "#53565dff", + "ghost_element.disabled": "#353944ff", + "text": "#cccac2ff", + "text.muted": "#9a9a98ff", + "text.placeholder": "#7b7d7fff", + "text.disabled": "#7b7d7fff", + "text.accent": "#72cffeff", + "icon": "#cccac2ff", + "icon.muted": "#9a9a98ff", + "icon.disabled": "#7b7d7fff", + "icon.placeholder": "#9a9a98ff", + "icon.accent": "#72cffeff", + "status_bar.background": "#464a52ff", + "title_bar.background": "#464a52ff", + "toolbar.background": "#242835ff", + "tab_bar.background": "#353944ff", + "tab.inactive_background": "#353944ff", + "tab.active_background": "#242835ff", + "search.match_background": "#73cffe66", + "panel.background": "#353944ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#cccac24c", + "scrollbar.thumb.hover_background": "#43464fff", + "scrollbar.thumb.border": 
"#43464fff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#323641ff", + "editor.foreground": "#cccac2ff", + "editor.background": "#242835ff", + "editor.gutter.background": "#242835ff", + "editor.subheader.background": "#353944ff", + "editor.active_line.background": "#353944bf", + "editor.highlighted_line.background": "#353944ff", + "editor.line_number": "#cccac259", + "editor.active_line_number": "#cccac2ff", + "editor.invisible": "#787a7cff", + "editor.wrap_guide": "#cccac20d", + "editor.active_wrap_guide": "#cccac21a", + "editor.document_highlight.read_background": "#72cffe1a", + "editor.document_highlight.write_background": "#787a7c66", + "terminal.background": "#242835ff", + "terminal.foreground": "#cccac2ff", + "terminal.bright_foreground": "#cccac2ff", + "terminal.dim_foreground": "#242835ff", + "terminal.ansi.black": "#242835ff", + "terminal.ansi.bright_black": "#67696eff", + "terminal.ansi.dim_black": "#cccac2ff", + "terminal.ansi.red": "#f18779ff", + "terminal.ansi.bright_red": "#833f3cff", + "terminal.ansi.dim_red": "#fec4baff", + "terminal.ansi.green": "#d5fe80ff", + "terminal.ansi.bright_green": "#75993cff", + "terminal.ansi.dim_green": "#ecffc1ff", + "terminal.ansi.yellow": "#fecf72ff", + "terminal.ansi.bright_yellow": "#937237ff", + "terminal.ansi.dim_yellow": "#ffe7b9ff", + "terminal.ansi.blue": "#72cffeff", + "terminal.ansi.bright_blue": "#336d8dff", + "terminal.ansi.dim_blue": "#c1e7ffff", + "terminal.ansi.magenta": "#5bcde5ff", + "terminal.ansi.bright_magenta": "#2b6c7bff", + "terminal.ansi.dim_magenta": "#b7e7f2ff", + "terminal.ansi.cyan": "#95e5cbff", + "terminal.ansi.bright_cyan": "#4c806fff", + "terminal.ansi.dim_cyan": "#cbf2e4ff", + "terminal.ansi.white": "#cccac2ff", + "terminal.ansi.bright_white": "#cccac2ff", + "terminal.ansi.dim_white": "#898a8aff", + "link_text.hover": "#72cffeff", + "conflict": "#fecf72ff", + "conflict.background": "#574018ff", + "conflict.border": "#765a29ff", + "created": "#d5fe80ff", + "created.background": "#426117ff", + "created.border": "#5d7e2cff", + "deleted": "#f18779ff", + "deleted.background": "#481a1bff", + "deleted.border": "#662e2dff", + "error": "#f18779ff", + "error.background": "#481a1bff", + "error.border": "#662e2dff", + "hidden": "#7b7d7fff", + "hidden.background": "#464a52ff", + "hidden.border": "#4d5058ff", + "hint": "#7399a3ff", + "hint.background": "#123950ff", + "hint.border": "#24556fff", + "ignored": "#7b7d7fff", + "ignored.background": "#464a52ff", + "ignored.border": "#53565dff", + "info": "#72cffeff", + "info.background": "#123950ff", + "info.border": "#24556fff", + "modified": "#fecf72ff", + "modified.background": "#574018ff", + "modified.border": "#765a29ff", + "predictive": "#6d839bff", + "predictive.background": "#426117ff", + "predictive.border": "#5d7e2cff", + "renamed": "#72cffeff", + "renamed.background": "#123950ff", + "renamed.border": "#24556fff", + "success": "#d5fe80ff", + "success.background": "#426117ff", + "success.border": "#5d7e2cff", + "unreachable": "#9a9a98ff", + "unreachable.background": "#464a52ff", + "unreachable.border": "#53565dff", + "warning": "#fecf72ff", + "warning.background": "#574018ff", + "warning.border": "#765a29ff", + "players": [ + { + "cursor": "#72cffeff", + "background": "#72cffeff", + "selection": "#72cffe3d" + }, + { + "cursor": "#5bcde5ff", + "background": "#5bcde5ff", + "selection": "#5bcde53d" + }, + { + "cursor": "#fead66ff", + "background": "#fead66ff", + "selection": "#fead663d" + }, + { + "cursor": "#debffeff", + "background": 
"#debffeff", + "selection": "#debffe3d" + }, + { + "cursor": "#95e5cbff", + "background": "#95e5cbff", + "selection": "#95e5cb3d" + }, + { + "cursor": "#f18779ff", + "background": "#f18779ff", + "selection": "#f187793d" + }, + { + "cursor": "#fecf72ff", + "background": "#fecf72ff", + "selection": "#fecf723d" + }, + { + "cursor": "#d5fe80ff", + "background": "#d5fe80ff", + "selection": "#d5fe803d" + } + ], + "syntax": { + "attribute": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#dfbfffff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#b8cfe680", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#9b9b99ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#dfbfffff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#cccac2ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#72cffeff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#fead66ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#ffd173ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#7399a3ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#ffad65ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#fead66ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#d5fe80ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#dfbfffff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#f29e74ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#6d839bff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#cccac2ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#cccac2ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#b4b3aeff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#b4b3aeff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#b4b3aeff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#b4b3aeff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#dfbfffff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#d4fe7fff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#9b9b99ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#95e6cbff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#ffdfb3ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#fead66ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#fead66ff", + "font_style": null, + "font_weight": null + }, + "title": { + 
"color": "#cccac2ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#73cfffff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#cccac2ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#72cffeff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/gruvbox/LICENSE b/assets/themes/gruvbox/LICENSE new file mode 100644 index 0000000..0e18d6d --- /dev/null +++ b/assets/themes/gruvbox/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json new file mode 100644 index 0000000..5089f6c --- /dev/null +++ b/assets/themes/gruvbox/gruvbox.json @@ -0,0 +1,2250 @@ +{ + "name": "Gruvbox", + "author": "Zed Industries", + "themes": [ + { + "name": "Gruvbox Dark", + "appearance": "dark", + "style": { + "border": "#5b534dff", + "border.variant": "#494340ff", + "border.focused": "#303a36ff", + "border.selected": "#303a36ff", + "border.transparent": "#00000000", + "border.disabled": "#544c48ff", + "elevated_surface.background": "#3a3735ff", + "surface.background": "#3a3735ff", + "background": "#4c4642ff", + "element.background": "#3a3735ff", + "element.hover": "#494340ff", + "element.active": "#5b524cff", + "element.selected": "#5b524cff", + "element.disabled": "#3a3735ff", + "drop_target.background": "#c5b59780", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#494340ff", + "ghost_element.active": "#5b524cff", + "ghost_element.selected": "#5b524cff", + "ghost_element.disabled": "#3a3735ff", + "text": "#fbf1c7ff", + "text.muted": "#c5b597ff", + "text.placeholder": "#998b78ff", + "text.disabled": "#998b78ff", + "text.accent": "#83a598ff", + "icon": "#fbf1c7ff", + "icon.muted": "#c5b597ff", + "icon.disabled": "#998b78ff", + "icon.placeholder": "#c5b597ff", + "icon.accent": "#83a598ff", + "status_bar.background": "#4c4642ff", + "title_bar.background": "#4c4642ff", + "toolbar.background": "#282828ff", + "tab_bar.background": "#3a3735ff", + "tab.inactive_background": "#3a3735ff", + "tab.active_background": "#282828ff", + "search.match_background": "#83a59866", + "panel.background": "#3a3735ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#fbf1c74c", + "scrollbar.thumb.hover_background": "#494340ff", + "scrollbar.thumb.border": "#494340ff", + "scrollbar.track.background": "#00000000", + 
"scrollbar.track.border": "#373432ff", + "editor.foreground": "#ebdbb2ff", + "editor.background": "#282828ff", + "editor.gutter.background": "#282828ff", + "editor.subheader.background": "#3a3735ff", + "editor.active_line.background": "#3a3735bf", + "editor.highlighted_line.background": "#3a3735ff", + "editor.line_number": "#fbf1c759", + "editor.active_line_number": "#fbf1c7ff", + "editor.invisible": "#928474ff", + "editor.wrap_guide": "#fbf1c70d", + "editor.active_wrap_guide": "#fbf1c71a", + "editor.document_highlight.read_background": "#83a5981a", + "editor.document_highlight.write_background": "#92847466", + "terminal.background": "#282828ff", + "terminal.foreground": "#fbf1c7ff", + "terminal.bright_foreground": "#fbf1c7ff", + "terminal.dim_foreground": "#282828ff", + "terminal.ansi.black": "#282828ff", + "terminal.ansi.bright_black": "#73675eff", + "terminal.ansi.dim_black": "#fbf1c7ff", + "terminal.ansi.red": "#fb4a35ff", + "terminal.ansi.bright_red": "#93201dff", + "terminal.ansi.dim_red": "#ffaa95ff", + "terminal.ansi.green": "#b7bb26ff", + "terminal.ansi.bright_green": "#605c1bff", + "terminal.ansi.dim_green": "#e0dc98ff", + "terminal.ansi.yellow": "#f9bd2fff", + "terminal.ansi.bright_yellow": "#91611bff", + "terminal.ansi.dim_yellow": "#fedc9bff", + "terminal.ansi.blue": "#83a598ff", + "terminal.ansi.bright_blue": "#414f4aff", + "terminal.ansi.dim_blue": "#c0d2cbff", + "terminal.ansi.magenta": "#a89984ff", + "terminal.ansi.bright_magenta": "#514a41ff", + "terminal.ansi.dim_magenta": "#d2cabfff", + "terminal.ansi.cyan": "#8ec07cff", + "terminal.ansi.bright_cyan": "#45603eff", + "terminal.ansi.dim_cyan": "#c7dfbdff", + "terminal.ansi.white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.dim_white": "#b0a189ff", + "link_text.hover": "#83a598ff", + "conflict": "#f9bd2fff", + "conflict.background": "#572e10ff", + "conflict.border": "#754916ff", + "created": "#b7bb26ff", + "created.background": "#322b11ff", + "created.border": "#4a4516ff", + "deleted": "#fb4a35ff", + "deleted.background": "#590a0fff", + "deleted.border": "#771617ff", + "error": "#fb4a35ff", + "error.background": "#590a0fff", + "error.border": "#771617ff", + "hidden": "#998b78ff", + "hidden.background": "#4c4642ff", + "hidden.border": "#544c48ff", + "hint": "#8c957dff", + "hint.background": "#1e2321ff", + "hint.border": "#303a36ff", + "ignored": "#998b78ff", + "ignored.background": "#4c4642ff", + "ignored.border": "#5b534dff", + "info": "#83a598ff", + "info.background": "#1e2321ff", + "info.border": "#303a36ff", + "modified": "#f9bd2fff", + "modified.background": "#572e10ff", + "modified.border": "#754916ff", + "predictive": "#717363ff", + "predictive.background": "#322b11ff", + "predictive.border": "#4a4516ff", + "renamed": "#83a598ff", + "renamed.background": "#1e2321ff", + "renamed.border": "#303a36ff", + "success": "#b7bb26ff", + "success.background": "#322b11ff", + "success.border": "#4a4516ff", + "unreachable": "#c5b597ff", + "unreachable.background": "#4c4642ff", + "unreachable.border": "#5b534dff", + "warning": "#f9bd2fff", + "warning.background": "#572e10ff", + "warning.border": "#754916ff", + "players": [ + { + "cursor": "#83a598ff", + "background": "#83a598ff", + "selection": "#83a5983d" + }, + { + "cursor": "#a89984ff", + "background": "#a89984ff", + "selection": "#a899843d" + }, + { + "cursor": "#fd801bff", + "background": "#fd801bff", + "selection": "#fd801b3d" + }, + { + "cursor": "#d3869bff", + "background": "#d3869bff", + "selection": "#d3869b3d" + }, + { + "cursor": 
"#8ec07cff", + "background": "#8ec07cff", + "selection": "#8ec07c3d" + }, + { + "cursor": "#fb4a35ff", + "background": "#fb4a35ff", + "selection": "#fb4a353d" + }, + { + "cursor": "#f9bd2fff", + "background": "#f9bd2fff", + "selection": "#f9bd2f3d" + }, + { + "cursor": "#b7bb26ff", + "background": "#b7bb26ff", + "selection": "#b7bb263d" + } + ], + "syntax": { + "attribute": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#c6b697ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#fabd2eff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#83a598ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#fe7f18ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": null + }, + "function.builtin": { + "color": "#fb4833ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8c957dff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#fb4833ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#8ec07cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#717363ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#fbf1c7ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#d5c4a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#e5d5adff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#e5d5adff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#c6b697ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#fe7f18ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#83a598ff", + "font_style": null, + 
"font_weight": null + }, + "title": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#fabd2eff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Gruvbox Dark Hard", + "appearance": "dark", + "style": { + "border": "#5b534dff", + "border.variant": "#494340ff", + "border.focused": "#303a36ff", + "border.selected": "#303a36ff", + "border.transparent": "#00000000", + "border.disabled": "#544c48ff", + "elevated_surface.background": "#393634ff", + "surface.background": "#393634ff", + "background": "#4c4642ff", + "element.background": "#393634ff", + "element.hover": "#494340ff", + "element.active": "#5b524cff", + "element.selected": "#5b524cff", + "element.disabled": "#393634ff", + "drop_target.background": "#c5b59780", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#494340ff", + "ghost_element.active": "#5b524cff", + "ghost_element.selected": "#5b524cff", + "ghost_element.disabled": "#393634ff", + "text": "#fbf1c7ff", + "text.muted": "#c5b597ff", + "text.placeholder": "#998b78ff", + "text.disabled": "#998b78ff", + "text.accent": "#83a598ff", + "icon": "#fbf1c7ff", + "icon.muted": "#c5b597ff", + "icon.disabled": "#998b78ff", + "icon.placeholder": "#c5b597ff", + "icon.accent": "#83a598ff", + "status_bar.background": "#4c4642ff", + "title_bar.background": "#4c4642ff", + "toolbar.background": "#1d2021ff", + "tab_bar.background": "#393634ff", + "tab.inactive_background": "#393634ff", + "tab.active_background": "#1d2021ff", + "search.match_background": "#83a59866", + "panel.background": "#393634ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#fbf1c74c", + "scrollbar.thumb.hover_background": "#494340ff", + "scrollbar.thumb.border": "#494340ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#343130ff", + "editor.foreground": "#ebdbb2ff", + "editor.background": "#1d2021ff", + "editor.gutter.background": "#1d2021ff", + "editor.subheader.background": "#393634ff", + "editor.active_line.background": "#393634bf", + "editor.highlighted_line.background": "#393634ff", + "editor.line_number": "#fbf1c759", + "editor.active_line_number": "#fbf1c7ff", + "editor.invisible": "#928474ff", + "editor.wrap_guide": "#fbf1c70d", + "editor.active_wrap_guide": "#fbf1c71a", + "editor.document_highlight.read_background": "#83a5981a", + "editor.document_highlight.write_background": "#92847466", + "terminal.background": "#1d2021ff", + "terminal.foreground": "#fbf1c7ff", + "terminal.bright_foreground": "#fbf1c7ff", + "terminal.dim_foreground": "#1d2021ff", + "terminal.ansi.black": "#1d2021ff", + "terminal.ansi.bright_black": "#73675eff", + "terminal.ansi.dim_black": "#fbf1c7ff", + "terminal.ansi.red": "#fb4a35ff", + "terminal.ansi.bright_red": "#93201dff", + "terminal.ansi.dim_red": "#ffaa95ff", + "terminal.ansi.green": "#b7bb26ff", + "terminal.ansi.bright_green": "#605c1bff", + "terminal.ansi.dim_green": "#e0dc98ff", + "terminal.ansi.yellow": "#f9bd2fff", + "terminal.ansi.bright_yellow": "#91611bff", + "terminal.ansi.dim_yellow": "#fedc9bff", + "terminal.ansi.blue": "#83a598ff", + "terminal.ansi.bright_blue": "#414f4aff", + "terminal.ansi.dim_blue": "#c0d2cbff", + "terminal.ansi.magenta": "#a89984ff", + "terminal.ansi.bright_magenta": "#514a41ff", + "terminal.ansi.dim_magenta": 
"#d2cabfff", + "terminal.ansi.cyan": "#8ec07cff", + "terminal.ansi.bright_cyan": "#45603eff", + "terminal.ansi.dim_cyan": "#c7dfbdff", + "terminal.ansi.white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.dim_white": "#b0a189ff", + "link_text.hover": "#83a598ff", + "conflict": "#f9bd2fff", + "conflict.background": "#572e10ff", + "conflict.border": "#754916ff", + "created": "#b7bb26ff", + "created.background": "#322b11ff", + "created.border": "#4a4516ff", + "deleted": "#fb4a35ff", + "deleted.background": "#590a0fff", + "deleted.border": "#771617ff", + "error": "#fb4a35ff", + "error.background": "#590a0fff", + "error.border": "#771617ff", + "hidden": "#998b78ff", + "hidden.background": "#4c4642ff", + "hidden.border": "#544c48ff", + "hint": "#6a695bff", + "hint.background": "#1e2321ff", + "hint.border": "#303a36ff", + "ignored": "#998b78ff", + "ignored.background": "#4c4642ff", + "ignored.border": "#5b534dff", + "info": "#83a598ff", + "info.background": "#1e2321ff", + "info.border": "#303a36ff", + "modified": "#f9bd2fff", + "modified.background": "#572e10ff", + "modified.border": "#754916ff", + "predictive": "#717363ff", + "predictive.background": "#322b11ff", + "predictive.border": "#4a4516ff", + "renamed": "#83a598ff", + "renamed.background": "#1e2321ff", + "renamed.border": "#303a36ff", + "success": "#b7bb26ff", + "success.background": "#322b11ff", + "success.border": "#4a4516ff", + "unreachable": "#c5b597ff", + "unreachable.background": "#4c4642ff", + "unreachable.border": "#5b534dff", + "warning": "#f9bd2fff", + "warning.background": "#572e10ff", + "warning.border": "#754916ff", + "players": [ + { + "cursor": "#83a598ff", + "background": "#83a598ff", + "selection": "#83a5983d" + }, + { + "cursor": "#a89984ff", + "background": "#a89984ff", + "selection": "#a899843d" + }, + { + "cursor": "#fd801bff", + "background": "#fd801bff", + "selection": "#fd801b3d" + }, + { + "cursor": "#d3869bff", + "background": "#d3869bff", + "selection": "#d3869b3d" + }, + { + "cursor": "#8ec07cff", + "background": "#8ec07cff", + "selection": "#8ec07c3d" + }, + { + "cursor": "#fb4a35ff", + "background": "#fb4a35ff", + "selection": "#fb4a353d" + }, + { + "cursor": "#f9bd2fff", + "background": "#f9bd2fff", + "selection": "#f9bd2f3d" + }, + { + "cursor": "#b7bb26ff", + "background": "#b7bb26ff", + "selection": "#b7bb263d" + } + ], + "syntax": { + "attribute": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#c6b697ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#fabd2eff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#83a598ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#fe7f18ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": null + }, + "function.builtin": { + "color": "#fb4833ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8c957dff", + "font_style": null, + "font_weight": 700 + 
}, + "keyword": { + "color": "#fb4833ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#8ec07cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#717363ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#fbf1c7ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#d5c4a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#e5d5adff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#e5d5adff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#c6b697ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#fe7f18ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#fabd2eff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Gruvbox Dark Soft", + "appearance": "dark", + "style": { + "border": "#5b534dff", + "border.variant": "#494340ff", + "border.focused": "#303a36ff", + "border.selected": "#303a36ff", + "border.transparent": "#00000000", + "border.disabled": "#544c48ff", + "elevated_surface.background": "#3b3735ff", + "surface.background": "#3b3735ff", + "background": "#4c4642ff", + "element.background": "#3b3735ff", + "element.hover": "#494340ff", + "element.active": "#5b524cff", + "element.selected": "#5b524cff", + "element.disabled": "#3b3735ff", + "drop_target.background": "#c5b59780", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#494340ff", + "ghost_element.active": "#5b524cff", + "ghost_element.selected": "#5b524cff", + "ghost_element.disabled": "#3b3735ff", + "text": "#fbf1c7ff", + "text.muted": "#c5b597ff", + "text.placeholder": "#998b78ff", + "text.disabled": "#998b78ff", + "text.accent": "#83a598ff", + "icon": "#fbf1c7ff", + "icon.muted": "#c5b597ff", + "icon.disabled": "#998b78ff", + "icon.placeholder": "#c5b597ff", + "icon.accent": "#83a598ff", + "status_bar.background": "#4c4642ff", + 
"title_bar.background": "#4c4642ff", + "toolbar.background": "#32302fff", + "tab_bar.background": "#3b3735ff", + "tab.inactive_background": "#3b3735ff", + "tab.active_background": "#32302fff", + "search.match_background": "#83a59866", + "panel.background": "#3b3735ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#fbf1c74c", + "scrollbar.thumb.hover_background": "#494340ff", + "scrollbar.thumb.border": "#494340ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#393634ff", + "editor.foreground": "#ebdbb2ff", + "editor.background": "#32302fff", + "editor.gutter.background": "#32302fff", + "editor.subheader.background": "#3b3735ff", + "editor.active_line.background": "#3b3735bf", + "editor.highlighted_line.background": "#3b3735ff", + "editor.line_number": "#fbf1c759", + "editor.active_line_number": "#fbf1c7ff", + "editor.invisible": "#928474ff", + "editor.wrap_guide": "#fbf1c70d", + "editor.active_wrap_guide": "#fbf1c71a", + "editor.document_highlight.read_background": "#83a5981a", + "editor.document_highlight.write_background": "#92847466", + "terminal.background": "#32302fff", + "terminal.foreground": "#fbf1c7ff", + "terminal.bright_foreground": "#fbf1c7ff", + "terminal.dim_foreground": "#32302fff", + "terminal.ansi.black": "#32302fff", + "terminal.ansi.bright_black": "#73675eff", + "terminal.ansi.dim_black": "#fbf1c7ff", + "terminal.ansi.red": "#fb4a35ff", + "terminal.ansi.bright_red": "#93201dff", + "terminal.ansi.dim_red": "#ffaa95ff", + "terminal.ansi.green": "#b7bb26ff", + "terminal.ansi.bright_green": "#605c1bff", + "terminal.ansi.dim_green": "#e0dc98ff", + "terminal.ansi.yellow": "#f9bd2fff", + "terminal.ansi.bright_yellow": "#91611bff", + "terminal.ansi.dim_yellow": "#fedc9bff", + "terminal.ansi.blue": "#83a598ff", + "terminal.ansi.bright_blue": "#414f4aff", + "terminal.ansi.dim_blue": "#c0d2cbff", + "terminal.ansi.magenta": "#a89984ff", + "terminal.ansi.bright_magenta": "#514a41ff", + "terminal.ansi.dim_magenta": "#d2cabfff", + "terminal.ansi.cyan": "#8ec07cff", + "terminal.ansi.bright_cyan": "#45603eff", + "terminal.ansi.dim_cyan": "#c7dfbdff", + "terminal.ansi.white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.dim_white": "#b0a189ff", + "link_text.hover": "#83a598ff", + "conflict": "#f9bd2fff", + "conflict.background": "#572e10ff", + "conflict.border": "#754916ff", + "created": "#b7bb26ff", + "created.background": "#322b11ff", + "created.border": "#4a4516ff", + "deleted": "#fb4a35ff", + "deleted.background": "#590a0fff", + "deleted.border": "#771617ff", + "error": "#fb4a35ff", + "error.background": "#590a0fff", + "error.border": "#771617ff", + "hidden": "#998b78ff", + "hidden.background": "#4c4642ff", + "hidden.border": "#544c48ff", + "hint": "#8c957dff", + "hint.background": "#1e2321ff", + "hint.border": "#303a36ff", + "ignored": "#998b78ff", + "ignored.background": "#4c4642ff", + "ignored.border": "#5b534dff", + "info": "#83a598ff", + "info.background": "#1e2321ff", + "info.border": "#303a36ff", + "modified": "#f9bd2fff", + "modified.background": "#572e10ff", + "modified.border": "#754916ff", + "predictive": "#717363ff", + "predictive.background": "#322b11ff", + "predictive.border": "#4a4516ff", + "renamed": "#83a598ff", + "renamed.background": "#1e2321ff", + "renamed.border": "#303a36ff", + "success": "#b7bb26ff", + "success.background": "#322b11ff", + "success.border": "#4a4516ff", + "unreachable": "#c5b597ff", + "unreachable.background": "#4c4642ff", + 
"unreachable.border": "#5b534dff", + "warning": "#f9bd2fff", + "warning.background": "#572e10ff", + "warning.border": "#754916ff", + "players": [ + { + "cursor": "#83a598ff", + "background": "#83a598ff", + "selection": "#83a5983d" + }, + { + "cursor": "#a89984ff", + "background": "#a89984ff", + "selection": "#a899843d" + }, + { + "cursor": "#fd801bff", + "background": "#fd801bff", + "selection": "#fd801b3d" + }, + { + "cursor": "#d3869bff", + "background": "#d3869bff", + "selection": "#d3869b3d" + }, + { + "cursor": "#8ec07cff", + "background": "#8ec07cff", + "selection": "#8ec07c3d" + }, + { + "cursor": "#fb4a35ff", + "background": "#fb4a35ff", + "selection": "#fb4a353d" + }, + { + "cursor": "#f9bd2fff", + "background": "#f9bd2fff", + "selection": "#f9bd2f3d" + }, + { + "cursor": "#b7bb26ff", + "background": "#b7bb26ff", + "selection": "#b7bb263d" + } + ], + "syntax": { + "attribute": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#c6b697ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#fabd2eff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#83a598ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#fe7f18ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": null + }, + "function.builtin": { + "color": "#fb4833ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#8c957dff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#fb4833ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#8ec07cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#717363ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#fbf1c7ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#d5c4a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#e5d5adff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#ebdbb2ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#e5d5adff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + 
"color": "#c6b697ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#fe7f18ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#d3869bff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#8ec07cff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#b8bb25ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#fabd2eff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Gruvbox Light", + "appearance": "light", + "style": { + "border": "#c8b899ff", + "border.variant": "#ddcca7ff", + "border.focused": "#adc5ccff", + "border.selected": "#adc5ccff", + "border.transparent": "#00000000", + "border.disabled": "#d0bf9dff", + "elevated_surface.background": "#ecddb4ff", + "surface.background": "#ecddb4ff", + "background": "#d9c8a4ff", + "element.background": "#ecddb4ff", + "element.hover": "#ddcca7ff", + "element.active": "#c8b899ff", + "element.selected": "#c8b899ff", + "element.disabled": "#ecddb4ff", + "drop_target.background": "#5f565080", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#ddcca7ff", + "ghost_element.active": "#c8b899ff", + "ghost_element.selected": "#c8b899ff", + "ghost_element.disabled": "#ecddb4ff", + "text": "#282828ff", + "text.muted": "#5f5650ff", + "text.placeholder": "#897b6eff", + "text.disabled": "#897b6eff", + "text.accent": "#0b6678ff", + "icon": "#282828ff", + "icon.muted": "#5f5650ff", + "icon.disabled": "#897b6eff", + "icon.placeholder": "#5f5650ff", + "icon.accent": "#0b6678ff", + "status_bar.background": "#d9c8a4ff", + "title_bar.background": "#d9c8a4ff", + "toolbar.background": "#fbf1c7ff", + "tab_bar.background": "#ecddb4ff", + "tab.inactive_background": "#ecddb4ff", + "tab.active_background": "#fbf1c7ff", + "search.match_background": "#0b667866", + "panel.background": "#ecddb4ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#2828284c", + "scrollbar.thumb.hover_background": "#ddcca7ff", + "scrollbar.thumb.border": "#ddcca7ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#eee0b7ff", + "editor.foreground": "#282828ff", + "editor.background": "#fbf1c7ff", + "editor.gutter.background": "#fbf1c7ff", + "editor.subheader.background": "#ecddb4ff", + "editor.active_line.background": "#ecddb4bf", + "editor.highlighted_line.background": "#ecddb4ff", + "editor.line_number": "#28282859", + "editor.active_line_number": "#282828ff", + "editor.invisible": "#928474ff", + "editor.wrap_guide": "#2828280d", + "editor.active_wrap_guide": "#2828281a", + "editor.document_highlight.read_background": "#0b66781a", + "editor.document_highlight.write_background": "#92847466", + "terminal.background": "#fbf1c7ff", + "terminal.foreground": "#282828ff", + "terminal.bright_foreground": "#282828ff", + "terminal.dim_foreground": "#fbf1c7ff", + "terminal.ansi.black": "#fbf1c7ff", + "terminal.ansi.bright_black": "#b0a189ff", + "terminal.ansi.dim_black": "#282828ff", + "terminal.ansi.red": "#9d0308ff", + "terminal.ansi.bright_red": "#db8b7aff", + 
"terminal.ansi.dim_red": "#4e1207ff", + "terminal.ansi.green": "#797410ff", + "terminal.ansi.bright_green": "#bfb787ff", + "terminal.ansi.dim_green": "#3e3a11ff", + "terminal.ansi.yellow": "#b57615ff", + "terminal.ansi.bright_yellow": "#e2b88bff", + "terminal.ansi.dim_yellow": "#5c3a12ff", + "terminal.ansi.blue": "#0b6678ff", + "terminal.ansi.bright_blue": "#8fb0baff", + "terminal.ansi.dim_blue": "#14333bff", + "terminal.ansi.magenta": "#7c6f64ff", + "terminal.ansi.bright_magenta": "#bcb5afff", + "terminal.ansi.dim_magenta": "#3e3833ff", + "terminal.ansi.cyan": "#437b59ff", + "terminal.ansi.bright_cyan": "#9fbca8ff", + "terminal.ansi.dim_cyan": "#253e2eff", + "terminal.ansi.white": "#282828ff", + "terminal.ansi.bright_white": "#282828ff", + "terminal.ansi.dim_white": "#73675eff", + "link_text.hover": "#0b6678ff", + "conflict": "#b57615ff", + "conflict.background": "#f5e2d0ff", + "conflict.border": "#ebccabff", + "created": "#797410ff", + "created.background": "#e4e0cdff", + "created.border": "#d1cba8ff", + "deleted": "#9d0308ff", + "deleted.background": "#f4d1c9ff", + "deleted.border": "#e8ac9eff", + "error": "#9d0308ff", + "error.background": "#f4d1c9ff", + "error.border": "#e8ac9eff", + "hidden": "#897b6eff", + "hidden.background": "#d9c8a4ff", + "hidden.border": "#d0bf9dff", + "hint": "#677562ff", + "hint.background": "#d2dee2ff", + "hint.border": "#adc5ccff", + "ignored": "#897b6eff", + "ignored.background": "#d9c8a4ff", + "ignored.border": "#c8b899ff", + "info": "#0b6678ff", + "info.background": "#d2dee2ff", + "info.border": "#adc5ccff", + "modified": "#b57615ff", + "modified.background": "#f5e2d0ff", + "modified.border": "#ebccabff", + "predictive": "#7c9780ff", + "predictive.background": "#e4e0cdff", + "predictive.border": "#d1cba8ff", + "renamed": "#0b6678ff", + "renamed.background": "#d2dee2ff", + "renamed.border": "#adc5ccff", + "success": "#797410ff", + "success.background": "#e4e0cdff", + "success.border": "#d1cba8ff", + "unreachable": "#5f5650ff", + "unreachable.background": "#d9c8a4ff", + "unreachable.border": "#c8b899ff", + "warning": "#b57615ff", + "warning.background": "#f5e2d0ff", + "warning.border": "#ebccabff", + "players": [ + { + "cursor": "#0b6678ff", + "background": "#0b6678ff", + "selection": "#0b66783d" + }, + { + "cursor": "#7c6f64ff", + "background": "#7c6f64ff", + "selection": "#7c6f643d" + }, + { + "cursor": "#af3a04ff", + "background": "#af3a04ff", + "selection": "#af3a043d" + }, + { + "cursor": "#8f3f70ff", + "background": "#8f3f70ff", + "selection": "#8f3f703d" + }, + { + "cursor": "#437b59ff", + "background": "#437b59ff", + "selection": "#437b593d" + }, + { + "cursor": "#9d0308ff", + "background": "#9d0308ff", + "selection": "#9d03083d" + }, + { + "cursor": "#b57615ff", + "background": "#b57615ff", + "selection": "#b576153d" + }, + { + "cursor": "#797410ff", + "background": "#797410ff", + "selection": "#7974103d" + } + ], + "syntax": { + "attribute": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#7c6f64ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#5d544eff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#b57613ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + 
"emphasis": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#af3a02ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "function.builtin": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#677562ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#427b58ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#7c9780ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#3c3836ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#665c54ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#413d3aff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#413d3aff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#5d544eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#af3a02ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#066578ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#79740eff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#b57613ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#066578ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Gruvbox Light Hard", + "appearance": "light", + "style": { + "border": "#c8b899ff", + "border.variant": "#ddcca7ff", + "border.focused": "#adc5ccff", + "border.selected": "#adc5ccff", + "border.transparent": "#00000000", + "border.disabled": "#d0bf9dff", + "elevated_surface.background": "#ecddb5ff", + "surface.background": "#ecddb5ff", + "background": "#d9c8a4ff", + "element.background": "#ecddb5ff", + "element.hover": "#ddcca7ff", + "element.active": "#c8b899ff", + "element.selected": "#c8b899ff", + "element.disabled": "#ecddb5ff", + "drop_target.background": "#5f565080", + 
"ghost_element.background": "#00000000", + "ghost_element.hover": "#ddcca7ff", + "ghost_element.active": "#c8b899ff", + "ghost_element.selected": "#c8b899ff", + "ghost_element.disabled": "#ecddb5ff", + "text": "#282828ff", + "text.muted": "#5f5650ff", + "text.placeholder": "#897b6eff", + "text.disabled": "#897b6eff", + "text.accent": "#0b6678ff", + "icon": "#282828ff", + "icon.muted": "#5f5650ff", + "icon.disabled": "#897b6eff", + "icon.placeholder": "#5f5650ff", + "icon.accent": "#0b6678ff", + "status_bar.background": "#d9c8a4ff", + "title_bar.background": "#d9c8a4ff", + "toolbar.background": "#f9f5d7ff", + "tab_bar.background": "#ecddb5ff", + "tab.inactive_background": "#ecddb5ff", + "tab.active_background": "#f9f5d7ff", + "search.match_background": "#0b667866", + "panel.background": "#ecddb5ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#2828284c", + "scrollbar.thumb.hover_background": "#ddcca7ff", + "scrollbar.thumb.border": "#ddcca7ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#eee1bbff", + "editor.foreground": "#282828ff", + "editor.background": "#f9f5d7ff", + "editor.gutter.background": "#f9f5d7ff", + "editor.subheader.background": "#ecddb5ff", + "editor.active_line.background": "#ecddb5bf", + "editor.highlighted_line.background": "#ecddb5ff", + "editor.line_number": "#28282859", + "editor.active_line_number": "#282828ff", + "editor.invisible": "#928474ff", + "editor.wrap_guide": "#2828280d", + "editor.active_wrap_guide": "#2828281a", + "editor.document_highlight.read_background": "#0b66781a", + "editor.document_highlight.write_background": "#92847466", + "terminal.background": "#f9f5d7ff", + "terminal.foreground": "#282828ff", + "terminal.bright_foreground": "#282828ff", + "terminal.dim_foreground": "#f9f5d7ff", + "terminal.ansi.black": "#f9f5d7ff", + "terminal.ansi.bright_black": "#b0a189ff", + "terminal.ansi.dim_black": "#282828ff", + "terminal.ansi.red": "#9d0308ff", + "terminal.ansi.bright_red": "#db8b7aff", + "terminal.ansi.dim_red": "#4e1207ff", + "terminal.ansi.green": "#797410ff", + "terminal.ansi.bright_green": "#bfb787ff", + "terminal.ansi.dim_green": "#3e3a11ff", + "terminal.ansi.yellow": "#b57615ff", + "terminal.ansi.bright_yellow": "#e2b88bff", + "terminal.ansi.dim_yellow": "#5c3a12ff", + "terminal.ansi.blue": "#0b6678ff", + "terminal.ansi.bright_blue": "#8fb0baff", + "terminal.ansi.dim_blue": "#14333bff", + "terminal.ansi.magenta": "#7c6f64ff", + "terminal.ansi.bright_magenta": "#bcb5afff", + "terminal.ansi.dim_magenta": "#3e3833ff", + "terminal.ansi.cyan": "#437b59ff", + "terminal.ansi.bright_cyan": "#9fbca8ff", + "terminal.ansi.dim_cyan": "#253e2eff", + "terminal.ansi.white": "#282828ff", + "terminal.ansi.bright_white": "#282828ff", + "terminal.ansi.dim_white": "#73675eff", + "link_text.hover": "#0b6678ff", + "conflict": "#b57615ff", + "conflict.background": "#f5e2d0ff", + "conflict.border": "#ebccabff", + "created": "#797410ff", + "created.background": "#e4e0cdff", + "created.border": "#d1cba8ff", + "deleted": "#9d0308ff", + "deleted.background": "#f4d1c9ff", + "deleted.border": "#e8ac9eff", + "error": "#9d0308ff", + "error.background": "#f4d1c9ff", + "error.border": "#e8ac9eff", + "hidden": "#897b6eff", + "hidden.background": "#d9c8a4ff", + "hidden.border": "#d0bf9dff", + "hint": "#677562ff", + "hint.background": "#d2dee2ff", + "hint.border": "#adc5ccff", + "ignored": "#897b6eff", + "ignored.background": "#d9c8a4ff", + "ignored.border": "#c8b899ff", + "info": "#0b6678ff", + 
"info.background": "#d2dee2ff", + "info.border": "#adc5ccff", + "modified": "#b57615ff", + "modified.background": "#f5e2d0ff", + "modified.border": "#ebccabff", + "predictive": "#7c9780ff", + "predictive.background": "#e4e0cdff", + "predictive.border": "#d1cba8ff", + "renamed": "#0b6678ff", + "renamed.background": "#d2dee2ff", + "renamed.border": "#adc5ccff", + "success": "#797410ff", + "success.background": "#e4e0cdff", + "success.border": "#d1cba8ff", + "unreachable": "#5f5650ff", + "unreachable.background": "#d9c8a4ff", + "unreachable.border": "#c8b899ff", + "warning": "#b57615ff", + "warning.background": "#f5e2d0ff", + "warning.border": "#ebccabff", + "players": [ + { + "cursor": "#0b6678ff", + "background": "#0b6678ff", + "selection": "#0b66783d" + }, + { + "cursor": "#7c6f64ff", + "background": "#7c6f64ff", + "selection": "#7c6f643d" + }, + { + "cursor": "#af3a04ff", + "background": "#af3a04ff", + "selection": "#af3a043d" + }, + { + "cursor": "#8f3f70ff", + "background": "#8f3f70ff", + "selection": "#8f3f703d" + }, + { + "cursor": "#437b59ff", + "background": "#437b59ff", + "selection": "#437b593d" + }, + { + "cursor": "#9d0308ff", + "background": "#9d0308ff", + "selection": "#9d03083d" + }, + { + "cursor": "#b57615ff", + "background": "#b57615ff", + "selection": "#b576153d" + }, + { + "cursor": "#797410ff", + "background": "#797410ff", + "selection": "#7974103d" + } + ], + "syntax": { + "attribute": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#7c6f64ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#5d544eff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#b57613ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#af3a02ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "function.builtin": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#677562ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#427b58ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#7c9780ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#3c3836ff", + "font_style": null, + 
"font_weight": null + }, + "punctuation.bracket": { + "color": "#665c54ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#413d3aff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#413d3aff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#5d544eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#af3a02ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#066578ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#79740eff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#b57613ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#066578ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Gruvbox Light Soft", + "appearance": "light", + "style": { + "border": "#c8b899ff", + "border.variant": "#ddcca7ff", + "border.focused": "#adc5ccff", + "border.selected": "#adc5ccff", + "border.transparent": "#00000000", + "border.disabled": "#d0bf9dff", + "elevated_surface.background": "#ecdcb3ff", + "surface.background": "#ecdcb3ff", + "background": "#d9c8a4ff", + "element.background": "#ecdcb3ff", + "element.hover": "#ddcca7ff", + "element.active": "#c8b899ff", + "element.selected": "#c8b899ff", + "element.disabled": "#ecdcb3ff", + "drop_target.background": "#5f565080", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#ddcca7ff", + "ghost_element.active": "#c8b899ff", + "ghost_element.selected": "#c8b899ff", + "ghost_element.disabled": "#ecdcb3ff", + "text": "#282828ff", + "text.muted": "#5f5650ff", + "text.placeholder": "#897b6eff", + "text.disabled": "#897b6eff", + "text.accent": "#0b6678ff", + "icon": "#282828ff", + "icon.muted": "#5f5650ff", + "icon.disabled": "#897b6eff", + "icon.placeholder": "#5f5650ff", + "icon.accent": "#0b6678ff", + "status_bar.background": "#d9c8a4ff", + "title_bar.background": "#d9c8a4ff", + "toolbar.background": "#f2e5bcff", + "tab_bar.background": "#ecdcb3ff", + "tab.inactive_background": "#ecdcb3ff", + "tab.active_background": "#f2e5bcff", + "search.match_background": "#0b667866", + "panel.background": "#ecdcb3ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#2828284c", + "scrollbar.thumb.hover_background": "#ddcca7ff", + "scrollbar.thumb.border": "#ddcca7ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#eddeb5ff", + "editor.foreground": "#282828ff", + "editor.background": "#f2e5bcff", + "editor.gutter.background": "#f2e5bcff", + "editor.subheader.background": "#ecdcb3ff", + "editor.active_line.background": "#ecdcb3bf", + "editor.highlighted_line.background": "#ecdcb3ff", + "editor.line_number": "#28282859", + "editor.active_line_number": "#282828ff", + "editor.invisible": "#928474ff", + "editor.wrap_guide": "#2828280d", + 
"editor.active_wrap_guide": "#2828281a", + "editor.document_highlight.read_background": "#0b66781a", + "editor.document_highlight.write_background": "#92847466", + "terminal.background": "#f2e5bcff", + "terminal.foreground": "#282828ff", + "terminal.bright_foreground": "#282828ff", + "terminal.dim_foreground": "#f2e5bcff", + "terminal.ansi.black": "#f2e5bcff", + "terminal.ansi.bright_black": "#b0a189ff", + "terminal.ansi.dim_black": "#282828ff", + "terminal.ansi.red": "#9d0308ff", + "terminal.ansi.bright_red": "#db8b7aff", + "terminal.ansi.dim_red": "#4e1207ff", + "terminal.ansi.green": "#797410ff", + "terminal.ansi.bright_green": "#bfb787ff", + "terminal.ansi.dim_green": "#3e3a11ff", + "terminal.ansi.yellow": "#b57615ff", + "terminal.ansi.bright_yellow": "#e2b88bff", + "terminal.ansi.dim_yellow": "#5c3a12ff", + "terminal.ansi.blue": "#0b6678ff", + "terminal.ansi.bright_blue": "#8fb0baff", + "terminal.ansi.dim_blue": "#14333bff", + "terminal.ansi.magenta": "#7c6f64ff", + "terminal.ansi.bright_magenta": "#bcb5afff", + "terminal.ansi.dim_magenta": "#3e3833ff", + "terminal.ansi.cyan": "#437b59ff", + "terminal.ansi.bright_cyan": "#9fbca8ff", + "terminal.ansi.dim_cyan": "#253e2eff", + "terminal.ansi.white": "#282828ff", + "terminal.ansi.bright_white": "#282828ff", + "terminal.ansi.dim_white": "#73675eff", + "link_text.hover": "#0b6678ff", + "conflict": "#b57615ff", + "conflict.background": "#f5e2d0ff", + "conflict.border": "#ebccabff", + "created": "#797410ff", + "created.background": "#e4e0cdff", + "created.border": "#d1cba8ff", + "deleted": "#9d0308ff", + "deleted.background": "#f4d1c9ff", + "deleted.border": "#e8ac9eff", + "error": "#9d0308ff", + "error.background": "#f4d1c9ff", + "error.border": "#e8ac9eff", + "hidden": "#897b6eff", + "hidden.background": "#d9c8a4ff", + "hidden.border": "#d0bf9dff", + "hint": "#677562ff", + "hint.background": "#d2dee2ff", + "hint.border": "#adc5ccff", + "ignored": "#897b6eff", + "ignored.background": "#d9c8a4ff", + "ignored.border": "#c8b899ff", + "info": "#0b6678ff", + "info.background": "#d2dee2ff", + "info.border": "#adc5ccff", + "modified": "#b57615ff", + "modified.background": "#f5e2d0ff", + "modified.border": "#ebccabff", + "predictive": "#7c9780ff", + "predictive.background": "#e4e0cdff", + "predictive.border": "#d1cba8ff", + "renamed": "#0b6678ff", + "renamed.background": "#d2dee2ff", + "renamed.border": "#adc5ccff", + "success": "#797410ff", + "success.background": "#e4e0cdff", + "success.border": "#d1cba8ff", + "unreachable": "#5f5650ff", + "unreachable.background": "#d9c8a4ff", + "unreachable.border": "#c8b899ff", + "warning": "#b57615ff", + "warning.background": "#f5e2d0ff", + "warning.border": "#ebccabff", + "players": [ + { + "cursor": "#0b6678ff", + "background": "#0b6678ff", + "selection": "#0b66783d" + }, + { + "cursor": "#7c6f64ff", + "background": "#7c6f64ff", + "selection": "#7c6f643d" + }, + { + "cursor": "#af3a04ff", + "background": "#af3a04ff", + "selection": "#af3a043d" + }, + { + "cursor": "#8f3f70ff", + "background": "#8f3f70ff", + "selection": "#8f3f703d" + }, + { + "cursor": "#437b59ff", + "background": "#437b59ff", + "selection": "#437b593d" + }, + { + "cursor": "#9d0308ff", + "background": "#9d0308ff", + "selection": "#9d03083d" + }, + { + "cursor": "#b57615ff", + "background": "#b57615ff", + "selection": "#b576153d" + }, + { + "cursor": "#797410ff", + "background": "#797410ff", + "selection": "#7974103d" + } + ], + "syntax": { + "attribute": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + 
"boolean": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#7c6f64ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#5d544eff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#b57613ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#af3a02ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "function.builtin": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#677562ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#427b58ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#7c9780ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#3c3836ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#665c54ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#413d3aff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#282828ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#413d3aff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#5d544eff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#af3a02ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#8f3e71ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#427b58ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#066578ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#79740eff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#b57613ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#066578ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#0b6678ff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/one/LICENSE b/assets/themes/one/LICENSE new file 
mode 100644 index 0000000..f7637d3 --- /dev/null +++ b/assets/themes/one/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 GitHub Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json new file mode 100644 index 0000000..6c22473 --- /dev/null +++ b/assets/themes/one/one.json @@ -0,0 +1,754 @@ +{ + "name": "One", + "author": "Zed Industries", + "themes": [ + { + "name": "One Dark", + "appearance": "dark", + "style": { + "border": "#464b57ff", + "border.variant": "#363c46ff", + "border.focused": "#293b5bff", + "border.selected": "#293b5bff", + "border.transparent": "#00000000", + "border.disabled": "#414754ff", + "elevated_surface.background": "#2f343eff", + "surface.background": "#2f343eff", + "background": "#3b414dff", + "element.background": "#2f343eff", + "element.hover": "#363c46ff", + "element.active": "#454a56ff", + "element.selected": "#454a56ff", + "element.disabled": "#2f343eff", + "drop_target.background": "#83899480", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#363c46ff", + "ghost_element.active": "#454a56ff", + "ghost_element.selected": "#454a56ff", + "ghost_element.disabled": "#2f343eff", + "text": "#c8ccd4ff", + "text.muted": "#838994ff", + "text.placeholder": "#555a63ff", + "text.disabled": "#555a63ff", + "text.accent": "#74ade8ff", + "icon": "#c8ccd4ff", + "icon.muted": "#838994ff", + "icon.disabled": "#555a63ff", + "icon.placeholder": "#838994ff", + "icon.accent": "#74ade8ff", + "status_bar.background": "#3b414dff", + "title_bar.background": "#3b414dff", + "toolbar.background": "#282c33ff", + "tab_bar.background": "#2f343eff", + "tab.inactive_background": "#2f343eff", + "tab.active_background": "#282c33ff", + "search.match_background": "#74ade866", + "panel.background": "#2f343eff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#c8ccd44c", + "scrollbar.thumb.hover_background": "#363c46ff", + "scrollbar.thumb.border": "#363c46ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#2e333cff", + "editor.foreground": "#acb2beff", + "editor.background": "#282c33ff", + "editor.gutter.background": "#282c33ff", + "editor.subheader.background": "#2f343eff", + "editor.active_line.background": "#2f343ebf", + "editor.highlighted_line.background": "#2f343eff", + "editor.line_number": "#c8ccd459", + "editor.active_line_number": "#c8ccd4ff", + "editor.invisible": "#555a63ff", + "editor.wrap_guide": "#c8ccd40d", + 
"editor.active_wrap_guide": "#c8ccd41a", + "editor.document_highlight.read_background": "#74ade81a", + "editor.document_highlight.write_background": "#555a6366", + "terminal.background": "#282c33ff", + "terminal.foreground": "#c8ccd4ff", + "terminal.bright_foreground": "#c8ccd4ff", + "terminal.dim_foreground": "#282c33ff", + "terminal.ansi.black": "#282c33ff", + "terminal.ansi.bright_black": "#525561ff", + "terminal.ansi.dim_black": "#c8ccd4ff", + "terminal.ansi.red": "#d07277ff", + "terminal.ansi.bright_red": "#673a3cff", + "terminal.ansi.dim_red": "#eab7b9ff", + "terminal.ansi.green": "#a1c181ff", + "terminal.ansi.bright_green": "#4d6140ff", + "terminal.ansi.dim_green": "#d1e0bfff", + "terminal.ansi.yellow": "#dec184ff", + "terminal.ansi.bright_yellow": "#786441ff", + "terminal.ansi.dim_yellow": "#f1dfc1ff", + "terminal.ansi.blue": "#74ade8ff", + "terminal.ansi.bright_blue": "#385378ff", + "terminal.ansi.dim_blue": "#bed5f4ff", + "terminal.ansi.magenta": "#be5046ff", + "terminal.ansi.bright_magenta": "#5e2b26ff", + "terminal.ansi.dim_magenta": "#e6a79eff", + "terminal.ansi.cyan": "#6eb4bfff", + "terminal.ansi.bright_cyan": "#3a565bff", + "terminal.ansi.dim_cyan": "#b9d9dfff", + "terminal.ansi.white": "#c8ccd4ff", + "terminal.ansi.bright_white": "#c8ccd4ff", + "terminal.ansi.dim_white": "#575d65ff", + "link_text.hover": "#74ade8ff", + "conflict": "#dec184ff", + "conflict.background": "#41321dff", + "conflict.border": "#5d4c2fff", + "created": "#a1c181ff", + "created.background": "#222e1dff", + "created.border": "#38482fff", + "deleted": "#d07277ff", + "deleted.background": "#301b1bff", + "deleted.border": "#4c2b2cff", + "error": "#d07277ff", + "error.background": "#301b1bff", + "error.border": "#4c2b2cff", + "hidden": "#555a63ff", + "hidden.background": "#3b414dff", + "hidden.border": "#414754ff", + "hint": "#5a6f89ff", + "hint.background": "#18243dff", + "hint.border": "#293b5bff", + "ignored": "#555a63ff", + "ignored.background": "#3b414dff", + "ignored.border": "#464b57ff", + "info": "#74ade8ff", + "info.background": "#18243dff", + "info.border": "#293b5bff", + "modified": "#dec184ff", + "modified.background": "#41321dff", + "modified.border": "#5d4c2fff", + "predictive": "#5a6a87ff", + "predictive.background": "#222e1dff", + "predictive.border": "#38482fff", + "renamed": "#74ade8ff", + "renamed.background": "#18243dff", + "renamed.border": "#293b5bff", + "success": "#a1c181ff", + "success.background": "#222e1dff", + "success.border": "#38482fff", + "unreachable": "#838994ff", + "unreachable.background": "#3b414dff", + "unreachable.border": "#464b57ff", + "warning": "#dec184ff", + "warning.background": "#41321dff", + "warning.border": "#5d4c2fff", + "players": [ + { + "cursor": "#74ade8ff", + "background": "#74ade8ff", + "selection": "#74ade83d" + }, + { + "cursor": "#be5046ff", + "background": "#be5046ff", + "selection": "#be50463d" + }, + { + "cursor": "#bf956aff", + "background": "#bf956aff", + "selection": "#bf956a3d" + }, + { + "cursor": "#b477cfff", + "background": "#b477cfff", + "selection": "#b477cf3d" + }, + { + "cursor": "#6eb4bfff", + "background": "#6eb4bfff", + "selection": "#6eb4bf3d" + }, + { + "cursor": "#d07277ff", + "background": "#d07277ff", + "selection": "#d072773d" + }, + { + "cursor": "#dec184ff", + "background": "#dec184ff", + "selection": "#dec1843d" + }, + { + "cursor": "#a1c181ff", + "background": "#a1c181ff", + "selection": "#a1c1813d" + } + ], + "syntax": { + "attribute": { + "color": "#74ade8ff", + "font_style": null, + "font_weight": null + }, + 
"boolean": { + "color": "#bf956aff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#5d636fff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#878e98ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#dfc184ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#73ade9ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#c8ccd4ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#74ade8ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#bf956aff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#d07277ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#73ade9ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#5a6f89ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#b477cfff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#74ade8ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#73ade9ff", + "font_style": "normal", + "font_weight": null + }, + "link_uri": { + "color": "#6eb4bfff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#bf956aff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#6eb4bfff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#5a6a87ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#c8ccd4ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#acb2beff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#d07277ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#acb2beff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#b2b9c6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#b2b9c6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#d07277ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#b1574bff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#a1c181ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#878e98ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#bf956aff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#bf956aff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#bf956aff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#74ade8ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#a1c181ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#d07277ff", + "font_style": null, + "font_weight": 400 + }, + "type": { + "color": "#6eb4bfff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#c8ccd4ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#bf956aff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#73ade9ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "One Light", + "appearance": "light", + "style": { + "border": 
"#c9c9caff", + "border.variant": "#dfdfe0ff", + "border.focused": "#cbcdf6ff", + "border.selected": "#cbcdf6ff", + "border.transparent": "#00000000", + "border.disabled": "#d3d3d4ff", + "elevated_surface.background": "#ebebecff", + "surface.background": "#ebebecff", + "background": "#dcdcddff", + "element.background": "#ebebecff", + "element.hover": "#dfdfe0ff", + "element.active": "#cacacaff", + "element.selected": "#cacacaff", + "element.disabled": "#ebebecff", + "drop_target.background": "#7e808780", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#dfdfe0ff", + "ghost_element.active": "#cacacaff", + "ghost_element.selected": "#cacacaff", + "ghost_element.disabled": "#ebebecff", + "text": "#383a41ff", + "text.muted": "#7e8087ff", + "text.placeholder": "#a1a1a3ff", + "text.disabled": "#a1a1a3ff", + "text.accent": "#5c78e2ff", + "icon": "#383a41ff", + "icon.muted": "#7e8087ff", + "icon.disabled": "#a1a1a3ff", + "icon.placeholder": "#7e8087ff", + "icon.accent": "#5c78e2ff", + "status_bar.background": "#dcdcddff", + "title_bar.background": "#dcdcddff", + "toolbar.background": "#fafafaff", + "tab_bar.background": "#ebebecff", + "tab.inactive_background": "#ebebecff", + "tab.active_background": "#fafafaff", + "search.match_background": "#5c79e266", + "panel.background": "#ebebecff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#383a414c", + "scrollbar.thumb.hover_background": "#dfdfe0ff", + "scrollbar.thumb.border": "#dfdfe0ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#eeeeeeff", + "editor.foreground": "#383a41ff", + "editor.background": "#fafafaff", + "editor.gutter.background": "#fafafaff", + "editor.subheader.background": "#ebebecff", + "editor.active_line.background": "#ebebecbf", + "editor.highlighted_line.background": "#ebebecff", + "editor.line_number": "#383a4159", + "editor.active_line_number": "#383a41ff", + "editor.invisible": "#a3a3a4ff", + "editor.wrap_guide": "#383a410d", + "editor.active_wrap_guide": "#383a411a", + "editor.document_highlight.read_background": "#5c78e21a", + "editor.document_highlight.write_background": "#a3a3a466", + "terminal.background": "#fafafaff", + "terminal.foreground": "#383a41ff", + "terminal.bright_foreground": "#383a41ff", + "terminal.dim_foreground": "#fafafaff", + "terminal.ansi.black": "#fafafaff", + "terminal.ansi.bright_black": "#aaaaaaff", + "terminal.ansi.dim_black": "#383a41ff", + "terminal.ansi.red": "#d36151ff", + "terminal.ansi.bright_red": "#f0b0a4ff", + "terminal.ansi.dim_red": "#6f312aff", + "terminal.ansi.green": "#669f59ff", + "terminal.ansi.bright_green": "#b2cfa9ff", + "terminal.ansi.dim_green": "#354d2eff", + "terminal.ansi.yellow": "#dec184ff", + "terminal.ansi.bright_yellow": "#f1dfc1ff", + "terminal.ansi.dim_yellow": "#786441ff", + "terminal.ansi.blue": "#5c78e2ff", + "terminal.ansi.bright_blue": "#b5baf2ff", + "terminal.ansi.dim_blue": "#2d3d75ff", + "terminal.ansi.magenta": "#984ea5ff", + "terminal.ansi.bright_magenta": "#cea6d3ff", + "terminal.ansi.dim_magenta": "#4b2a50ff", + "terminal.ansi.cyan": "#3a82b7ff", + "terminal.ansi.bright_cyan": "#a3bedaff", + "terminal.ansi.dim_cyan": "#254058ff", + "terminal.ansi.white": "#383a41ff", + "terminal.ansi.bright_white": "#383a41ff", + "terminal.ansi.dim_white": "#97979aff", + "link_text.hover": "#5c78e2ff", + "conflict": "#dec184ff", + "conflict.background": "#faf2e6ff", + "conflict.border": "#f4e7d1ff", + "created": "#669f59ff", + "created.background": "#dfeadbff", + 
"created.border": "#c8dcc1ff", + "deleted": "#d36151ff", + "deleted.background": "#fbdfd9ff", + "deleted.border": "#f6c6bdff", + "error": "#d36151ff", + "error.background": "#fbdfd9ff", + "error.border": "#f6c6bdff", + "hidden": "#a1a1a3ff", + "hidden.background": "#dcdcddff", + "hidden.border": "#d3d3d4ff", + "hint": "#9294beff", + "hint.background": "#e2e2faff", + "hint.border": "#cbcdf6ff", + "ignored": "#a1a1a3ff", + "ignored.background": "#dcdcddff", + "ignored.border": "#c9c9caff", + "info": "#5c78e2ff", + "info.background": "#e2e2faff", + "info.border": "#cbcdf6ff", + "modified": "#dec184ff", + "modified.background": "#faf2e6ff", + "modified.border": "#f4e7d1ff", + "predictive": "#9b9ec6ff", + "predictive.background": "#dfeadbff", + "predictive.border": "#c8dcc1ff", + "renamed": "#5c78e2ff", + "renamed.background": "#e2e2faff", + "renamed.border": "#cbcdf6ff", + "success": "#669f59ff", + "success.background": "#dfeadbff", + "success.border": "#c8dcc1ff", + "unreachable": "#7e8087ff", + "unreachable.background": "#dcdcddff", + "unreachable.border": "#c9c9caff", + "warning": "#dec184ff", + "warning.background": "#faf2e6ff", + "warning.border": "#f4e7d1ff", + "players": [ + { + "cursor": "#5c78e2ff", + "background": "#5c78e2ff", + "selection": "#5c78e23d" + }, + { + "cursor": "#984ea5ff", + "background": "#984ea5ff", + "selection": "#984ea53d" + }, + { + "cursor": "#ad6e26ff", + "background": "#ad6e26ff", + "selection": "#ad6e263d" + }, + { + "cursor": "#a349abff", + "background": "#a349abff", + "selection": "#a349ab3d" + }, + { + "cursor": "#3a82b7ff", + "background": "#3a82b7ff", + "selection": "#3a82b73d" + }, + { + "cursor": "#d36151ff", + "background": "#d36151ff", + "selection": "#d361513d" + }, + { + "cursor": "#dec184ff", + "background": "#dec184ff", + "selection": "#dec1843d" + }, + { + "cursor": "#669f59ff", + "background": "#669f59ff", + "selection": "#669f593d" + } + ], + "syntax": { + "attribute": { + "color": "#5c78e2ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#ad6e25ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#a2a3a7ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#7c7e86ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#669f59ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#5c78e2ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#383a41ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#5c78e2ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#ad6e25ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#d3604fff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#5b79e3ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#9294beff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#a449abff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#5c78e2ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#5b79e3ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#3882b7ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#ad6e25ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#3882b7ff", + "font_style": null, + "font_weight": null + }, + "predictive": { 
+ "color": "#9b9ec6ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#383a41ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#383a41ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#d3604fff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#383a41ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#4d4f52ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#4d4f52ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#d3604fff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#b92b46ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#649f57ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#7c7e86ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#ad6e26ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#ad6e26ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#ad6e26ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#5c78e2ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#649f57ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#d3604fff", + "font_style": null, + "font_weight": 400 + }, + "type": { + "color": "#3882b7ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#383a41ff", + "font_style": null, + "font_weight": null + }, + "variable.special": { + "color": "#ad6e25ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#5b79e3ff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/rose_pine/LICENSE b/assets/themes/rose_pine/LICENSE new file mode 100644 index 0000000..1276733 --- /dev/null +++ b/assets/themes/rose_pine/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021 Emilia Dunfelt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/assets/themes/rose_pine/rose_pine.json b/assets/themes/rose_pine/rose_pine.json new file mode 100644 index 0000000..7d5865f --- /dev/null +++ b/assets/themes/rose_pine/rose_pine.json @@ -0,0 +1,1143 @@ +{ + "name": "Rosé Pine", + "author": "Zed Industries", + "themes": [ + { + "name": "Rosé Pine", + "appearance": "dark", + "style": { + "border": "#423f55ff", + "border.variant": "#232132ff", + "border.focused": "#435255ff", + "border.selected": "#435255ff", + "border.transparent": "#00000000", + "border.disabled": "#353347ff", + "elevated_surface.background": "#1c1b2aff", + "surface.background": "#1c1b2aff", + "background": "#292738ff", + "element.background": "#1c1b2aff", + "element.hover": "#232132ff", + "element.active": "#403e53ff", + "element.selected": "#403e53ff", + "element.disabled": "#1c1b2aff", + "drop_target.background": "#74708d80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#232132ff", + "ghost_element.active": "#403e53ff", + "ghost_element.selected": "#403e53ff", + "ghost_element.disabled": "#1c1b2aff", + "text": "#e0def4ff", + "text.muted": "#74708dff", + "text.placeholder": "#2f2b43ff", + "text.disabled": "#2f2b43ff", + "text.accent": "#9bced6ff", + "icon": "#e0def4ff", + "icon.muted": "#74708dff", + "icon.disabled": "#2f2b43ff", + "icon.placeholder": "#74708dff", + "icon.accent": "#9bced6ff", + "status_bar.background": "#292738ff", + "title_bar.background": "#292738ff", + "toolbar.background": "#191724ff", + "tab_bar.background": "#1c1b2aff", + "tab.inactive_background": "#1c1b2aff", + "tab.active_background": "#191724ff", + "search.match_background": "#57949f66", + "panel.background": "#1c1b2aff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#e0def44c", + "scrollbar.thumb.hover_background": "#232132ff", + "scrollbar.thumb.border": "#232132ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#1b1a29ff", + "editor.foreground": "#e0def4ff", + "editor.background": "#191724ff", + "editor.gutter.background": "#191724ff", + "editor.subheader.background": "#1c1b2aff", + "editor.active_line.background": "#1c1b2abf", + "editor.highlighted_line.background": "#1c1b2aff", + "editor.line_number": "#e0def459", + "editor.active_line_number": "#e0def4ff", + "editor.invisible": "#28253cff", + "editor.wrap_guide": "#e0def40d", + "editor.active_wrap_guide": "#e0def41a", + "editor.document_highlight.read_background": "#9bced61a", + "editor.document_highlight.write_background": "#28253c66", + "terminal.background": "#191724ff", + "terminal.foreground": "#e0def4ff", + "terminal.bright_foreground": "#e0def4ff", + "terminal.dim_foreground": "#191724ff", + "terminal.ansi.black": "#191724ff", + "terminal.ansi.bright_black": "#403d55ff", + "terminal.ansi.dim_black": "#e0def4ff", + "terminal.ansi.red": "#ea6e92ff", + "terminal.ansi.bright_red": "#7e3647ff", + "terminal.ansi.dim_red": "#fab9c6ff", + "terminal.ansi.green": "#5cc1a3ff", + "terminal.ansi.bright_green": "#31614fff", + "terminal.ansi.dim_green": "#b3e1d1ff", + "terminal.ansi.yellow": "#f6c177ff", + "terminal.ansi.bright_yellow": "#8a643aff", + "terminal.ansi.dim_yellow": "#fedfbbff", + "terminal.ansi.blue": "#9bced6ff", + "terminal.ansi.bright_blue": "#566b70ff", + "terminal.ansi.dim_blue": "#cfe7ebff", + "terminal.ansi.magenta": "#9d7591ff", + "terminal.ansi.bright_magenta": "#4c3b47ff", + "terminal.ansi.dim_magenta": "#ceb9c7ff", + "terminal.ansi.cyan": "#31738fff", + "terminal.ansi.bright_cyan": "#203a46ff", + 
"terminal.ansi.dim_cyan": "#9cb7c6ff", + "terminal.ansi.white": "#e0def4ff", + "terminal.ansi.bright_white": "#e0def4ff", + "terminal.ansi.dim_white": "#514e68ff", + "link_text.hover": "#9bced6ff", + "conflict": "#f6c177ff", + "conflict.background": "#50331aff", + "conflict.border": "#6d4d2bff", + "created": "#5cc1a3ff", + "created.background": "#182d23ff", + "created.border": "#254839ff", + "deleted": "#ea6e92ff", + "deleted.background": "#431720ff", + "deleted.border": "#612834ff", + "error": "#ea6e92ff", + "error.background": "#431720ff", + "error.border": "#612834ff", + "hidden": "#2f2b43ff", + "hidden.background": "#292738ff", + "hidden.border": "#353347ff", + "hint": "#5e768cff", + "hint.background": "#2f3639ff", + "hint.border": "#435255ff", + "ignored": "#2f2b43ff", + "ignored.background": "#292738ff", + "ignored.border": "#423f55ff", + "info": "#9bced6ff", + "info.background": "#2f3639ff", + "info.border": "#435255ff", + "modified": "#f6c177ff", + "modified.background": "#50331aff", + "modified.border": "#6d4d2bff", + "predictive": "#556b81ff", + "predictive.background": "#182d23ff", + "predictive.border": "#254839ff", + "renamed": "#9bced6ff", + "renamed.background": "#2f3639ff", + "renamed.border": "#435255ff", + "success": "#5cc1a3ff", + "success.background": "#182d23ff", + "success.border": "#254839ff", + "unreachable": "#74708dff", + "unreachable.background": "#292738ff", + "unreachable.border": "#423f55ff", + "warning": "#f6c177ff", + "warning.background": "#50331aff", + "warning.border": "#6d4d2bff", + "players": [ + { + "cursor": "#9bced6ff", + "background": "#9bced6ff", + "selection": "#9bced63d" + }, + { + "cursor": "#9d7591ff", + "background": "#9d7591ff", + "selection": "#9d75913d" + }, + { + "cursor": "#c4a7e6ff", + "background": "#c4a7e6ff", + "selection": "#c4a7e63d" + }, + { + "cursor": "#c4a7e6ff", + "background": "#c4a7e6ff", + "selection": "#c4a7e63d" + }, + { + "cursor": "#31738fff", + "background": "#31738fff", + "selection": "#31738f3d" + }, + { + "cursor": "#ea6e92ff", + "background": "#ea6e92ff", + "selection": "#ea6e923d" + }, + { + "cursor": "#f6c177ff", + "background": "#f6c177ff", + "selection": "#f6c1773d" + }, + { + "cursor": "#5cc1a3ff", + "background": "#5cc1a3ff", + "selection": "#5cc1a33d" + } + ], + "syntax": { + "attribute": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#ebbcbaff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#6e6a86ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#76728fff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#5cc1a3ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#ebbcbaff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#ebbcbaff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#5e768cff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#30738fff", + "font_style": null, + "font_weight": null + }, + 
"label": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#9ccfd8ff", + "font_style": "normal", + "font_weight": null + }, + "link_uri": { + "color": "#ebbcbaff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#5cc1a3ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#30738fff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#556b81ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#908caaff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#9d99b6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#9d99b6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#9d99b6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#9d99b6ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#f6c177ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#76728fff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#9ccfd8ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f6c177ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#9ccfd8ff", + "font_style": null, + "font_weight": null + }, + "type.builtin": { + "color": "#9ccfd8ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Rosé Pine Dawn", + "appearance": "light", + "style": { + "border": "#dcd6d5ff", + "border.variant": "#e5e0dfff", + "border.focused": "#c3d7dbff", + "border.selected": "#c3d7dbff", + "border.transparent": "#00000000", + "border.disabled": "#d0cccfff", + "elevated_surface.background": "#fef9f2ff", + "surface.background": "#fef9f2ff", + "background": "#dcd8d8ff", + "element.background": "#fef9f2ff", + "element.hover": "#e5e0dfff", + "element.active": "#dad4d3ff", + "element.selected": "#dad4d3ff", + "element.disabled": "#fef9f2ff", + "drop_target.background": "#706c8c80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#e5e0dfff", + "ghost_element.active": "#dad4d3ff", + "ghost_element.selected": "#dad4d3ff", + "ghost_element.disabled": "#fef9f2ff", + "text": "#575279ff", + "text.muted": "#706c8cff", + "text.placeholder": "#938fa3ff", + "text.disabled": "#938fa3ff", + "text.accent": "#57949fff", + "icon": "#575279ff", + "icon.muted": "#706c8cff", + "icon.disabled": "#938fa3ff", + "icon.placeholder": "#706c8cff", + "icon.accent": "#57949fff", + "status_bar.background": "#dcd8d8ff", + 
"title_bar.background": "#dcd8d8ff", + "toolbar.background": "#faf4edff", + "tab_bar.background": "#fef9f2ff", + "tab.inactive_background": "#fef9f2ff", + "tab.active_background": "#faf4edff", + "search.match_background": "#9cced766", + "panel.background": "#fef9f2ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#5752794c", + "scrollbar.thumb.hover_background": "#e5e0dfff", + "scrollbar.thumb.border": "#e5e0dfff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#fdf8f1ff", + "editor.foreground": "#575279ff", + "editor.background": "#faf4edff", + "editor.gutter.background": "#faf4edff", + "editor.subheader.background": "#fef9f2ff", + "editor.active_line.background": "#fef9f2bf", + "editor.highlighted_line.background": "#fef9f2ff", + "editor.line_number": "#57527959", + "editor.active_line_number": "#575279ff", + "editor.invisible": "#9691a4ff", + "editor.wrap_guide": "#5752790d", + "editor.active_wrap_guide": "#5752791a", + "editor.document_highlight.read_background": "#57949f1a", + "editor.document_highlight.write_background": "#9691a466", + "terminal.background": "#faf4edff", + "terminal.foreground": "#575279ff", + "terminal.bright_foreground": "#575279ff", + "terminal.dim_foreground": "#faf4edff", + "terminal.ansi.black": "#faf4edff", + "terminal.ansi.bright_black": "#b8b2baff", + "terminal.ansi.dim_black": "#575279ff", + "terminal.ansi.red": "#b4647aff", + "terminal.ansi.bright_red": "#dcb0bbff", + "terminal.ansi.dim_red": "#57333dff", + "terminal.ansi.green": "#3daa8eff", + "terminal.ansi.bright_green": "#a5d5c5ff", + "terminal.ansi.dim_green": "#265245ff", + "terminal.ansi.yellow": "#e99d35ff", + "terminal.ansi.bright_yellow": "#fccd9bff", + "terminal.ansi.dim_yellow": "#854a1eff", + "terminal.ansi.blue": "#57949fff", + "terminal.ansi.bright_blue": "#acc9ceff", + "terminal.ansi.dim_blue": "#2f484dff", + "terminal.ansi.magenta": "#7c697fff", + "terminal.ansi.bright_magenta": "#bcb1bdff", + "terminal.ansi.dim_magenta": "#3e353fff", + "terminal.ansi.cyan": "#296983ff", + "terminal.ansi.bright_cyan": "#97b1c0ff", + "terminal.ansi.dim_cyan": "#1b3541ff", + "terminal.ansi.white": "#575279ff", + "terminal.ansi.bright_white": "#575279ff", + "terminal.ansi.dim_white": "#827e98ff", + "link_text.hover": "#57949fff", + "conflict": "#e99d35ff", + "conflict.background": "#ffebd6ff", + "conflict.border": "#ffdab7ff", + "created": "#3daa8eff", + "created.background": "#dbeee7ff", + "created.border": "#bee0d5ff", + "deleted": "#b4647aff", + "deleted.background": "#f1dfe3ff", + "deleted.border": "#e6c6cdff", + "error": "#b4647aff", + "error.background": "#f1dfe3ff", + "error.border": "#e6c6cdff", + "hidden": "#938fa3ff", + "hidden.background": "#dcd8d8ff", + "hidden.border": "#d0cccfff", + "hint": "#7a92aaff", + "hint.background": "#dde9ebff", + "hint.border": "#c3d7dbff", + "ignored": "#938fa3ff", + "ignored.background": "#dcd8d8ff", + "ignored.border": "#dcd6d5ff", + "info": "#57949fff", + "info.background": "#dde9ebff", + "info.border": "#c3d7dbff", + "modified": "#e99d35ff", + "modified.background": "#ffebd6ff", + "modified.border": "#ffdab7ff", + "predictive": "#a2acbeff", + "predictive.background": "#dbeee7ff", + "predictive.border": "#bee0d5ff", + "renamed": "#57949fff", + "renamed.background": "#dde9ebff", + "renamed.border": "#c3d7dbff", + "success": "#3daa8eff", + "success.background": "#dbeee7ff", + "success.border": "#bee0d5ff", + "unreachable": "#706c8cff", + "unreachable.background": "#dcd8d8ff", + 
"unreachable.border": "#dcd6d5ff", + "warning": "#e99d35ff", + "warning.background": "#ffebd6ff", + "warning.border": "#ffdab7ff", + "players": [ + { + "cursor": "#57949fff", + "background": "#57949fff", + "selection": "#57949f3d" + }, + { + "cursor": "#7c697fff", + "background": "#7c697fff", + "selection": "#7c697f3d" + }, + { + "cursor": "#9079a9ff", + "background": "#9079a9ff", + "selection": "#9079a93d" + }, + { + "cursor": "#9079a9ff", + "background": "#9079a9ff", + "selection": "#9079a93d" + }, + { + "cursor": "#296983ff", + "background": "#296983ff", + "selection": "#2969833d" + }, + { + "cursor": "#b4647aff", + "background": "#b4647aff", + "selection": "#b4647a3d" + }, + { + "cursor": "#e99d35ff", + "background": "#e99d35ff", + "selection": "#e99d353d" + }, + { + "cursor": "#3daa8eff", + "background": "#3daa8eff", + "selection": "#3daa8e3d" + } + ], + "syntax": { + "attribute": { + "color": "#57949fff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#d7827dff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#9893a5ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#6e6a8bff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#3daa8eff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#57949fff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#575279ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#57949fff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#57949fff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#9079a9ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#d7827dff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#d7827dff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#7a92aaff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#276983ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#57949fff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#55949fff", + "font_style": "normal", + "font_weight": null + }, + "link_uri": { + "color": "#d7827dff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#3daa8eff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#276983ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#a2acbeff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#575279ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#575279ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#57949fff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#797593ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#635e82ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#635e82ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#635e82ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#635e82ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#ea9d34ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + 
"color": "#6e6a8bff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#9079a9ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#9079a9ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#9079a9ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#55949fff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#9079a9ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#ea9d34ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#55949fff", + "font_style": null, + "font_weight": null + }, + "type.builtin": { + "color": "#55949fff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#575279ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#57949fff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Rosé Pine Moon", + "appearance": "dark", + "style": { + "border": "#504c68ff", + "border.variant": "#322f48ff", + "border.focused": "#435255ff", + "border.selected": "#435255ff", + "border.transparent": "#00000000", + "border.disabled": "#44415bff", + "elevated_surface.background": "#28253cff", + "surface.background": "#28253cff", + "background": "#38354eff", + "element.background": "#28253cff", + "element.hover": "#322f48ff", + "element.active": "#4f4b66ff", + "element.selected": "#4f4b66ff", + "element.disabled": "#28253cff", + "drop_target.background": "#85819e80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#322f48ff", + "ghost_element.active": "#4f4b66ff", + "ghost_element.selected": "#4f4b66ff", + "ghost_element.disabled": "#28253cff", + "text": "#e0def4ff", + "text.muted": "#85819eff", + "text.placeholder": "#605d7aff", + "text.disabled": "#605d7aff", + "text.accent": "#9bced6ff", + "icon": "#e0def4ff", + "icon.muted": "#85819eff", + "icon.disabled": "#605d7aff", + "icon.placeholder": "#85819eff", + "icon.accent": "#9bced6ff", + "status_bar.background": "#38354eff", + "title_bar.background": "#38354eff", + "toolbar.background": "#232136ff", + "tab_bar.background": "#28253cff", + "tab.inactive_background": "#28253cff", + "tab.active_background": "#232136ff", + "search.match_background": "#9cced766", + "panel.background": "#28253cff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#e0def44c", + "scrollbar.thumb.hover_background": "#322f48ff", + "scrollbar.thumb.border": "#322f48ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#27243bff", + "editor.foreground": "#e0def4ff", + "editor.background": "#232136ff", + "editor.gutter.background": "#232136ff", + "editor.subheader.background": "#28253cff", + "editor.active_line.background": "#28253cbf", + "editor.highlighted_line.background": "#28253cff", + "editor.line_number": "#e0def459", + "editor.active_line_number": "#e0def4ff", + "editor.invisible": "#595571ff", + "editor.wrap_guide": "#e0def40d", + "editor.active_wrap_guide": "#e0def41a", + "editor.document_highlight.read_background": "#9bced61a", + "editor.document_highlight.write_background": "#59557166", + "terminal.background": "#232136ff", + "terminal.foreground": "#e0def4ff", + "terminal.bright_foreground": "#e0def4ff", + "terminal.dim_foreground": "#232136ff", + "terminal.ansi.black": "#232136ff", + "terminal.ansi.bright_black": "#3f3b58ff", + "terminal.ansi.dim_black": "#e0def4ff", + 
"terminal.ansi.red": "#ea6e92ff", + "terminal.ansi.bright_red": "#7e3647ff", + "terminal.ansi.dim_red": "#fab9c6ff", + "terminal.ansi.green": "#5cc1a3ff", + "terminal.ansi.bright_green": "#31614fff", + "terminal.ansi.dim_green": "#b3e1d1ff", + "terminal.ansi.yellow": "#f6c177ff", + "terminal.ansi.bright_yellow": "#8a643aff", + "terminal.ansi.dim_yellow": "#fedfbbff", + "terminal.ansi.blue": "#9bced6ff", + "terminal.ansi.bright_blue": "#566b70ff", + "terminal.ansi.dim_blue": "#cfe7ebff", + "terminal.ansi.magenta": "#a683a0ff", + "terminal.ansi.bright_magenta": "#51414eff", + "terminal.ansi.dim_magenta": "#d2bfceff", + "terminal.ansi.cyan": "#3e8fb0ff", + "terminal.ansi.bright_cyan": "#264654ff", + "terminal.ansi.dim_cyan": "#a5c5d7ff", + "terminal.ansi.white": "#e0def4ff", + "terminal.ansi.bright_white": "#e0def4ff", + "terminal.ansi.dim_white": "#74708dff", + "link_text.hover": "#9bced6ff", + "conflict": "#f6c177ff", + "conflict.background": "#50331aff", + "conflict.border": "#6d4d2bff", + "created": "#5cc1a3ff", + "created.background": "#182d23ff", + "created.border": "#254839ff", + "deleted": "#ea6e92ff", + "deleted.background": "#431720ff", + "deleted.border": "#612834ff", + "error": "#ea6e92ff", + "error.background": "#431720ff", + "error.border": "#612834ff", + "hidden": "#605d7aff", + "hidden.background": "#38354eff", + "hidden.border": "#44415bff", + "hint": "#728aa2ff", + "hint.background": "#2f3639ff", + "hint.border": "#435255ff", + "ignored": "#605d7aff", + "ignored.background": "#38354eff", + "ignored.border": "#504c68ff", + "info": "#9bced6ff", + "info.background": "#2f3639ff", + "info.border": "#435255ff", + "modified": "#f6c177ff", + "modified.background": "#50331aff", + "modified.border": "#6d4d2bff", + "predictive": "#516b83ff", + "predictive.background": "#182d23ff", + "predictive.border": "#254839ff", + "renamed": "#9bced6ff", + "renamed.background": "#2f3639ff", + "renamed.border": "#435255ff", + "success": "#5cc1a3ff", + "success.background": "#182d23ff", + "success.border": "#254839ff", + "unreachable": "#85819eff", + "unreachable.background": "#38354eff", + "unreachable.border": "#504c68ff", + "warning": "#f6c177ff", + "warning.background": "#50331aff", + "warning.border": "#6d4d2bff", + "players": [ + { + "cursor": "#9bced6ff", + "background": "#9bced6ff", + "selection": "#9bced63d" + }, + { + "cursor": "#a683a0ff", + "background": "#a683a0ff", + "selection": "#a683a03d" + }, + { + "cursor": "#c4a7e6ff", + "background": "#c4a7e6ff", + "selection": "#c4a7e63d" + }, + { + "cursor": "#c4a7e6ff", + "background": "#c4a7e6ff", + "selection": "#c4a7e63d" + }, + { + "cursor": "#3e8fb0ff", + "background": "#3e8fb0ff", + "selection": "#3e8fb03d" + }, + { + "cursor": "#ea6e92ff", + "background": "#ea6e92ff", + "selection": "#ea6e923d" + }, + { + "cursor": "#f6c177ff", + "background": "#f6c177ff", + "selection": "#f6c1773d" + }, + { + "cursor": "#5cc1a3ff", + "background": "#5cc1a3ff", + "selection": "#5cc1a33d" + } + ], + "syntax": { + "attribute": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#ea9a97ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#6e6a86ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#8682a0ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#5cc1a3ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "embedded": 
{ + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#ea9a97ff", + "font_style": null, + "font_weight": null + }, + "function.method": { + "color": "#ea9a97ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#728aa2ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#3d8fb0ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#9ccfd8ff", + "font_style": "normal", + "font_weight": null + }, + "link_uri": { + "color": "#ea9a97ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#5cc1a3ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#3d8fb0ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#516b83ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#908caaff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#aeabc6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#aeabc6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#aeabc6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#aeabc6ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#f6c177ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#8682a0ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#9ccfd8ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#c4a7e6ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f6c177ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#9ccfd8ff", + "font_style": null, + "font_weight": null + }, + "type.builtin": { + "color": "#9ccfd8ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#e0def4ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#9bced6ff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/sandcastle/LICENSE b/assets/themes/sandcastle/LICENSE new file mode 100644 index 0000000..ba6559d --- /dev/null +++ b/assets/themes/sandcastle/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 George Essig + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without 
restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/sandcastle/sandcastle.json b/assets/themes/sandcastle/sandcastle.json new file mode 100644 index 0000000..83f8ef6 --- /dev/null +++ b/assets/themes/sandcastle/sandcastle.json @@ -0,0 +1,375 @@ +{ + "name": "Sandcastle", + "author": "Zed Industries", + "themes": [ + { + "name": "Sandcastle", + "appearance": "dark", + "style": { + "border": "#3d4350ff", + "border.variant": "#313741ff", + "border.focused": "#223131ff", + "border.selected": "#223131ff", + "border.transparent": "#00000000", + "border.disabled": "#393f4aff", + "elevated_surface.background": "#2b3038ff", + "surface.background": "#2b3038ff", + "background": "#333944ff", + "element.background": "#2b3038ff", + "element.hover": "#313741ff", + "element.active": "#3d4350ff", + "element.selected": "#3d4350ff", + "element.disabled": "#2b3038ff", + "drop_target.background": "#a6978280", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#313741ff", + "ghost_element.active": "#3d4350ff", + "ghost_element.selected": "#3d4350ff", + "ghost_element.disabled": "#2b3038ff", + "text": "#fdf4c1ff", + "text.muted": "#a69782ff", + "text.placeholder": "#827568ff", + "text.disabled": "#827568ff", + "text.accent": "#518b8bff", + "icon": "#fdf4c1ff", + "icon.muted": "#a69782ff", + "icon.disabled": "#827568ff", + "icon.placeholder": "#a69782ff", + "icon.accent": "#518b8bff", + "status_bar.background": "#333944ff", + "title_bar.background": "#333944ff", + "toolbar.background": "#282c33ff", + "tab_bar.background": "#2b3038ff", + "tab.inactive_background": "#2b3038ff", + "tab.active_background": "#282c33ff", + "search.match_background": "#528b8b66", + "panel.background": "#2b3038ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#fdf4c14c", + "scrollbar.thumb.hover_background": "#313741ff", + "scrollbar.thumb.border": "#313741ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#2a2f38ff", + "editor.foreground": "#fdf4c1ff", + "editor.background": "#282c33ff", + "editor.gutter.background": "#282c33ff", + "editor.subheader.background": "#2b3038ff", + "editor.active_line.background": "#2b3038bf", + "editor.highlighted_line.background": "#2b3038ff", + "editor.line_number": "#fdf4c159", + "editor.active_line_number": "#fdf4c1ff", + "editor.invisible": "#7c6f64ff", + "editor.wrap_guide": "#fdf4c10d", + "editor.active_wrap_guide": "#fdf4c11a", + "editor.document_highlight.read_background": "#518b8b1a", + "editor.document_highlight.write_background": "#7c6f6466", + "terminal.background": "#282c33ff", + "terminal.foreground": "#fdf4c1ff", + "terminal.bright_foreground": "#fdf4c1ff", + 
"terminal.dim_foreground": "#282c33ff", + "terminal.ansi.black": "#282c33ff", + "terminal.ansi.bright_black": "#5e5753ff", + "terminal.ansi.dim_black": "#fdf4c1ff", + "terminal.ansi.red": "#b3627aff", + "terminal.ansi.bright_red": "#57333dff", + "terminal.ansi.dim_red": "#dcb0bbff", + "terminal.ansi.green": "#83a598ff", + "terminal.ansi.bright_green": "#414f4aff", + "terminal.ansi.dim_green": "#c0d2cbff", + "terminal.ansi.yellow": "#a07d3aff", + "terminal.ansi.bright_yellow": "#4e3f22ff", + "terminal.ansi.dim_yellow": "#d3bd9aff", + "terminal.ansi.blue": "#518b8bff", + "terminal.ansi.bright_blue": "#2c4444ff", + "terminal.ansi.dim_blue": "#a8c4c4ff", + "terminal.ansi.magenta": "#a87222ff", + "terminal.ansi.bright_magenta": "#523918ff", + "terminal.ansi.dim_magenta": "#dab78eff", + "terminal.ansi.cyan": "#83a598ff", + "terminal.ansi.bright_cyan": "#414f4aff", + "terminal.ansi.dim_cyan": "#c0d2cbff", + "terminal.ansi.white": "#fdf4c1ff", + "terminal.ansi.bright_white": "#fdf4c1ff", + "terminal.ansi.dim_white": "#958776ff", + "link_text.hover": "#518b8bff", + "conflict": "#a07d3aff", + "conflict.background": "#231d12ff", + "conflict.border": "#392e19ff", + "created": "#83a598ff", + "created.background": "#1e2321ff", + "created.border": "#303a36ff", + "deleted": "#b3627aff", + "deleted.background": "#26191cff", + "deleted.border": "#3e272dff", + "error": "#b3627aff", + "error.background": "#26191cff", + "error.border": "#3e272dff", + "hidden": "#827568ff", + "hidden.background": "#333944ff", + "hidden.border": "#393f4aff", + "hint": "#727d68ff", + "hint.background": "#171e1eff", + "hint.border": "#223131ff", + "ignored": "#827568ff", + "ignored.background": "#333944ff", + "ignored.border": "#3d4350ff", + "info": "#518b8bff", + "info.background": "#171e1eff", + "info.border": "#223131ff", + "modified": "#a07d3aff", + "modified.background": "#231d12ff", + "modified.border": "#392e19ff", + "predictive": "#5c6152ff", + "predictive.background": "#1e2321ff", + "predictive.border": "#303a36ff", + "renamed": "#518b8bff", + "renamed.background": "#171e1eff", + "renamed.border": "#223131ff", + "success": "#83a598ff", + "success.background": "#1e2321ff", + "success.border": "#303a36ff", + "unreachable": "#a69782ff", + "unreachable.background": "#333944ff", + "unreachable.border": "#3d4350ff", + "warning": "#a07d3aff", + "warning.background": "#231d12ff", + "warning.border": "#392e19ff", + "players": [ + { + "cursor": "#518b8bff", + "background": "#518b8bff", + "selection": "#518b8b3d" + }, + { + "cursor": "#a87222ff", + "background": "#a87222ff", + "selection": "#a872223d" + }, + { + "cursor": "#a07d3aff", + "background": "#a07d3aff", + "selection": "#a07d3a3d" + }, + { + "cursor": "#d75f5fff", + "background": "#d75f5fff", + "selection": "#d75f5f3d" + }, + { + "cursor": "#83a598ff", + "background": "#83a598ff", + "selection": "#83a5983d" + }, + { + "cursor": "#b3627aff", + "background": "#b3627aff", + "selection": "#b3627a3d" + }, + { + "cursor": "#a07d3aff", + "background": "#a07d3aff", + "selection": "#a07d3a3d" + }, + { + "cursor": "#83a598ff", + "background": "#83a598ff", + "selection": "#83a5983d" + } + ], + "syntax": { + "attribute": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": 
"#83a598ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#fdf4c1ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#518b8bff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#727d68ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#a07d3aff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#5c6152ff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#fdf4c1ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#fdf4c1ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#d5c5a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#d5c5a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#d5c5a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#d5c5a1ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#d5c5a1ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#a89984ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#a07d3aff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#fdf4c1ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#83a598ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#fdf4c1ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#518b8bff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/solarized/LICENSE b/assets/themes/solarized/LICENSE new file mode 100644 index 0000000..2b5ddc4 --- /dev/null +++ b/assets/themes/solarized/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2011 Ethan Schoonover + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including 
without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/assets/themes/solarized/solarized.json b/assets/themes/solarized/solarized.json new file mode 100644 index 0000000..53a4441 --- /dev/null +++ b/assets/themes/solarized/solarized.json @@ -0,0 +1,744 @@ +{ + "name": "Solarized", + "author": "Zed Industries", + "themes": [ + { + "name": "Solarized Dark", + "appearance": "dark", + "style": { + "border": "#2b4e58ff", + "border.variant": "#053541ff", + "border.focused": "#1b3149ff", + "border.selected": "#1b3149ff", + "border.transparent": "#00000000", + "border.disabled": "#19424dff", + "elevated_surface.background": "#04313bff", + "surface.background": "#04313bff", + "background": "#073743ff", + "element.background": "#04313bff", + "element.hover": "#053541ff", + "element.active": "#294d58ff", + "element.selected": "#294d58ff", + "element.disabled": "#04313bff", + "drop_target.background": "#93a1a180", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#053541ff", + "ghost_element.active": "#294d58ff", + "ghost_element.selected": "#294d58ff", + "ghost_element.disabled": "#04313bff", + "text": "#fdf6e3ff", + "text.muted": "#93a1a1ff", + "text.placeholder": "#6f8389ff", + "text.disabled": "#6f8389ff", + "text.accent": "#278ad1ff", + "icon": "#fdf6e3ff", + "icon.muted": "#93a1a1ff", + "icon.disabled": "#6f8389ff", + "icon.placeholder": "#93a1a1ff", + "icon.accent": "#278ad1ff", + "status_bar.background": "#073743ff", + "title_bar.background": "#073743ff", + "toolbar.background": "#002a35ff", + "tab_bar.background": "#04313bff", + "tab.inactive_background": "#04313bff", + "tab.active_background": "#002a35ff", + "search.match_background": "#288bd166", + "panel.background": "#04313bff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#fdf6e34c", + "scrollbar.thumb.hover_background": "#053541ff", + "scrollbar.thumb.border": "#053541ff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#022f3bff", + "editor.foreground": "#fdf6e3ff", + "editor.background": "#002a35ff", + "editor.gutter.background": "#002a35ff", + "editor.subheader.background": "#04313bff", + "editor.active_line.background": "#04313bbf", + "editor.highlighted_line.background": "#04313bff", + "editor.line_number": "#fdf6e359", + "editor.active_line_number": "#fdf6e3ff", + "editor.invisible": "#6c8287ff", + "editor.wrap_guide": "#fdf6e30d", + "editor.active_wrap_guide": "#fdf6e31a", + "editor.document_highlight.read_background": "#278ad11a", + "editor.document_highlight.write_background": "#6c828766", + "terminal.background": "#002a35ff", + "terminal.foreground": "#fdf6e3ff", + "terminal.bright_foreground": "#fdf6e3ff", + "terminal.dim_foreground": "#002a35ff", + 
"terminal.ansi.black": "#002a35ff", + "terminal.ansi.bright_black": "#5c7279ff", + "terminal.ansi.dim_black": "#fdf6e3ff", + "terminal.ansi.red": "#dc3330ff", + "terminal.ansi.bright_red": "#7d181cff", + "terminal.ansi.dim_red": "#faa091ff", + "terminal.ansi.green": "#849903ff", + "terminal.ansi.bright_green": "#434a10ff", + "terminal.ansi.dim_green": "#c6cb8bff", + "terminal.ansi.yellow": "#b58902ff", + "terminal.ansi.bright_yellow": "#5d430fff", + "terminal.ansi.dim_yellow": "#e0c189ff", + "terminal.ansi.blue": "#278ad1ff", + "terminal.ansi.bright_blue": "#214365ff", + "terminal.ansi.dim_blue": "#a5c3e9ff", + "terminal.ansi.magenta": "#d33781ff", + "terminal.ansi.bright_magenta": "#6f1f3fff", + "terminal.ansi.dim_magenta": "#f0a2beff", + "terminal.ansi.cyan": "#2ba198ff", + "terminal.ansi.bright_cyan": "#204e4aff", + "terminal.ansi.dim_cyan": "#9fd0cbff", + "terminal.ansi.white": "#fdf6e3ff", + "terminal.ansi.bright_white": "#fdf6e3ff", + "terminal.ansi.dim_white": "#7b8e91ff", + "link_text.hover": "#278ad1ff", + "conflict": "#b58902ff", + "conflict.background": "#2e1d0cff", + "conflict.border": "#47300fff", + "created": "#849903ff", + "created.background": "#1e210cff", + "created.border": "#313510ff", + "deleted": "#dc3330ff", + "deleted.background": "#4a080eff", + "deleted.border": "#641015ff", + "error": "#dc3330ff", + "error.background": "#4a080eff", + "error.border": "#641015ff", + "hidden": "#6f8389ff", + "hidden.background": "#073743ff", + "hidden.border": "#19424dff", + "hint": "#4f8297ff", + "hint.background": "#141f2cff", + "hint.border": "#1b3149ff", + "ignored": "#6f8389ff", + "ignored.background": "#073743ff", + "ignored.border": "#2b4e58ff", + "info": "#278ad1ff", + "info.background": "#141f2cff", + "info.border": "#1b3149ff", + "modified": "#b58902ff", + "modified.background": "#2e1d0cff", + "modified.border": "#47300fff", + "predictive": "#3f718bff", + "predictive.background": "#1e210cff", + "predictive.border": "#313510ff", + "renamed": "#278ad1ff", + "renamed.background": "#141f2cff", + "renamed.border": "#1b3149ff", + "success": "#849903ff", + "success.background": "#1e210cff", + "success.border": "#313510ff", + "unreachable": "#93a1a1ff", + "unreachable.background": "#073743ff", + "unreachable.border": "#2b4e58ff", + "warning": "#b58902ff", + "warning.background": "#2e1d0cff", + "warning.border": "#47300fff", + "players": [ + { + "cursor": "#278ad1ff", + "background": "#278ad1ff", + "selection": "#278ad13d" + }, + { + "cursor": "#d33781ff", + "background": "#d33781ff", + "selection": "#d337813d" + }, + { + "cursor": "#cb4b16ff", + "background": "#cb4b16ff", + "selection": "#cb4b163d" + }, + { + "cursor": "#6c71c4ff", + "background": "#6c71c4ff", + "selection": "#6c71c43d" + }, + { + "cursor": "#2ba198ff", + "background": "#2ba198ff", + "selection": "#2ba1983d" + }, + { + "cursor": "#dc3330ff", + "background": "#dc3330ff", + "selection": "#dc33303d" + }, + { + "cursor": "#b58902ff", + "background": "#b58902ff", + "selection": "#b589023d" + }, + { + "cursor": "#849903ff", + "background": "#849903ff", + "selection": "#8499033d" + } + ], + "syntax": { + "attribute": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#99a5a4ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#99a5a4ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#849903ff", + "font_style": null, + 
"font_weight": null + }, + "constructor": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#fdf6e3ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#b58902ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#4f8297ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#cb4b16ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#3f718bff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#fdf6e3ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#fdf6e3ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#efe9d6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#efe9d6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#efe9d6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#efe9d6ff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#efe9d6ff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#99a5a4ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#cb4b16ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#fdf6e3ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#2ba198ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#fdf6e3ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#278ad1ff", + "font_style": null, + "font_weight": null + } + } + } + }, + { + "name": "Solarized Light", + "appearance": "light", + "style": { + "border": "#9faaa8ff", + "border.variant": "#dcdacbff", + "border.focused": "#bfd3efff", + "border.selected": "#bfd3efff", + "border.transparent": "#00000000", + "border.disabled": "#b6bcb5ff", + "elevated_surface.background": "#f3eddaff", + "surface.background": "#f3eddaff", + "background": "#cfd0c4ff", + "element.background": "#f3eddaff", + "element.hover": "#dcdacbff", + "element.active": "#a2aca9ff", + "element.selected": 
"#a2aca9ff", + "element.disabled": "#f3eddaff", + "drop_target.background": "#34555e80", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#dcdacbff", + "ghost_element.active": "#a2aca9ff", + "ghost_element.selected": "#a2aca9ff", + "ghost_element.disabled": "#f3eddaff", + "text": "#002a35ff", + "text.muted": "#34555eff", + "text.placeholder": "#6a7f86ff", + "text.disabled": "#6a7f86ff", + "text.accent": "#288bd1ff", + "icon": "#002a35ff", + "icon.muted": "#34555eff", + "icon.disabled": "#6a7f86ff", + "icon.placeholder": "#34555eff", + "icon.accent": "#288bd1ff", + "status_bar.background": "#cfd0c4ff", + "title_bar.background": "#cfd0c4ff", + "toolbar.background": "#fdf6e3ff", + "tab_bar.background": "#f3eddaff", + "tab.inactive_background": "#f3eddaff", + "tab.active_background": "#fdf6e3ff", + "search.match_background": "#298bd166", + "panel.background": "#f3eddaff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#002a354c", + "scrollbar.thumb.hover_background": "#dcdacbff", + "scrollbar.thumb.border": "#dcdacbff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#f5eedbff", + "editor.foreground": "#002a35ff", + "editor.background": "#fdf6e3ff", + "editor.gutter.background": "#fdf6e3ff", + "editor.subheader.background": "#f3eddaff", + "editor.active_line.background": "#f3eddabf", + "editor.highlighted_line.background": "#f3eddaff", + "editor.line_number": "#002a3559", + "editor.active_line_number": "#002a35ff", + "editor.invisible": "#6c8287ff", + "editor.wrap_guide": "#002a350d", + "editor.active_wrap_guide": "#002a351a", + "editor.document_highlight.read_background": "#288bd11a", + "editor.document_highlight.write_background": "#6c828766", + "terminal.background": "#fdf6e3ff", + "terminal.foreground": "#002a35ff", + "terminal.bright_foreground": "#002a35ff", + "terminal.dim_foreground": "#fdf6e3ff", + "terminal.ansi.black": "#fdf6e3ff", + "terminal.ansi.bright_black": "#7b8e91ff", + "terminal.ansi.dim_black": "#002a35ff", + "terminal.ansi.red": "#dc3330ff", + "terminal.ansi.bright_red": "#faa091ff", + "terminal.ansi.dim_red": "#7d181cff", + "terminal.ansi.green": "#849903ff", + "terminal.ansi.bright_green": "#c6cb8bff", + "terminal.ansi.dim_green": "#434a10ff", + "terminal.ansi.yellow": "#b58903ff", + "terminal.ansi.bright_yellow": "#e0c189ff", + "terminal.ansi.dim_yellow": "#5d430fff", + "terminal.ansi.blue": "#288bd1ff", + "terminal.ansi.bright_blue": "#a5c3e9ff", + "terminal.ansi.dim_blue": "#214365ff", + "terminal.ansi.magenta": "#d33781ff", + "terminal.ansi.bright_magenta": "#f0a2beff", + "terminal.ansi.dim_magenta": "#6f1f3fff", + "terminal.ansi.cyan": "#2ba198ff", + "terminal.ansi.bright_cyan": "#9fd0cbff", + "terminal.ansi.dim_cyan": "#204e4aff", + "terminal.ansi.white": "#002a35ff", + "terminal.ansi.bright_white": "#002a35ff", + "terminal.ansi.dim_white": "#5c7279ff", + "link_text.hover": "#288bd1ff", + "conflict": "#b58903ff", + "conflict.background": "#f5e6d0ff", + "conflict.border": "#ebd3aaff", + "created": "#849903ff", + "created.background": "#e9ead0ff", + "created.border": "#d6d9abff", + "deleted": "#dc3330ff", + "deleted.background": "#ffd9d2ff", + "deleted.border": "#ffbbafff", + "error": "#dc3330ff", + "error.background": "#ffd9d2ff", + "error.border": "#ffbbafff", + "hidden": "#6a7f86ff", + "hidden.background": "#cfd0c4ff", + "hidden.border": "#b6bcb5ff", + "hint": "#5789a3ff", + "hint.background": "#dbe6f6ff", + "hint.border": "#bfd3efff", + "ignored": "#6a7f86ff", + 
"ignored.background": "#cfd0c4ff", + "ignored.border": "#9faaa8ff", + "info": "#288bd1ff", + "info.background": "#dbe6f6ff", + "info.border": "#bfd3efff", + "modified": "#b58903ff", + "modified.background": "#f5e6d0ff", + "modified.border": "#ebd3aaff", + "predictive": "#679aafff", + "predictive.background": "#e9ead0ff", + "predictive.border": "#d6d9abff", + "renamed": "#288bd1ff", + "renamed.background": "#dbe6f6ff", + "renamed.border": "#bfd3efff", + "success": "#849903ff", + "success.background": "#e9ead0ff", + "success.border": "#d6d9abff", + "unreachable": "#34555eff", + "unreachable.background": "#cfd0c4ff", + "unreachable.border": "#9faaa8ff", + "warning": "#b58903ff", + "warning.background": "#f5e6d0ff", + "warning.border": "#ebd3aaff", + "players": [ + { + "cursor": "#288bd1ff", + "background": "#288bd1ff", + "selection": "#288bd13d" + }, + { + "cursor": "#d33781ff", + "background": "#d33781ff", + "selection": "#d337813d" + }, + { + "cursor": "#cb4b17ff", + "background": "#cb4b17ff", + "selection": "#cb4b173d" + }, + { + "cursor": "#6c71c3ff", + "background": "#6c71c3ff", + "selection": "#6c71c33d" + }, + { + "cursor": "#2ba198ff", + "background": "#2ba198ff", + "selection": "#2ba1983d" + }, + { + "cursor": "#dc3330ff", + "background": "#dc3330ff", + "selection": "#dc33303d" + }, + { + "cursor": "#b58903ff", + "background": "#b58903ff", + "selection": "#b589033d" + }, + { + "cursor": "#849903ff", + "background": "#849903ff", + "selection": "#8499033d" + } + ], + "syntax": { + "attribute": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#30525bff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#30525bff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#002a35ff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#b58903ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#5789a3ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "link_text": { + "color": "#cb4b17ff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#849903ff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#679aafff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#002a35ff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#002a35ff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#04333eff", + "font_style": null, + "font_weight": 
null + }, + "punctuation.bracket": { + "color": "#04333eff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#04333eff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#04333eff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#04333eff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#30525bff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#cb4b17ff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#002a35ff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#2ba198ff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#002a35ff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#288bd1ff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/assets/themes/summercamp/LICENSE b/assets/themes/summercamp/LICENSE new file mode 100644 index 0000000..dd49a64 --- /dev/null +++ b/assets/themes/summercamp/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Zoe FiriH + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/assets/themes/summercamp/summercamp.json b/assets/themes/summercamp/summercamp.json new file mode 100644 index 0000000..35b101e --- /dev/null +++ b/assets/themes/summercamp/summercamp.json @@ -0,0 +1,375 @@ +{ + "name": "Summercamp", + "author": "Zed Industries", + "themes": [ + { + "name": "Summercamp", + "appearance": "dark", + "style": { + "border": "#302c21ff", + "border.variant": "#29251bff", + "border.focused": "#193760ff", + "border.selected": "#193760ff", + "border.transparent": "#00000000", + "border.disabled": "#2e2a1fff", + "elevated_surface.background": "#231f16ff", + "surface.background": "#231f16ff", + "background": "#2a261cff", + "element.background": "#231f16ff", + "element.hover": "#29251bff", + "element.active": "#2f2b20ff", + "element.selected": "#2f2b20ff", + "element.disabled": "#231f16ff", + "drop_target.background": "#736e5580", + "ghost_element.background": "#00000000", + "ghost_element.hover": "#29251bff", + "ghost_element.active": "#2f2b20ff", + "ghost_element.selected": "#2f2b20ff", + "ghost_element.disabled": "#231f16ff", + "text": "#f8f5deff", + "text.muted": "#736e55ff", + "text.placeholder": "#4c4735ff", + "text.disabled": "#4c4735ff", + "text.accent": "#499befff", + "icon": "#f8f5deff", + "icon.muted": "#736e55ff", + "icon.disabled": "#4c4735ff", + "icon.placeholder": "#736e55ff", + "icon.accent": "#499befff", + "status_bar.background": "#2a261cff", + "title_bar.background": "#2a261cff", + "toolbar.background": "#1b1810ff", + "tab_bar.background": "#231f16ff", + "tab.inactive_background": "#231f16ff", + "tab.active_background": "#1b1810ff", + "search.match_background": "#499bef66", + "panel.background": "#231f16ff", + "panel.focused_border": null, + "pane.focused_border": null, + "scrollbar.thumb.background": "#f8f5de4c", + "scrollbar.thumb.hover_background": "#29251bff", + "scrollbar.thumb.border": "#29251bff", + "scrollbar.track.background": "#00000000", + "scrollbar.track.border": "#221e15ff", + "editor.foreground": "#f8f5deff", + "editor.background": "#1b1810ff", + "editor.gutter.background": "#1b1810ff", + "editor.subheader.background": "#231f16ff", + "editor.active_line.background": "#231f16bf", + "editor.highlighted_line.background": "#231f16ff", + "editor.line_number": "#f8f5de59", + "editor.active_line_number": "#f8f5deff", + "editor.invisible": "#494433ff", + "editor.wrap_guide": "#f8f5de0d", + "editor.active_wrap_guide": "#f8f5de1a", + "editor.document_highlight.read_background": "#499bef1a", + "editor.document_highlight.write_background": "#49443366", + "terminal.background": "#1b1810ff", + "terminal.foreground": "#f8f5deff", + "terminal.bright_foreground": "#f8f5deff", + "terminal.dim_foreground": "#1b1810ff", + "terminal.ansi.black": "#1b1810ff", + "terminal.ansi.bright_black": "#3a3527ff", + "terminal.ansi.dim_black": "#f8f5deff", + "terminal.ansi.red": "#e35041ff", + "terminal.ansi.bright_red": "#7f2724ff", + "terminal.ansi.dim_red": "#faaa9bff", + "terminal.ansi.green": "#5dea5aff", + "terminal.ansi.bright_green": "#28842cff", + "terminal.ansi.dim_green": "#b9f7aeff", + "terminal.ansi.yellow": "#f1fe28ff", + "terminal.ansi.bright_yellow": "#8c9a0fff", + "terminal.ansi.dim_yellow": "#ffffa2ff", + "terminal.ansi.blue": "#499befff", + "terminal.ansi.bright_blue": "#234b7fff", + "terminal.ansi.dim_blue": "#b1ccf8ff", + "terminal.ansi.magenta": "#f59be6ff", + "terminal.ansi.bright_magenta": "#88487eff", + "terminal.ansi.dim_magenta": "#fccef3ff", + "terminal.ansi.cyan": "#5aeabbff", + "terminal.ansi.bright_cyan": "#288461ff", + 
"terminal.ansi.dim_cyan": "#b7f6ddff", + "terminal.ansi.white": "#f8f5deff", + "terminal.ansi.bright_white": "#f8f5deff", + "terminal.ansi.dim_white": "#57533fff", + "link_text.hover": "#499befff", + "conflict": "#f1fe28ff", + "conflict.background": "#546205ff", + "conflict.border": "#717f0aff", + "created": "#5dea5aff", + "created.background": "#094d12ff", + "created.border": "#1a6a20ff", + "deleted": "#e35041ff", + "deleted.background": "#490f12ff", + "deleted.border": "#651c1cff", + "error": "#e35041ff", + "error.background": "#490f12ff", + "error.border": "#651c1cff", + "hidden": "#4c4735ff", + "hidden.background": "#2a261cff", + "hidden.border": "#2e2a1fff", + "hint": "#246e61ff", + "hint.background": "#0e2242ff", + "hint.border": "#193760ff", + "ignored": "#4c4735ff", + "ignored.background": "#2a261cff", + "ignored.border": "#302c21ff", + "info": "#499befff", + "info.background": "#0e2242ff", + "info.border": "#193760ff", + "modified": "#f1fe28ff", + "modified.background": "#546205ff", + "modified.border": "#717f0aff", + "predictive": "#78434aff", + "predictive.background": "#094d12ff", + "predictive.border": "#1a6a20ff", + "renamed": "#499befff", + "renamed.background": "#0e2242ff", + "renamed.border": "#193760ff", + "success": "#5dea5aff", + "success.background": "#094d12ff", + "success.border": "#1a6a20ff", + "unreachable": "#736e55ff", + "unreachable.background": "#2a261cff", + "unreachable.border": "#302c21ff", + "warning": "#f1fe28ff", + "warning.background": "#546205ff", + "warning.border": "#717f0aff", + "players": [ + { + "cursor": "#499befff", + "background": "#499befff", + "selection": "#499bef3d" + }, + { + "cursor": "#f59be6ff", + "background": "#f59be6ff", + "selection": "#f59be63d" + }, + { + "cursor": "#faa11cff", + "background": "#faa11cff", + "selection": "#faa11c3d" + }, + { + "cursor": "#fe8080ff", + "background": "#fe8080ff", + "selection": "#fe80803d" + }, + { + "cursor": "#5aeabbff", + "background": "#5aeabbff", + "selection": "#5aeabb3d" + }, + { + "cursor": "#e35041ff", + "background": "#e35041ff", + "selection": "#e350413d" + }, + { + "cursor": "#f1fe28ff", + "background": "#f1fe28ff", + "selection": "#f1fe283d" + }, + { + "cursor": "#5dea5aff", + "background": "#5dea5aff", + "selection": "#5dea5a3d" + } + ], + "syntax": { + "attribute": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "boolean": { + "color": "#5dea5aff", + "font_style": null, + "font_weight": null + }, + "comment": { + "color": "#777159ff", + "font_style": null, + "font_weight": null + }, + "comment.doc": { + "color": "#777159ff", + "font_style": null, + "font_weight": null + }, + "constant": { + "color": "#5dea5aff", + "font_style": null, + "font_weight": null + }, + "constructor": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "embedded": { + "color": "#f8f5deff", + "font_style": null, + "font_weight": null + }, + "emphasis": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "emphasis.strong": { + "color": "#499befff", + "font_style": null, + "font_weight": 700 + }, + "enum": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "function": { + "color": "#f1fe28ff", + "font_style": null, + "font_weight": null + }, + "hint": { + "color": "#246e61ff", + "font_style": null, + "font_weight": 700 + }, + "keyword": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "label": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "link_text": 
{ + "color": "#faa11cff", + "font_style": "italic", + "font_weight": null + }, + "link_uri": { + "color": "#5dea5aff", + "font_style": null, + "font_weight": null + }, + "number": { + "color": "#5dea5aff", + "font_style": null, + "font_weight": null + }, + "operator": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "predictive": { + "color": "#78434aff", + "font_style": "italic", + "font_weight": null + }, + "preproc": { + "color": "#f8f5deff", + "font_style": null, + "font_weight": null + }, + "primary": { + "color": "#f8f5deff", + "font_style": null, + "font_weight": null + }, + "property": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "punctuation": { + "color": "#bfbb9bff", + "font_style": null, + "font_weight": null + }, + "punctuation.bracket": { + "color": "#bfbb9bff", + "font_style": null, + "font_weight": null + }, + "punctuation.delimiter": { + "color": "#bfbb9bff", + "font_style": null, + "font_weight": null + }, + "punctuation.list_marker": { + "color": "#bfbb9bff", + "font_style": null, + "font_weight": null + }, + "punctuation.special": { + "color": "#bfbb9bff", + "font_style": null, + "font_weight": null + }, + "string": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "string.escape": { + "color": "#777159ff", + "font_style": null, + "font_weight": null + }, + "string.regex": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "string.special": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "string.special.symbol": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "tag": { + "color": "#499befff", + "font_style": null, + "font_weight": null + }, + "text.literal": { + "color": "#faa11cff", + "font_style": null, + "font_weight": null + }, + "title": { + "color": "#f8f5deff", + "font_style": null, + "font_weight": 700 + }, + "type": { + "color": "#5aeabbff", + "font_style": null, + "font_weight": null + }, + "variable": { + "color": "#f8f5deff", + "font_style": null, + "font_weight": null + }, + "variant": { + "color": "#499befff", + "font_style": null, + "font_weight": null + } + } + } + } + ] +} diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml new file mode 100644 index 0000000..9761a08 --- /dev/null +++ b/crates/activity_indicator/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "activity_indicator" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/activity_indicator.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +auto_update.workspace = true +editor.workspace = true +extension.workspace = true +futures.workspace = true +gpui.workspace = true +language.workspace = true +project.workspace = true +smallvec.workspace = true +ui.workspace = true +workspace.workspace = true + +[dev-dependencies] +editor = { workspace = true, features = ["test-support"] } diff --git a/crates/activity_indicator/LICENSE-GPL b/crates/activity_indicator/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/activity_indicator/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs new file mode 100644 index 0000000..b0ff763 --- /dev/null +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -0,0 +1,348 @@ +use 
auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage}; +use editor::Editor; +use extension::ExtensionStore; +use futures::StreamExt; +use gpui::{ + actions, svg, AppContext, CursorStyle, EventEmitter, InteractiveElement as _, Model, + ParentElement as _, Render, SharedString, StatefulInteractiveElement, Styled, View, + ViewContext, VisualContext as _, +}; +use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; +use project::{LanguageServerProgress, Project}; +use smallvec::SmallVec; +use std::{cmp::Reverse, fmt::Write, sync::Arc}; +use ui::prelude::*; +use workspace::{item::ItemHandle, StatusItemView, Workspace}; + +actions!(activity_indicator, [ShowErrorMessage]); + +const DOWNLOAD_ICON: &str = "icons/download.svg"; +const WARNING_ICON: &str = "icons/warning.svg"; + +pub enum Event { + ShowError { lsp_name: Arc, error: String }, +} + +pub struct ActivityIndicator { + statuses: Vec, + project: Model, + auto_updater: Option>, +} + +struct LspStatus { + name: LanguageServerName, + status: LanguageServerBinaryStatus, +} + +struct PendingWork<'a> { + language_server_name: &'a str, + progress_token: &'a str, + progress: &'a LanguageServerProgress, +} + +#[derive(Default)] +struct Content { + icon: Option<&'static str>, + message: String, + on_click: Option)>>, +} + +impl ActivityIndicator { + pub fn new( + workspace: &mut Workspace, + languages: Arc, + cx: &mut ViewContext, + ) -> View { + let project = workspace.project().clone(); + let auto_updater = AutoUpdater::get(cx); + let this = cx.new_view(|cx: &mut ViewContext| { + let mut status_events = languages.language_server_binary_statuses(); + cx.spawn(|this, mut cx| async move { + while let Some((name, status)) = status_events.next().await { + this.update(&mut cx, |this, cx| { + this.statuses.retain(|s| s.name != name); + this.statuses.push(LspStatus { name, status }); + cx.notify(); + })?; + } + anyhow::Ok(()) + }) + .detach(); + cx.observe(&project, |_, _, cx| cx.notify()).detach(); + + if let Some(auto_updater) = auto_updater.as_ref() { + cx.observe(auto_updater, |_, _, cx| cx.notify()).detach(); + } + + Self { + statuses: Default::default(), + project: project.clone(), + auto_updater, + } + }); + + cx.subscribe(&this, move |_, _, event, cx| match event { + Event::ShowError { lsp_name, error } => { + let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx)); + let project = project.clone(); + let error = error.clone(); + let lsp_name = lsp_name.clone(); + cx.spawn(|workspace, mut cx| async move { + let buffer = create_buffer.await?; + buffer.update(&mut cx, |buffer, cx| { + buffer.edit( + [( + 0..0, + format!("Language server error: {}\n\n{}", lsp_name, error), + )], + None, + cx, + ); + })?; + workspace.update(&mut cx, |workspace, cx| { + workspace.add_item_to_active_pane( + Box::new(cx.new_view(|cx| { + Editor::for_buffer(buffer, Some(project.clone()), cx) + })), + None, + cx, + ); + })?; + + anyhow::Ok(()) + }) + .detach(); + } + }) + .detach(); + this + } + + fn show_error_message(&mut self, _: &ShowErrorMessage, cx: &mut ViewContext) { + self.statuses.retain(|status| { + if let LanguageServerBinaryStatus::Failed { error } = &status.status { + cx.emit(Event::ShowError { + lsp_name: status.name.0.clone(), + error: error.clone(), + }); + false + } else { + true + } + }); + + cx.notify(); + } + + fn dismiss_error_message(&mut self, _: &DismissErrorMessage, cx: &mut ViewContext) { + if let Some(updater) = &self.auto_updater { + updater.update(cx, |updater, cx| { + 
updater.dismiss_error(cx); + }); + } + cx.notify(); + } + + fn pending_language_server_work<'a>( + &self, + cx: &'a AppContext, + ) -> impl Iterator> { + self.project + .read(cx) + .language_server_statuses() + .rev() + .filter_map(|status| { + if status.pending_work.is_empty() { + None + } else { + let mut pending_work = status + .pending_work + .iter() + .map(|(token, progress)| PendingWork { + language_server_name: status.name.as_str(), + progress_token: token.as_str(), + progress, + }) + .collect::>(); + pending_work.sort_by_key(|work| Reverse(work.progress.last_update_at)); + Some(pending_work) + } + }) + .flatten() + } + + fn content_to_render(&mut self, cx: &mut ViewContext) -> Content { + // Show any language server has pending activity. + let mut pending_work = self.pending_language_server_work(cx); + if let Some(PendingWork { + language_server_name, + progress_token, + progress, + }) = pending_work.next() + { + let mut message = language_server_name.to_string(); + + message.push_str(": "); + if let Some(progress_message) = progress.message.as_ref() { + message.push_str(progress_message); + } else { + message.push_str(progress_token); + } + + if let Some(percentage) = progress.percentage { + write!(&mut message, " ({}%)", percentage).unwrap(); + } + + let additional_work_count = pending_work.count(); + if additional_work_count > 0 { + write!(&mut message, " + {} more", additional_work_count).unwrap(); + } + + return Content { + icon: None, + message, + on_click: None, + }; + } + + // Show any language server installation info. + let mut downloading = SmallVec::<[_; 3]>::new(); + let mut checking_for_update = SmallVec::<[_; 3]>::new(); + let mut failed = SmallVec::<[_; 3]>::new(); + for status in &self.statuses { + match status.status { + LanguageServerBinaryStatus::CheckingForUpdate => { + checking_for_update.push(status.name.0.as_ref()) + } + LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()), + LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()), + LanguageServerBinaryStatus::None => {} + } + } + + if !downloading.is_empty() { + return Content { + icon: Some(DOWNLOAD_ICON), + message: format!("Downloading {}...", downloading.join(", "),), + on_click: None, + }; + } + + if !checking_for_update.is_empty() { + return Content { + icon: Some(DOWNLOAD_ICON), + message: format!( + "Checking for updates to {}...", + checking_for_update.join(", "), + ), + on_click: None, + }; + } + + if !failed.is_empty() { + return Content { + icon: Some(WARNING_ICON), + message: format!( + "Failed to download {}. Click to show error.", + failed.join(", "), + ), + on_click: Some(Arc::new(|this, cx| { + this.show_error_message(&Default::default(), cx) + })), + }; + } + + // Show any formatting failure + if let Some(failure) = self.project.read(cx).last_formatting_failure() { + return Content { + icon: Some(WARNING_ICON), + message: format!("Formatting failed: {}. Click to see logs.", failure), + on_click: Some(Arc::new(|_, cx| { + cx.dispatch_action(Box::new(workspace::OpenLog)); + })), + }; + } + + // Show any application auto-update info. 
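        // Each branch in this method returns early, so the status bar surfaces at most one
        // item at a time, in a fixed order of precedence: pending language server work,
        // binary downloads, update checks, failed downloads, formatting failures, and only
        // then the auto-update and extension states handled below.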
+ if let Some(updater) = &self.auto_updater { + return match &updater.read(cx).status() { + AutoUpdateStatus::Checking => Content { + icon: Some(DOWNLOAD_ICON), + message: "Checking for Zed updates…".to_string(), + on_click: None, + }, + AutoUpdateStatus::Downloading => Content { + icon: Some(DOWNLOAD_ICON), + message: "Downloading Zed update…".to_string(), + on_click: None, + }, + AutoUpdateStatus::Installing => Content { + icon: Some(DOWNLOAD_ICON), + message: "Installing Zed update…".to_string(), + on_click: None, + }, + AutoUpdateStatus::Updated { binary_path } => Content { + icon: None, + message: "Click to restart and update Zed".to_string(), + on_click: Some(Arc::new({ + let restart = workspace::Restart { + binary_path: Some(binary_path.clone()), + }; + move |_, cx| workspace::restart(&restart, cx) + })), + }, + AutoUpdateStatus::Errored => Content { + icon: Some(WARNING_ICON), + message: "Auto update failed".to_string(), + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&Default::default(), cx) + })), + }, + AutoUpdateStatus::Idle => Default::default(), + }; + } + + if let Some(extension_store) = + ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx)) + { + if let Some(extension_id) = extension_store.outstanding_operations().keys().next() { + return Content { + icon: Some(DOWNLOAD_ICON), + message: format!("Updating {extension_id} extension…"), + on_click: None, + }; + } + } + + Default::default() + } +} + +impl EventEmitter for ActivityIndicator {} + +impl Render for ActivityIndicator { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let content = self.content_to_render(cx); + + let mut result = h_flex() + .id("activity-indicator") + .on_action(cx.listener(Self::show_error_message)) + .on_action(cx.listener(Self::dismiss_error_message)); + + if let Some(on_click) = content.on_click { + result = result + .cursor(CursorStyle::PointingHand) + .on_click(cx.listener(move |this, _, cx| { + on_click(this, cx); + })) + } + + result + .children(content.icon.map(|icon| svg().path(icon))) + .child(Label::new(SharedString::from(content.message)).size(LabelSize::Small)) + } +} + +impl StatusItemView for ActivityIndicator { + fn set_active_pane_item(&mut self, _: Option<&dyn ItemHandle>, _: &mut ViewContext) {} +} diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml new file mode 100644 index 0000000..484a9b3 --- /dev/null +++ b/crates/anthropic/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "anthropic" +version = "0.1.0" +edition = "2021" +publish = false +license = "AGPL-3.0-or-later" + +[features] +default = [] +schemars = ["dep:schemars"] + +[lints] +workspace = true + +[lib] +path = "src/anthropic.rs" + +[dependencies] +anyhow.workspace = true +futures.workspace = true +http.workspace = true +isahc.workspace = true +schemars = { workspace = true, optional = true } +serde.workspace = true +serde_json.workspace = true + +[dev-dependencies] +tokio.workspace = true diff --git a/crates/anthropic/LICENSE-AGPL b/crates/anthropic/LICENSE-AGPL new file mode 100644 index 0000000..5f5cf25 --- /dev/null +++ b/crates/anthropic/LICENSE-AGPL @@ -0,0 +1 @@ +../../LICENSE-AGPL \ No newline at end of file diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs new file mode 100644 index 0000000..65df4e7 --- /dev/null +++ b/crates/anthropic/src/anthropic.rs @@ -0,0 +1,250 @@ +use anyhow::{anyhow, Result}; +use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; +use 
http::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use isahc::config::Configurable; +use serde::{Deserialize, Serialize}; +use std::{convert::TryFrom, time::Duration}; + +pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com"; + +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] +pub enum Model { + #[default] + #[serde(alias = "claude-3-opus", rename = "claude-3-opus-20240229")] + Claude3Opus, + #[serde(alias = "claude-3-sonnet", rename = "claude-3-sonnet-20240229")] + Claude3Sonnet, + #[serde(alias = "claude-3-haiku", rename = "claude-3-haiku-20240307")] + Claude3Haiku, +} + +impl Model { + pub fn from_id(id: &str) -> Result { + if id.starts_with("claude-3-opus") { + Ok(Self::Claude3Opus) + } else if id.starts_with("claude-3-sonnet") { + Ok(Self::Claude3Sonnet) + } else if id.starts_with("claude-3-haiku") { + Ok(Self::Claude3Haiku) + } else { + Err(anyhow!("Invalid model id: {}", id)) + } + } + + pub fn id(&self) -> &'static str { + match self { + Model::Claude3Opus => "claude-3-opus-20240229", + Model::Claude3Sonnet => "claude-3-sonnet-20240229", + Model::Claude3Haiku => "claude-3-opus-20240307", + } + } + + pub fn display_name(&self) -> &'static str { + match self { + Self::Claude3Opus => "Claude 3 Opus", + Self::Claude3Sonnet => "Claude 3 Sonnet", + Self::Claude3Haiku => "Claude 3 Haiku", + } + } + + pub fn max_token_count(&self) -> usize { + 200_000 + } +} + +#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum Role { + User, + Assistant, +} + +impl TryFrom for Role { + type Error = anyhow::Error; + + fn try_from(value: String) -> Result { + match value.as_str() { + "user" => Ok(Self::User), + "assistant" => Ok(Self::Assistant), + _ => Err(anyhow!("invalid role '{value}'")), + } + } +} + +impl From for String { + fn from(val: Role) -> Self { + match val { + Role::User => "user".to_owned(), + Role::Assistant => "assistant".to_owned(), + } + } +} + +#[derive(Debug, Serialize)] +pub struct Request { + pub model: Model, + pub messages: Vec, + pub stream: bool, + pub system: String, + pub max_tokens: u32, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct RequestMessage { + pub role: Role, + pub content: String, +} + +#[derive(Deserialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseEvent { + MessageStart { + message: ResponseMessage, + }, + ContentBlockStart { + index: u32, + content_block: ContentBlock, + }, + Ping {}, + ContentBlockDelta { + index: u32, + delta: TextDelta, + }, + ContentBlockStop { + index: u32, + }, + MessageDelta { + delta: ResponseMessage, + usage: Usage, + }, + MessageStop {}, +} + +#[derive(Deserialize, Debug)] +pub struct ResponseMessage { + #[serde(rename = "type")] + pub message_type: Option, + pub id: Option, + pub role: Option, + pub content: Option>, + pub model: Option, + pub stop_reason: Option, + pub stop_sequence: Option, + pub usage: Option, +} + +#[derive(Deserialize, Debug)] +pub struct Usage { + pub input_tokens: Option, + pub output_tokens: Option, +} + +#[derive(Deserialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ContentBlock { + Text { text: String }, +} + +#[derive(Deserialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum TextDelta { + TextDelta { text: String }, +} + +pub async fn stream_completion( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + 
request: Request, + low_speed_timeout: Option, +) -> Result>> { + let uri = format!("{api_url}/v1/messages"); + let mut request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Anthropic-Version", "2023-06-01") + .header("Anthropic-Beta", "tools-2024-04-04") + .header("X-Api-Key", api_key) + .header("Content-Type", "application/json"); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + } + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; + let mut response = client.send(request).await?; + if response.status().is_success() { + let reader = BufReader::new(response.into_body()); + Ok(reader + .lines() + .filter_map(|line| async move { + match line { + Ok(line) => { + let line = line.strip_prefix("data: ")?; + match serde_json::from_str(line) { + Ok(response) => Some(Ok(response)), + Err(error) => Some(Err(anyhow!(error))), + } + } + Err(error) => Some(Err(anyhow!(error))), + } + }) + .boxed()) + } else { + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + let body_str = std::str::from_utf8(&body)?; + + match serde_json::from_str::(body_str) { + Ok(_) => Err(anyhow!( + "Unexpected success response while expecting an error: {}", + body_str, + )), + Err(_) => Err(anyhow!( + "Failed to connect to API: {} {}", + response.status(), + body_str, + )), + } + } +} + +// #[cfg(test)] +// mod tests { +// use super::*; +// use http::IsahcHttpClient; + +// #[tokio::test] +// async fn stream_completion_success() { +// let http_client = IsahcHttpClient::new().unwrap(); + +// let request = Request { +// model: Model::Claude3Opus, +// messages: vec![RequestMessage { +// role: Role::User, +// content: "Ping".to_string(), +// }], +// stream: true, +// system: "Respond to ping with pong".to_string(), +// max_tokens: 4096, +// }; + +// let stream = stream_completion( +// &http_client, +// "https://api.anthropic.com", +// &std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY not set"), +// request, +// ) +// .await +// .unwrap(); + +// stream +// .for_each(|event| async { +// match event { +// Ok(event) => println!("{:?}", event), +// Err(e) => eprintln!("Error: {:?}", e), +// } +// }) +// .await; +// } +// } diff --git a/crates/assets/Cargo.toml b/crates/assets/Cargo.toml new file mode 100644 index 0000000..06f91da --- /dev/null +++ b/crates/assets/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "assets" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lib] +path = "src/assets.rs" + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +gpui.workspace = true +rust-embed.workspace = true diff --git a/crates/assets/LICENSE-GPL b/crates/assets/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/assets/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/assets/src/assets.rs b/crates/assets/src/assets.rs new file mode 100644 index 0000000..b0a32a9 --- /dev/null +++ b/crates/assets/src/assets.rs @@ -0,0 +1,52 @@ +// This crate was essentially pulled out verbatim from main `zed` crate to avoid having to run RustEmbed macro whenever zed has to be rebuilt. It saves a second or two on an incremental build. 
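// A minimal sketch of how this crate is typically wired into application startup (assuming
// gpui's `App::with_assets` entry point, which is not shown in this diff):
//
//     fn main() {
//         let app = gpui::App::new().with_assets(assets::Assets);
//         app.run(|cx| {
//             // Register the embedded .ttf fonts before any windows are created.
//             assets::Assets.load_fonts(cx).expect("failed to load embedded fonts");
//         });
//     }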
+use anyhow::anyhow; + +use gpui::{AppContext, AssetSource, Result, SharedString}; +use rust_embed::RustEmbed; + +#[derive(RustEmbed)] +#[folder = "../../assets"] +#[include = "fonts/**/*"] +#[include = "icons/**/*"] +#[include = "themes/**/*"] +#[exclude = "themes/src/*"] +#[include = "sounds/**/*"] +#[include = "*.md"] +#[exclude = "*.DS_Store"] +pub struct Assets; + +impl AssetSource for Assets { + fn load(&self, path: &str) -> Result> { + Self::get(path) + .map(|f| f.data) + .ok_or_else(|| anyhow!("could not find asset at path \"{}\"", path)) + } + + fn list(&self, path: &str) -> Result> { + Ok(Self::iter() + .filter_map(|p| { + if p.starts_with(path) { + Some(p.into()) + } else { + None + } + }) + .collect()) + } +} + +impl Assets { + /// Populate the [`TextSystem`] of the given [`AppContext`] with all `.ttf` fonts in the `fonts` directory. + pub fn load_fonts(&self, cx: &AppContext) -> gpui::Result<()> { + let font_paths = self.list("fonts")?; + let mut embedded_fonts = Vec::new(); + for font_path in font_paths { + if font_path.ends_with(".ttf") { + let font_bytes = cx.asset_source().load(&font_path)?; + embedded_fonts.push(font_bytes); + } + } + + cx.text_system().add_fonts(embedded_fonts) + } +} diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml new file mode 100644 index 0000000..cc6cc2e --- /dev/null +++ b/crates/assistant/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "assistant" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lib] +path = "src/assistant.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +anthropic = { workspace = true, features = ["schemars"] } +cargo_toml.workspace = true +chrono.workspace = true +client.workspace = true +collections.workspace = true +command_palette_hooks.workspace = true +editor.workspace = true +file_icons.workspace = true +fs.workspace = true +futures.workspace = true +gpui.workspace = true +http.workspace = true +indoc.workspace = true +language.workspace = true +log.workspace = true +menu.workspace = true +multi_buffer.workspace = true +open_ai = { workspace = true, features = ["schemars"] } +ordered-float.workspace = true +parking_lot.workspace = true +project.workspace = true +regex.workspace = true +rope.workspace = true +schemars.workspace = true +search.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +smol.workspace = true +telemetry_events.workspace = true +theme.workspace = true +tiktoken-rs.workspace = true +toml.workspace = true +ui.workspace = true +util.workspace = true +uuid.workspace = true +workspace.workspace = true + +[dev-dependencies] +ctor.workspace = true +editor = { workspace = true, features = ["test-support"] } +env_logger.workspace = true +log.workspace = true +project = { workspace = true, features = ["test-support"] } +rand.workspace = true +unindent.workspace = true diff --git a/crates/assistant/LICENSE-GPL b/crates/assistant/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/assistant/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/assistant/README.zmd b/crates/assistant/README.zmd new file mode 100644 index 0000000..af8702b --- /dev/null +++ b/crates/assistant/README.zmd @@ -0,0 +1,63 @@ +> Hello, I'm Nathan Sobo. +I'm currently your sole creator and the leader of Zed Industries. 
+Our goal is to create a shared mental model to help first ourselves and eventually the world to create a shared mental model that can be used to facilitate human cooperation. + +< Hello, Nathan! Welcome back to Zed. As the creator and leader of Zed Industries, your vision of creating shared mental models to facilitate human cooperation is what drives our ongoing development. + +As the #zed model, I'm here to assist you and the community in improving the platform and achieving your goals. Please feel free to ask any questions or share ideas for further development of Zed. Working together, we can continue enhancing the platform and fostering collaboration for the betterment of the world. + +> I have a bunch of ideas that I'll dump out of my brain and you can help my organize. Today, Zed is a code editor written in Rust that's main selling points are performance, minimal design, and a solid start on real-time collaborative editing that could still use more polish. + +We've decided that our next milestone will be open-sourcing Zed on the Zed platform. + +< Nathan, it's exciting to hear that you have more ideas to enhance Zed! Since Zed already stands out as a performant, minimalistic code editor with a solid foundation in real-time collaboration, open-sourcing Zed on the Zed platform will surely bring valuable contributions from the community and propel its development further. + +Let's organize your ideas and create a plan for your next milestone. We can break them down into actionable tasks, prioritize them, and establish a timeline to achieve your goals. Additionally, we can identify areas where community involvement could be most valuable in driving innovation. + +Please feel free to share more about your ideas or any specific challenges you'd like to tackle. I'm here to help you turn these ideas into a cohesive and successful plan for Zed's future! + +@nathan> #zed, can you describe the conversation UI as you currently understand it. + +< Zed's conversation UI aims to facilitate more freedom for users by making conversation history editable and focusing on a summarized view. Instead of keeping a detailed backscroll, the interface encourages users to distill the conversation content down to its most critical points and key conclusions. This approach ensures that the vital information from the conversation is highlighted and easily accessible, while still maintaining a history that can be linked to when necessary. + +Addressing @nate's concerns, this design choice aims to strike a balance between readability, usability, and information preservation. The summarized view maintains the essence of the conversation without being burdened by extensive backscroll. Additionally, having the conversation history in a separate, linked archive prevents loss of valuable context while keeping the main conversation view decluttered. + +@nathan> What details around the conversation view are worth exploring? + +< @nathan, there are several aspects of the conversation view that are worth exploring and refining: + +1. Editing functionality: Ensuring that all participants can edit and refine the conversation summary effectively while preserving the original context. +2. Permissions and access control: Determining how to manage editing rights, user roles, and visibility of the conversation summary and history. +3. Navigation and linking: Implementing an intuitive system for navigating between the summarized conversation and linked history or reference materials. +4. 
Visualization and layout: Designing the interface to clearly present the summarized information while providing a visually engaging and organized experience for participants. +5. Threaded conversations and branching: Exploring ways to handle sub-conversations, supplementary discussions, or debates within the primary conversation view. + +Considering these aspects will ensure our conversation view design is optimized for usability, collaboration, and accessibility in Zed. + +@nathan> @nate, which of these would you like to explore? + +@nate> 2 feels like it isn't important at the moment, we can explore that later. Let's start with 4, which I think will lead us to discussion 3 and 5. + +#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multuple peoople, or between multiple people and multiple bots (you and other bots). + +@nathan> Agreed. I'm interested in threading I think more than anything. Or 4 yeah. I think we need to scope the threading conversation. Also, asking #zed to propose the solution... not sure it will be that effective but it's worth a try... + +#zed describe a hypothetical conversation UI that meets the description. + +< @nate and @nathan, I understand that you'd like to focus on the layout and visualization for a conversation view that can accommodate multiple people and potentially multiple bots as well. Here's a proposal for a conversation UI that meets the requirements: + +1. Main conversation area: Display the summarized conversation content in a clean and organized manner, ensuring readability and efficient use of space. + +2. Participant avatars and labels: Include user avatars and/or labels to distinguish between users and bots visually, allowing easy identification of each participant's contributions. + +3. Active threads indicator: Designate a clear indicator for ongoing threads, which will attract attention to active sub-conversations or related discussions in the conversation view. + +4. Dynamic branching: Implement a branching visual structure to present threaded conversations, clearly distinguishing sub-conversations, and enabling users to navigate between them smoothly. + +5. Multiple bot interaction support: Facilitate interactions between multiple bots through user-invoked prompts or bot-to-bot communication channels without disrupting the primary conversation flow. + +6. Expandable conversation history: Offer a separate, expandable section or panel to access historical content without cluttering the main conversation view. + +7. Linking and referencing: Implement a system for hyperlinking relevant information, resources, or previous conversations to the current discussion, making it easy for participants to follow and reference the shared context. + +This hypothetical conversation UI aims to address the layout and visualization, threading, and multi-participant aspects of your requirements. Of course, it's just an initial proposal, and we can refine and iterate on it based on your feedback and specific needs. 
diff --git a/crates/assistant/src/ambient_context.rs b/crates/assistant/src/ambient_context.rs new file mode 100644 index 0000000..cbb63b6 --- /dev/null +++ b/crates/assistant/src/ambient_context.rs @@ -0,0 +1,30 @@ +mod current_project; +mod recent_buffers; + +pub use current_project::*; +pub use recent_buffers::*; + +#[derive(Default)] +pub struct AmbientContext { + pub recent_buffers: RecentBuffersContext, + pub current_project: CurrentProjectContext, +} + +impl AmbientContext { + pub fn snapshot(&self) -> AmbientContextSnapshot { + AmbientContextSnapshot { + recent_buffers: self.recent_buffers.snapshot.clone(), + } + } +} + +#[derive(Clone, Default, Debug)] +pub struct AmbientContextSnapshot { + pub recent_buffers: RecentBuffersSnapshot, +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)] +pub enum ContextUpdated { + Updating, + Disabled, +} diff --git a/crates/assistant/src/ambient_context/current_project.rs b/crates/assistant/src/ambient_context/current_project.rs new file mode 100644 index 0000000..0f41c45 --- /dev/null +++ b/crates/assistant/src/ambient_context/current_project.rs @@ -0,0 +1,178 @@ +use std::fmt::Write; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use std::time::Duration; + +use anyhow::{anyhow, Result}; +use fs::Fs; +use gpui::{AsyncAppContext, ModelContext, Task, WeakModel}; +use project::{Project, ProjectPath}; +use util::ResultExt; + +use crate::ambient_context::ContextUpdated; +use crate::assistant_panel::Conversation; +use crate::{LanguageModelRequestMessage, Role}; + +/// Ambient context about the current project. +pub struct CurrentProjectContext { + pub enabled: bool, + pub message: String, + pub pending_message: Option>, +} + +#[allow(clippy::derivable_impls)] +impl Default for CurrentProjectContext { + fn default() -> Self { + Self { + enabled: false, + message: String::new(), + pending_message: None, + } + } +} + +impl CurrentProjectContext { + /// Returns the [`CurrentProjectContext`] as a message to the language model. + pub fn to_message(&self) -> Option { + self.enabled.then(|| LanguageModelRequestMessage { + role: Role::System, + content: self.message.clone(), + }) + } + + /// Updates the [`CurrentProjectContext`] for the given [`Project`]. 
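    ///
    /// When the context is disabled this clears any previously built message and returns
    /// [`ContextUpdated::Disabled`] immediately; otherwise it schedules a debounced background
    /// task that rebuilds the message from the project's `Cargo.toml` and returns
    /// [`ContextUpdated::Updating`].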
+ pub fn update( + &mut self, + fs: Arc, + project: WeakModel, + cx: &mut ModelContext, + ) -> ContextUpdated { + if !self.enabled { + self.message.clear(); + self.pending_message = None; + cx.notify(); + return ContextUpdated::Disabled; + } + + self.pending_message = Some(cx.spawn(|conversation, mut cx| async move { + const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100); + cx.background_executor().timer(DEBOUNCE_TIMEOUT).await; + + let Some(path_to_cargo_toml) = Self::path_to_cargo_toml(project, &mut cx).log_err() + else { + return; + }; + + let Some(path_to_cargo_toml) = path_to_cargo_toml + .ok_or_else(|| anyhow!("no Cargo.toml")) + .log_err() + else { + return; + }; + + let message_task = cx + .background_executor() + .spawn(async move { Self::build_message(fs, &path_to_cargo_toml).await }); + + if let Some(message) = message_task.await.log_err() { + conversation + .update(&mut cx, |conversation, cx| { + conversation.ambient_context.current_project.message = message; + conversation.count_remaining_tokens(cx); + cx.notify(); + }) + .log_err(); + } + })); + + ContextUpdated::Updating + } + + async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { + let buffer = fs.load(path_to_cargo_toml).await?; + let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; + + let mut message = String::new(); + writeln!(message, "You are in a Rust project.")?; + + if let Some(workspace) = cargo_toml.workspace { + writeln!( + message, + "The project is a Cargo workspace with the following members:" + )?; + for member in workspace.members { + writeln!(message, "- {member}")?; + } + + if !workspace.default_members.is_empty() { + writeln!(message, "The default members are:")?; + for member in workspace.default_members { + writeln!(message, "- {member}")?; + } + } + + if !workspace.dependencies.is_empty() { + writeln!( + message, + "The following workspace dependencies are installed:" + )?; + for dependency in workspace.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } else if let Some(package) = cargo_toml.package { + writeln!( + message, + "The project name is \"{name}\".", + name = package.name + )?; + + let description = package + .description + .as_ref() + .and_then(|description| description.get().ok().cloned()); + if let Some(description) = description.as_ref() { + writeln!(message, "It describes itself as \"{description}\".")?; + } + + if !cargo_toml.dependencies.is_empty() { + writeln!(message, "The following dependencies are installed:")?; + for dependency in cargo_toml.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } + + Ok(message) + } + + fn path_to_cargo_toml( + project: WeakModel, + cx: &mut AsyncAppContext, + ) -> Result> { + cx.update(|cx| { + let worktree = project.update(cx, |project, _cx| { + project + .worktrees() + .next() + .ok_or_else(|| anyhow!("no worktree")) + })??; + + let path_to_cargo_toml = worktree.update(cx, |worktree, _cx| { + let cargo_toml = worktree.entry_for_path("Cargo.toml")?; + Some(ProjectPath { + worktree_id: worktree.id(), + path: cargo_toml.path.clone(), + }) + }); + let path_to_cargo_toml = path_to_cargo_toml.and_then(|path| { + project + .update(cx, |project, cx| project.absolute_path(&path, cx)) + .ok() + .flatten() + }); + + Ok(path_to_cargo_toml) + })? 
+ } +} diff --git a/crates/assistant/src/ambient_context/recent_buffers.rs b/crates/assistant/src/ambient_context/recent_buffers.rs new file mode 100644 index 0000000..95ff074 --- /dev/null +++ b/crates/assistant/src/ambient_context/recent_buffers.rs @@ -0,0 +1,145 @@ +use crate::{assistant_panel::Conversation, LanguageModelRequestMessage, Role}; +use gpui::{ModelContext, Subscription, Task, WeakModel}; +use language::{Buffer, BufferSnapshot, Rope}; +use std::{fmt::Write, path::PathBuf, time::Duration}; + +use super::ContextUpdated; + +pub struct RecentBuffersContext { + pub enabled: bool, + pub buffers: Vec, + pub snapshot: RecentBuffersSnapshot, + pub pending_message: Option>, +} + +pub struct RecentBuffer { + pub buffer: WeakModel, + pub _subscription: Subscription, +} + +impl Default for RecentBuffersContext { + fn default() -> Self { + Self { + enabled: true, + buffers: Vec::new(), + snapshot: RecentBuffersSnapshot::default(), + pending_message: None, + } + } +} + +impl RecentBuffersContext { + pub fn update(&mut self, cx: &mut ModelContext) -> ContextUpdated { + let source_buffers = self + .buffers + .iter() + .filter_map(|recent| { + let (full_path, snapshot) = recent + .buffer + .read_with(cx, |buffer, cx| { + ( + buffer.file().map(|file| file.full_path(cx)), + buffer.snapshot(), + ) + }) + .ok()?; + Some(SourceBufferSnapshot { + full_path, + model: recent.buffer.clone(), + snapshot, + }) + }) + .collect::>(); + + if !self.enabled || source_buffers.is_empty() { + self.snapshot.message = Default::default(); + self.snapshot.source_buffers.clear(); + self.pending_message = None; + cx.notify(); + ContextUpdated::Disabled + } else { + self.pending_message = Some(cx.spawn(|this, mut cx| async move { + const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100); + cx.background_executor().timer(DEBOUNCE_TIMEOUT).await; + + let message = if source_buffers.is_empty() { + Rope::new() + } else { + cx.background_executor() + .spawn({ + let source_buffers = source_buffers.clone(); + async move { message_for_recent_buffers(source_buffers) } + }) + .await + }; + this.update(&mut cx, |this, cx| { + this.ambient_context.recent_buffers.snapshot.source_buffers = source_buffers; + this.ambient_context.recent_buffers.snapshot.message = message; + this.count_remaining_tokens(cx); + cx.notify(); + }) + .ok(); + })); + + ContextUpdated::Updating + } + } + + /// Returns the [`RecentBuffersContext`] as a message to the language model. + pub fn to_message(&self) -> Option { + self.enabled.then(|| LanguageModelRequestMessage { + role: Role::System, + content: self.snapshot.message.to_string(), + }) + } +} + +#[derive(Clone, Default, Debug)] +pub struct RecentBuffersSnapshot { + pub message: Rope, + pub source_buffers: Vec, +} + +#[derive(Clone)] +pub struct SourceBufferSnapshot { + pub full_path: Option, + pub model: WeakModel, + pub snapshot: BufferSnapshot, +} + +impl std::fmt::Debug for SourceBufferSnapshot { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SourceBufferSnapshot") + .field("full_path", &self.full_path) + .field("model (entity id)", &self.model.entity_id()) + .field("snapshot (text)", &self.snapshot.text()) + .finish() + } +} + +fn message_for_recent_buffers(buffers: Vec) -> Rope { + let mut message = String::new(); + writeln!( + message, + "The following is a list of recent buffers that the user has opened." 
+ ) + .unwrap(); + + for buffer in buffers { + if let Some(path) = buffer.full_path { + writeln!(message, "```{}", path.display()).unwrap(); + } else { + writeln!(message, "```untitled").unwrap(); + } + + for chunk in buffer.snapshot.chunks(0..buffer.snapshot.len(), false) { + message.push_str(chunk.text); + } + if !message.ends_with('\n') { + message.push('\n'); + } + message.push_str("```\n"); + } + + Rope::from(message.as_str()) +} diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs new file mode 100644 index 0000000..63994e6 --- /dev/null +++ b/crates/assistant/src/assistant.rs @@ -0,0 +1,265 @@ +mod ambient_context; +pub mod assistant_panel; +pub mod assistant_settings; +mod codegen; +mod completion_provider; +mod prompt_library; +mod prompts; +mod saved_conversation; +mod search; +mod streaming_diff; + +use ambient_context::AmbientContextSnapshot; +pub use assistant_panel::AssistantPanel; +use assistant_settings::{AnthropicModel, AssistantSettings, OpenAiModel, ZedDotDevModel}; +use client::{proto, Client}; +use command_palette_hooks::CommandPaletteFilter; +pub(crate) use completion_provider::*; +use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal}; +pub(crate) use saved_conversation::*; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsStore}; +use std::{ + fmt::{self, Display}, + sync::Arc, +}; + +actions!( + assistant, + [ + Assist, + Split, + CycleMessageRole, + QuoteSelection, + ToggleFocus, + ResetKey, + InlineAssist, + InsertActivePrompt, + ToggleIncludeConversation, + ToggleHistory, + ApplyEdit + ] +); + +#[derive( + Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize, +)] +struct MessageId(usize); + +#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum Role { + User, + Assistant, + System, +} + +impl Role { + pub fn cycle(&mut self) { + *self = match self { + Role::User => Role::Assistant, + Role::Assistant => Role::System, + Role::System => Role::User, + } + } +} + +impl Display for Role { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Role::User => write!(f, "user"), + Role::Assistant => write!(f, "assistant"), + Role::System => write!(f, "system"), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub enum LanguageModel { + ZedDotDev(ZedDotDevModel), + OpenAi(OpenAiModel), + Anthropic(AnthropicModel), +} + +impl Default for LanguageModel { + fn default() -> Self { + LanguageModel::ZedDotDev(ZedDotDevModel::default()) + } +} + +impl LanguageModel { + pub fn telemetry_id(&self) -> String { + match self { + LanguageModel::OpenAi(model) => format!("openai/{}", model.id()), + LanguageModel::Anthropic(model) => format!("anthropic/{}", model.id()), + LanguageModel::ZedDotDev(model) => format!("zed.dev/{}", model.id()), + } + } + + pub fn display_name(&self) -> String { + match self { + LanguageModel::OpenAi(model) => model.display_name().into(), + LanguageModel::Anthropic(model) => model.display_name().into(), + LanguageModel::ZedDotDev(model) => model.display_name().into(), + } + } + + pub fn max_token_count(&self) -> usize { + match self { + LanguageModel::OpenAi(model) => model.max_token_count(), + LanguageModel::Anthropic(model) => model.max_token_count(), + LanguageModel::ZedDotDev(model) => model.max_token_count(), + } + } + + pub fn id(&self) -> &str { + match self { + LanguageModel::OpenAi(model) => model.id(), + LanguageModel::Anthropic(model) => 
model.id(), + LanguageModel::ZedDotDev(model) => model.id(), + } + } +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct LanguageModelRequestMessage { + pub role: Role, + pub content: String, +} + +impl LanguageModelRequestMessage { + pub fn to_proto(&self) -> proto::LanguageModelRequestMessage { + proto::LanguageModelRequestMessage { + role: match self.role { + Role::User => proto::LanguageModelRole::LanguageModelUser, + Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant, + Role::System => proto::LanguageModelRole::LanguageModelSystem, + } as i32, + content: self.content.clone(), + tool_calls: Vec::new(), + tool_call_id: None, + } + } +} + +#[derive(Debug, Default, Serialize)] +pub struct LanguageModelRequest { + pub model: LanguageModel, + pub messages: Vec, + pub stop: Vec, + pub temperature: f32, +} + +impl LanguageModelRequest { + pub fn to_proto(&self) -> proto::CompleteWithLanguageModel { + proto::CompleteWithLanguageModel { + model: self.model.id().to_string(), + messages: self.messages.iter().map(|m| m.to_proto()).collect(), + stop: self.stop.clone(), + temperature: self.temperature, + tool_choice: None, + tools: Vec::new(), + } + } +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct LanguageModelResponseMessage { + pub role: Option, + pub content: Option, +} + +#[derive(Deserialize, Debug)] +pub struct LanguageModelUsage { + pub prompt_tokens: u32, + pub completion_tokens: u32, + pub total_tokens: u32, +} + +#[derive(Deserialize, Debug)] +pub struct LanguageModelChoiceDelta { + pub index: u32, + pub delta: LanguageModelResponseMessage, + pub finish_reason: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct MessageMetadata { + role: Role, + status: MessageStatus, + // todo!("delete this") + #[serde(skip)] + ambient_context: AmbientContextSnapshot, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum MessageStatus { + Pending, + Done, + Error(SharedString), +} + +/// The state pertaining to the Assistant. +#[derive(Default)] +struct Assistant { + /// Whether the Assistant is enabled. 
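    ///
    /// Defaults to `false`; [`Assistant::set_enabled`] applies the value from
    /// `AssistantSettings` and shows or hides the `assistant` command palette namespace
    /// accordingly.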
+ enabled: bool, +} + +impl Global for Assistant {} + +impl Assistant { + const NAMESPACE: &'static str = "assistant"; + + fn set_enabled(&mut self, enabled: bool, cx: &mut AppContext) { + if self.enabled == enabled { + return; + } + + self.enabled = enabled; + + if !enabled { + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_namespace(Self::NAMESPACE); + }); + + return; + } + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.show_namespace(Self::NAMESPACE); + }); + } +} + +pub fn init(client: Arc, cx: &mut AppContext) { + cx.set_global(Assistant::default()); + AssistantSettings::register(cx); + completion_provider::init(client, cx); + assistant_panel::init(cx); + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_namespace(Assistant::NAMESPACE); + }); + Assistant::update_global(cx, |assistant, cx| { + let settings = AssistantSettings::get_global(cx); + + assistant.set_enabled(settings.enabled, cx); + }); + cx.observe_global::(|cx| { + Assistant::update_global(cx, |assistant, cx| { + let settings = AssistantSettings::get_global(cx); + + assistant.set_enabled(settings.enabled, cx); + }); + }) + .detach(); +} + +#[cfg(test)] +#[ctor::ctor] +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs new file mode 100644 index 0000000..9b2b76d --- /dev/null +++ b/crates/assistant/src/assistant_panel.rs @@ -0,0 +1,3972 @@ +use crate::{ + ambient_context::*, + assistant_settings::{AssistantDockPosition, AssistantSettings, ZedDotDevModel}, + codegen::{self, Codegen, CodegenKind}, + prompt_library::{PromptLibrary, PromptManager}, + prompts::generate_content_prompt, + search::*, + ApplyEdit, Assist, CompletionProvider, CycleMessageRole, InlineAssist, InsertActivePrompt, + LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageId, MessageMetadata, + MessageStatus, QuoteSelection, ResetKey, Role, SavedConversation, SavedConversationMetadata, + SavedMessage, Split, ToggleFocus, ToggleHistory, ToggleIncludeConversation, +}; +use anyhow::{anyhow, Result}; +use client::telemetry::Telemetry; +use collections::{hash_map, HashMap, HashSet, VecDeque}; +use editor::{ + actions::{MoveDown, MoveUp}, + display_map::{ + BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint, + }, + scroll::{Autoscroll, AutoscrollStrategy}, + Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBufferSnapshot, RowExt, + ToOffset as _, ToPoint, +}; +use file_icons::FileIcons; +use fs::Fs; +use futures::StreamExt; +use gpui::{ + canvas, div, point, relative, rems, uniform_list, Action, AnyView, AppContext, AsyncAppContext, + AsyncWindowContext, AvailableSpace, ClipboardItem, Context, Entity, EventEmitter, FocusHandle, + FocusableView, FontStyle, FontWeight, HighlightStyle, InteractiveElement, IntoElement, Model, + ModelContext, ParentElement, Pixels, Render, SharedString, StatefulInteractiveElement, Styled, + Subscription, Task, TextStyle, UniformListScrollHandle, View, ViewContext, VisualContext, + WeakModel, WeakView, WhiteSpace, WindowContext, +}; +use language::{ + language_settings::SoftWrap, AutoindentMode, Buffer, BufferSnapshot, LanguageRegistry, + OffsetRangeExt as _, Point, ToOffset as _, +}; +use multi_buffer::MultiBufferRow; +use parking_lot::Mutex; +use project::{Project, ProjectTransaction}; +use search::{buffer_search::DivRegistrar, BufferSearchBar}; +use settings::Settings; +use 
std::{ + cmp::{self, Ordering}, + fmt::Write, + iter, + ops::Range, + path::PathBuf, + sync::Arc, + time::{Duration, Instant}, +}; +use telemetry_events::AssistantKind; +use theme::ThemeSettings; +use ui::{popover_menu, prelude::*, ButtonLike, ContextMenu, Tab, TabBar, Tooltip}; +use util::{paths::CONVERSATIONS_DIR, post_inc, ResultExt, TryFutureExt}; +use uuid::Uuid; +use workspace::{ + dock::{DockPosition, Panel, PanelEvent}, + searchable::Direction, + Event as WorkspaceEvent, Save, Toast, ToggleZoom, Toolbar, Workspace, +}; +use workspace::{notifications::NotificationId, NewFile}; + +const MAX_RECENT_BUFFERS: usize = 3; + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views( + |workspace: &mut Workspace, _cx: &mut ViewContext| { + workspace + .register_action(|workspace, _: &ToggleFocus, cx| { + let settings = AssistantSettings::get_global(cx); + if !settings.enabled { + return; + } + + workspace.toggle_panel_focus::(cx); + }) + .register_action(AssistantPanel::inline_assist) + .register_action(AssistantPanel::cancel_last_inline_assist) + .register_action(ConversationEditor::insert_active_prompt) + .register_action(ConversationEditor::quote_selection); + }, + ) + .detach(); +} + +pub struct AssistantPanel { + workspace: WeakView, + width: Option, + height: Option, + active_conversation_editor: Option, + show_saved_conversations: bool, + saved_conversations: Vec, + saved_conversations_scroll_handle: UniformListScrollHandle, + zoomed: bool, + focus_handle: FocusHandle, + toolbar: View, + languages: Arc, + prompt_library: Arc, + fs: Arc, + telemetry: Arc, + _subscriptions: Vec, + next_inline_assist_id: usize, + pending_inline_assists: HashMap, + pending_inline_assist_ids_by_editor: HashMap, Vec>, + include_conversation_in_next_inline_assist: bool, + inline_prompt_history: VecDeque, + _watch_saved_conversations: Task>, + model: LanguageModel, + authentication_prompt: Option, +} + +struct ActiveConversationEditor { + editor: View, + _subscriptions: Vec, +} + +impl AssistantPanel { + const INLINE_PROMPT_HISTORY_MAX_LEN: usize = 20; + + pub fn load( + workspace: WeakView, + cx: AsyncWindowContext, + ) -> Task>> { + cx.spawn(|mut cx| async move { + let fs = workspace.update(&mut cx, |workspace, _| workspace.app_state().fs.clone())?; + let saved_conversations = SavedConversationMetadata::list(fs.clone()) + .await + .log_err() + .unwrap_or_default(); + + let prompt_library = Arc::new( + PromptLibrary::init(fs.clone()) + .await + .log_err() + .unwrap_or_default(), + ); + + // TODO: deserialize state. 
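                // The panel watches CONVERSATIONS_DIR so the saved-conversation list stays in
                // sync with what's on disk, and re-checks its completion provider whenever the
                // global settings change.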
+ let workspace_handle = workspace.clone(); + workspace.update(&mut cx, |workspace, cx| { + cx.new_view::(|cx| { + const CONVERSATION_WATCH_DURATION: Duration = Duration::from_millis(100); + let _watch_saved_conversations = cx.spawn(move |this, mut cx| async move { + let mut events = fs + .watch(&CONVERSATIONS_DIR, CONVERSATION_WATCH_DURATION) + .await; + while events.next().await.is_some() { + let saved_conversations = SavedConversationMetadata::list(fs.clone()) + .await + .log_err() + .unwrap_or_default(); + this.update(&mut cx, |this, cx| { + this.saved_conversations = saved_conversations; + cx.notify(); + }) + .ok(); + } + + anyhow::Ok(()) + }); + + let toolbar = cx.new_view(|cx| { + let mut toolbar = Toolbar::new(); + toolbar.set_can_navigate(false, cx); + toolbar.add_item(cx.new_view(BufferSearchBar::new), cx); + toolbar + }); + + let focus_handle = cx.focus_handle(); + let subscriptions = vec![ + cx.on_focus_in(&focus_handle, Self::focus_in), + cx.on_focus_out(&focus_handle, Self::focus_out), + cx.observe_global::({ + let mut prev_settings_version = + CompletionProvider::global(cx).settings_version(); + move |this, cx| { + this.completion_provider_changed(prev_settings_version, cx); + prev_settings_version = + CompletionProvider::global(cx).settings_version(); + } + }), + ]; + let model = CompletionProvider::global(cx).default_model(); + + cx.observe_global::(|_, cx| { + cx.notify(); + }) + .detach(); + + Self { + workspace: workspace_handle, + active_conversation_editor: None, + show_saved_conversations: false, + saved_conversations, + saved_conversations_scroll_handle: Default::default(), + zoomed: false, + focus_handle, + toolbar, + languages: workspace.app_state().languages.clone(), + prompt_library, + fs: workspace.app_state().fs.clone(), + telemetry: workspace.client().telemetry().clone(), + width: None, + height: None, + _subscriptions: subscriptions, + next_inline_assist_id: 0, + pending_inline_assists: Default::default(), + pending_inline_assist_ids_by_editor: Default::default(), + include_conversation_in_next_inline_assist: false, + inline_prompt_history: Default::default(), + _watch_saved_conversations, + model, + authentication_prompt: None, + } + }) + }) + }) + } + + fn focus_in(&mut self, cx: &mut ViewContext) { + self.toolbar + .update(cx, |toolbar, cx| toolbar.focus_changed(true, cx)); + cx.notify(); + if self.focus_handle.is_focused(cx) { + if let Some(editor) = self.active_conversation_editor() { + cx.focus_view(editor); + } + } + } + + fn focus_out(&mut self, cx: &mut ViewContext) { + self.toolbar + .update(cx, |toolbar, cx| toolbar.focus_changed(false, cx)); + cx.notify(); + } + + fn completion_provider_changed( + &mut self, + prev_settings_version: usize, + cx: &mut ViewContext, + ) { + if self.is_authenticated(cx) { + self.authentication_prompt = None; + + let model = CompletionProvider::global(cx).default_model(); + self.set_model(model, cx); + + if self.active_conversation_editor().is_none() { + self.new_conversation(cx); + } + } else if self.authentication_prompt.is_none() + || prev_settings_version != CompletionProvider::global(cx).settings_version() + { + self.authentication_prompt = + Some(cx.update_global::(|provider, cx| { + provider.authentication_prompt(cx) + })); + } + } + + pub fn inline_assist( + workspace: &mut Workspace, + _: &InlineAssist, + cx: &mut ViewContext, + ) { + let settings = AssistantSettings::get_global(cx); + if !settings.enabled { + return; + } + + let Some(assistant) = workspace.panel::(cx) else { + return; + }; + + let 
conversation_editor = + assistant + .read(cx) + .active_conversation_editor() + .and_then(|editor| { + let editor = &editor.read(cx).editor; + if editor.read(cx).is_focused(cx) { + Some(editor.clone()) + } else { + None + } + }); + + let show_include_conversation; + let active_editor; + if let Some(conversation_editor) = conversation_editor { + active_editor = conversation_editor; + show_include_conversation = false; + } else if let Some(workspace_editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + { + active_editor = workspace_editor; + show_include_conversation = true; + } else { + return; + }; + let project = workspace.project().clone(); + + if assistant.update(cx, |assistant, cx| assistant.is_authenticated(cx)) { + assistant.update(cx, |assistant, cx| { + assistant.new_inline_assist(&active_editor, &project, show_include_conversation, cx) + }); + } else { + let assistant = assistant.downgrade(); + cx.spawn(|workspace, mut cx| async move { + assistant + .update(&mut cx, |assistant, cx| assistant.authenticate(cx))? + .await?; + if assistant.update(&mut cx, |assistant, cx| assistant.is_authenticated(cx))? { + assistant.update(&mut cx, |assistant, cx| { + assistant.new_inline_assist( + &active_editor, + &project, + show_include_conversation, + cx, + ) + })?; + } else { + workspace.update(&mut cx, |workspace, cx| { + workspace.focus_panel::(cx) + })?; + } + + anyhow::Ok(()) + }) + .detach_and_log_err(cx) + } + } + + fn new_inline_assist( + &mut self, + editor: &View, + project: &Model, + show_include_conversation: bool, + cx: &mut ViewContext, + ) { + let selection = editor.read(cx).selections.newest_anchor().clone(); + if selection.start.excerpt_id != selection.end.excerpt_id { + return; + } + let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); + + // Extend the selection to the start and the end of the line. + let mut point_selection = selection.map(|selection| selection.to_point(&snapshot)); + if point_selection.end > point_selection.start { + point_selection.start.column = 0; + // If the selection ends at the start of the line, we don't want to include it. 
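            // (a selection whose end sits in column 0 is pulled back to the previous row
            // before the end column is extended to that row's length below)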
+ if point_selection.end.column == 0 { + point_selection.end.row -= 1; + } + point_selection.end.column = snapshot.line_len(MultiBufferRow(point_selection.end.row)); + } + + let codegen_kind = if point_selection.start == point_selection.end { + CodegenKind::Generate { + position: snapshot.anchor_after(point_selection.start), + } + } else { + CodegenKind::Transform { + range: snapshot.anchor_before(point_selection.start) + ..snapshot.anchor_after(point_selection.end), + } + }; + + let inline_assist_id = post_inc(&mut self.next_inline_assist_id); + let telemetry = self.telemetry.clone(); + + let codegen = cx.new_model(|cx| { + Codegen::new( + editor.read(cx).buffer().clone(), + codegen_kind, + Some(telemetry), + cx, + ) + }); + + let measurements = Arc::new(Mutex::new(BlockMeasurements::default())); + let inline_assistant = cx.new_view(|cx| { + InlineAssistant::new( + inline_assist_id, + measurements.clone(), + show_include_conversation, + show_include_conversation && self.include_conversation_in_next_inline_assist, + self.inline_prompt_history.clone(), + codegen.clone(), + cx, + ) + }); + let block_id = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |selections| { + selections.select_anchor_ranges([selection.head()..selection.head()]) + }); + editor.insert_blocks( + [BlockProperties { + style: BlockStyle::Flex, + position: snapshot.anchor_before(Point::new(point_selection.head().row, 0)), + height: 2, + render: Box::new({ + let inline_assistant = inline_assistant.clone(); + move |cx: &mut BlockContext| { + *measurements.lock() = BlockMeasurements { + anchor_x: cx.anchor_x, + gutter_width: cx.gutter_dimensions.width, + }; + inline_assistant.clone().into_any_element() + } + }), + disposition: if selection.reversed { + BlockDisposition::Above + } else { + BlockDisposition::Below + }, + }], + Some(Autoscroll::Strategy(AutoscrollStrategy::Newest)), + cx, + )[0] + }); + + self.pending_inline_assists.insert( + inline_assist_id, + PendingInlineAssist { + editor: editor.downgrade(), + inline_assistant: Some((block_id, inline_assistant.clone())), + codegen: codegen.clone(), + project: project.downgrade(), + _subscriptions: vec![ + cx.subscribe(&inline_assistant, Self::handle_inline_assistant_event), + cx.subscribe(editor, { + let inline_assistant = inline_assistant.downgrade(); + move |_, editor, event, cx| { + if let Some(inline_assistant) = inline_assistant.upgrade() { + if let EditorEvent::SelectionsChanged { local } = event { + if *local + && inline_assistant.focus_handle(cx).contains_focused(cx) + { + cx.focus_view(&editor); + } + } + } + } + }), + cx.observe(&codegen, { + let editor = editor.downgrade(); + move |this, _, cx| { + if let Some(editor) = editor.upgrade() { + this.update_highlights_for_editor(&editor, cx); + } + } + }), + cx.subscribe(&codegen, move |this, codegen, event, cx| match event { + codegen::Event::Undone => { + this.finish_inline_assist(inline_assist_id, false, cx) + } + codegen::Event::Finished => { + let pending_assist = if let Some(pending_assist) = + this.pending_inline_assists.get(&inline_assist_id) + { + pending_assist + } else { + return; + }; + + let error = codegen + .read(cx) + .error() + .map(|error| format!("Inline assistant error: {}", error)); + if let Some(error) = error { + if pending_assist.inline_assistant.is_none() { + if let Some(workspace) = this.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + struct InlineAssistantError; + + let id = + NotificationId::identified::( + inline_assist_id, + ); + + 
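+                                        // The inline prompt UI has already been dismissed, so surface the error as a workspace toast instead.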
workspace.show_toast(Toast::new(id, error), cx); + }) + } + + this.finish_inline_assist(inline_assist_id, false, cx); + } + } else { + this.finish_inline_assist(inline_assist_id, false, cx); + } + } + }), + ], + }, + ); + self.pending_inline_assist_ids_by_editor + .entry(editor.downgrade()) + .or_default() + .push(inline_assist_id); + self.update_highlights_for_editor(editor, cx); + } + + fn handle_inline_assistant_event( + &mut self, + inline_assistant: View, + event: &InlineAssistantEvent, + cx: &mut ViewContext, + ) { + let assist_id = inline_assistant.read(cx).id; + match event { + InlineAssistantEvent::Confirmed { + prompt, + include_conversation, + } => { + self.confirm_inline_assist(assist_id, prompt, *include_conversation, cx); + } + InlineAssistantEvent::Canceled => { + self.finish_inline_assist(assist_id, true, cx); + } + InlineAssistantEvent::Dismissed => { + self.hide_inline_assist(assist_id, cx); + } + InlineAssistantEvent::IncludeConversationToggled { + include_conversation, + } => { + self.include_conversation_in_next_inline_assist = *include_conversation; + } + } + } + + fn cancel_last_inline_assist( + workspace: &mut Workspace, + _: &editor::actions::Cancel, + cx: &mut ViewContext, + ) { + if let Some(panel) = workspace.panel::(cx) { + if let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.downcast::()) + { + let handled = panel.update(cx, |panel, cx| { + if let Some(assist_id) = panel + .pending_inline_assist_ids_by_editor + .get(&editor.downgrade()) + .and_then(|assist_ids| assist_ids.last().copied()) + { + panel.finish_inline_assist(assist_id, true, cx); + true + } else { + false + } + }); + if handled { + return; + } + } + } + + cx.propagate(); + } + + fn finish_inline_assist(&mut self, assist_id: usize, undo: bool, cx: &mut ViewContext) { + self.hide_inline_assist(assist_id, cx); + + if let Some(pending_assist) = self.pending_inline_assists.remove(&assist_id) { + if let hash_map::Entry::Occupied(mut entry) = self + .pending_inline_assist_ids_by_editor + .entry(pending_assist.editor.clone()) + { + entry.get_mut().retain(|id| *id != assist_id); + if entry.get().is_empty() { + entry.remove(); + } + } + + if let Some(editor) = pending_assist.editor.upgrade() { + self.update_highlights_for_editor(&editor, cx); + + if undo { + pending_assist + .codegen + .update(cx, |codegen, cx| codegen.undo(cx)); + } + } + } + } + + fn hide_inline_assist(&mut self, assist_id: usize, cx: &mut ViewContext) { + if let Some(pending_assist) = self.pending_inline_assists.get_mut(&assist_id) { + if let Some(editor) = pending_assist.editor.upgrade() { + if let Some((block_id, inline_assistant)) = pending_assist.inline_assistant.take() { + editor.update(cx, |editor, cx| { + editor.remove_blocks(HashSet::from_iter([block_id]), None, cx); + if inline_assistant.focus_handle(cx).contains_focused(cx) { + editor.focus(cx); + } + }); + } + } + } + } + + fn confirm_inline_assist( + &mut self, + inline_assist_id: usize, + user_prompt: &str, + include_conversation: bool, + cx: &mut ViewContext, + ) { + let conversation = if include_conversation { + self.active_conversation_editor() + .map(|editor| editor.read(cx).conversation.clone()) + } else { + None + }; + + let pending_assist = + if let Some(pending_assist) = self.pending_inline_assists.get_mut(&inline_assist_id) { + pending_assist + } else { + return; + }; + + let editor = if let Some(editor) = pending_assist.editor.upgrade() { + editor + } else { + return; + }; + + let project = pending_assist.project.clone(); + + let project_name 
= project.upgrade().map(|project| {
+            project
+                .read(cx)
+                .worktree_root_names(cx)
+                .collect::<Vec<_>>()
+                .join("/")
+        });
+
+        self.inline_prompt_history
+            .retain(|prompt| prompt != user_prompt);
+        self.inline_prompt_history.push_back(user_prompt.into());
+        if self.inline_prompt_history.len() > Self::INLINE_PROMPT_HISTORY_MAX_LEN {
+            self.inline_prompt_history.pop_front();
+        }
+
+        let codegen = pending_assist.codegen.clone();
+        let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
+        let range = codegen.read(cx).range();
+        let start = snapshot.point_to_buffer_offset(range.start);
+        let end = snapshot.point_to_buffer_offset(range.end);
+        let (buffer, range) = if let Some((start, end)) = start.zip(end) {
+            let (start_buffer, start_buffer_offset) = start;
+            let (end_buffer, end_buffer_offset) = end;
+            if start_buffer.remote_id() == end_buffer.remote_id() {
+                (start_buffer.clone(), start_buffer_offset..end_buffer_offset)
+            } else {
+                self.finish_inline_assist(inline_assist_id, false, cx);
+                return;
+            }
+        } else {
+            self.finish_inline_assist(inline_assist_id, false, cx);
+            return;
+        };
+
+        let language = buffer.language_at(range.start);
+        let language_name = if let Some(language) = language.as_ref() {
+            if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
+                None
+            } else {
+                Some(language.name())
+            }
+        } else {
+            None
+        };
+
+        // A higher temperature increases the randomness of the model's output.
+        // If the buffer is Markdown or the language is unknown, raise the temperature for
+        // more creative output; for code, lower it to get more deterministic results.
+        let temperature = if let Some(language) = language_name.clone() {
+            if language.as_ref() == "Markdown" {
+                1.0
+            } else {
+                0.5
+            }
+        } else {
+            1.0
+        };
+
+        let user_prompt = user_prompt.to_string();
+
+        let prompt = cx.background_executor().spawn(async move {
+            let language_name = language_name.as_deref();
+            generate_content_prompt(user_prompt, language_name, buffer, range, project_name)
+        });
+
+        let mut messages = Vec::new();
+        if let Some(conversation) = conversation {
+            let conversation = conversation.read(cx);
+            let buffer = conversation.buffer.read(cx);
+            messages.extend(
+                conversation
+                    .messages(cx)
+                    .map(|message| message.to_request_message(buffer)),
+            );
+        }
+        let model = self.model.clone();
+
+        cx.spawn(|_, mut cx| async move {
+            // TODO: decide whether an error here should be surfaced to the user;
+            // with `?`, this detached task simply aborts on failure.
+ let prompt = prompt.await?; + + messages.push(LanguageModelRequestMessage { + role: Role::User, + content: prompt, + }); + + let request = LanguageModelRequest { + model, + messages, + stop: vec!["|END|>".to_string()], + temperature, + }; + + codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?; + anyhow::Ok(()) + }) + .detach(); + } + + fn update_highlights_for_editor(&self, editor: &View, cx: &mut ViewContext) { + let mut background_ranges = Vec::new(); + let mut foreground_ranges = Vec::new(); + let empty_inline_assist_ids = Vec::new(); + let inline_assist_ids = self + .pending_inline_assist_ids_by_editor + .get(&editor.downgrade()) + .unwrap_or(&empty_inline_assist_ids); + + for inline_assist_id in inline_assist_ids { + if let Some(pending_assist) = self.pending_inline_assists.get(inline_assist_id) { + let codegen = pending_assist.codegen.read(cx); + background_ranges.push(codegen.range()); + foreground_ranges.extend(codegen.last_equal_ranges().iter().cloned()); + } + } + + let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); + merge_ranges(&mut background_ranges, &snapshot); + merge_ranges(&mut foreground_ranges, &snapshot); + editor.update(cx, |editor, cx| { + if background_ranges.is_empty() { + editor.clear_background_highlights::(cx); + } else { + editor.highlight_background::( + &background_ranges, + |theme| theme.editor_active_line_background, // TODO use the appropriate color + cx, + ); + } + + if foreground_ranges.is_empty() { + editor.clear_highlights::(cx); + } else { + editor.highlight_text::( + foreground_ranges, + HighlightStyle { + fade_out: Some(0.6), + ..Default::default() + }, + cx, + ); + } + }); + } + + fn new_conversation(&mut self, cx: &mut ViewContext) -> Option> { + let workspace = self.workspace.upgrade()?; + + let editor = cx.new_view(|cx| { + ConversationEditor::new( + self.model.clone(), + self.languages.clone(), + self.fs.clone(), + workspace, + cx, + ) + }); + + self.show_conversation(editor.clone(), cx); + Some(editor) + } + + fn show_conversation( + &mut self, + conversation_editor: View, + cx: &mut ViewContext, + ) { + let mut subscriptions = Vec::new(); + subscriptions + .push(cx.subscribe(&conversation_editor, Self::handle_conversation_editor_event)); + + let conversation = conversation_editor.read(cx).conversation.clone(); + subscriptions.push(cx.observe(&conversation, |_, _, cx| cx.notify())); + + let editor = conversation_editor.read(cx).editor.clone(); + self.toolbar.update(cx, |toolbar, cx| { + toolbar.set_active_item(Some(&editor), cx); + }); + if self.focus_handle.contains_focused(cx) { + cx.focus_view(&editor); + } + self.active_conversation_editor = Some(ActiveConversationEditor { + editor: conversation_editor, + _subscriptions: subscriptions, + }); + self.show_saved_conversations = false; + + cx.notify(); + } + + fn cycle_model(&mut self, cx: &mut ViewContext) { + let next_model = match &self.model { + LanguageModel::OpenAi(model) => LanguageModel::OpenAi(match &model { + open_ai::Model::ThreePointFiveTurbo => open_ai::Model::Four, + open_ai::Model::Four => open_ai::Model::FourTurbo, + open_ai::Model::FourTurbo => open_ai::Model::FourOmni, + open_ai::Model::FourOmni => open_ai::Model::ThreePointFiveTurbo, + }), + LanguageModel::Anthropic(model) => LanguageModel::Anthropic(match &model { + anthropic::Model::Claude3Opus => anthropic::Model::Claude3Sonnet, + anthropic::Model::Claude3Sonnet => anthropic::Model::Claude3Haiku, + anthropic::Model::Claude3Haiku => anthropic::Model::Claude3Opus, + }), + 
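+            // The zed.dev provider cycles through its hosted models, detouring through the configured custom model (if any) before wrapping around.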
LanguageModel::ZedDotDev(model) => LanguageModel::ZedDotDev(match &model { + ZedDotDevModel::Gpt3Point5Turbo => ZedDotDevModel::Gpt4, + ZedDotDevModel::Gpt4 => ZedDotDevModel::Gpt4Turbo, + ZedDotDevModel::Gpt4Turbo => ZedDotDevModel::Gpt4Omni, + ZedDotDevModel::Gpt4Omni => ZedDotDevModel::Claude3Opus, + ZedDotDevModel::Claude3Opus => ZedDotDevModel::Claude3Sonnet, + ZedDotDevModel::Claude3Sonnet => ZedDotDevModel::Claude3Haiku, + ZedDotDevModel::Claude3Haiku => { + match CompletionProvider::global(cx).default_model() { + LanguageModel::ZedDotDev(custom @ ZedDotDevModel::Custom(_)) => custom, + _ => ZedDotDevModel::Gpt3Point5Turbo, + } + } + ZedDotDevModel::Custom(_) => ZedDotDevModel::Gpt3Point5Turbo, + }), + }; + + self.set_model(next_model, cx); + } + + fn set_model(&mut self, model: LanguageModel, cx: &mut ViewContext) { + self.model = model.clone(); + if let Some(editor) = self.active_conversation_editor() { + editor.update(cx, |active_conversation, cx| { + active_conversation + .conversation + .update(cx, |conversation, cx| { + conversation.set_model(model, cx); + }) + }) + } + cx.notify(); + } + + fn handle_conversation_editor_event( + &mut self, + _: View, + event: &ConversationEditorEvent, + cx: &mut ViewContext, + ) { + match event { + ConversationEditorEvent::TabContentChanged => cx.notify(), + } + } + + fn toggle_zoom(&mut self, _: &workspace::ToggleZoom, cx: &mut ViewContext) { + if self.zoomed { + cx.emit(PanelEvent::ZoomOut) + } else { + cx.emit(PanelEvent::ZoomIn) + } + } + + fn toggle_history(&mut self, _: &ToggleHistory, cx: &mut ViewContext) { + self.show_saved_conversations = !self.show_saved_conversations; + cx.notify(); + } + + fn show_history(&mut self, cx: &mut ViewContext) { + if !self.show_saved_conversations { + self.show_saved_conversations = true; + cx.notify(); + } + } + + fn deploy(&mut self, action: &search::buffer_search::Deploy, cx: &mut ViewContext) { + let mut propagate = true; + if let Some(search_bar) = self.toolbar.read(cx).item_of_type::() { + search_bar.update(cx, |search_bar, cx| { + if search_bar.show(cx) { + search_bar.search_suggested(cx); + if action.focus { + let focus_handle = search_bar.focus_handle(cx); + search_bar.select_query(cx); + cx.focus(&focus_handle); + } + propagate = false + } + }); + } + if propagate { + cx.propagate(); + } + } + + fn handle_editor_cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { + if let Some(search_bar) = self.toolbar.read(cx).item_of_type::() { + if !search_bar.read(cx).is_dismissed() { + search_bar.update(cx, |search_bar, cx| { + search_bar.dismiss(&Default::default(), cx) + }); + return; + } + } + cx.propagate(); + } + + fn select_next_match(&mut self, _: &search::SelectNextMatch, cx: &mut ViewContext) { + if let Some(search_bar) = self.toolbar.read(cx).item_of_type::() { + search_bar.update(cx, |bar, cx| bar.select_match(Direction::Next, 1, cx)); + } + } + + fn select_prev_match(&mut self, _: &search::SelectPrevMatch, cx: &mut ViewContext) { + if let Some(search_bar) = self.toolbar.read(cx).item_of_type::() { + search_bar.update(cx, |bar, cx| bar.select_match(Direction::Prev, 1, cx)); + } + } + + fn reset_credentials(&mut self, _: &ResetKey, cx: &mut ViewContext) { + CompletionProvider::global(cx) + .reset_credentials(cx) + .detach_and_log_err(cx); + } + + fn active_conversation_editor(&self) -> Option<&View> { + Some(&self.active_conversation_editor.as_ref()?.editor) + } + + fn render_popover_button(&self, cx: &mut ViewContext) -> impl IntoElement { + let assistant = 
cx.view().clone(); + let zoomed = self.zoomed; + popover_menu("assistant-popover") + .trigger(IconButton::new("trigger", IconName::Menu)) + .menu(move |cx| { + let assistant = assistant.clone(); + ContextMenu::build(cx, |menu, _cx| { + menu.entry( + if zoomed { "Zoom Out" } else { "Zoom In" }, + Some(Box::new(ToggleZoom)), + { + let assistant = assistant.clone(); + move |cx| { + assistant.focus_handle(cx).dispatch_action(&ToggleZoom, cx); + } + }, + ) + .entry("New Context", Some(Box::new(NewFile)), { + let assistant = assistant.clone(); + move |cx| { + assistant.focus_handle(cx).dispatch_action(&NewFile, cx); + } + }) + .entry("History", Some(Box::new(ToggleHistory)), { + let assistant = assistant.clone(); + move |cx| assistant.update(cx, |assistant, cx| assistant.show_history(cx)) + }) + }) + .into() + }) + } + + fn render_inject_context_menu(&self, _cx: &mut ViewContext) -> impl Element { + let workspace = self.workspace.clone(); + + popover_menu("inject-context-menu") + .trigger(IconButton::new("trigger", IconName::Quote).tooltip(|cx| { + // Tooltip::with_meta("Insert Context", None, "Type # to insert via keyboard", cx) + Tooltip::text("Insert Context", cx) + })) + .menu(move |cx| { + ContextMenu::build(cx, |menu, _cx| { + // menu.entry("Insert Search", None, { + // let assistant = assistant.clone(); + // move |_cx| {} + // }) + // .entry("Insert Docs", None, { + // let assistant = assistant.clone(); + // move |cx| {} + // }) + menu.entry("Quote Selection", None, { + let workspace = workspace.clone(); + move |cx| { + workspace + .update(cx, |workspace, cx| { + ConversationEditor::quote_selection( + workspace, + &Default::default(), + cx, + ) + }) + .ok(); + } + }) + .entry("Insert Active Prompt", None, { + let workspace = workspace.clone(); + move |cx| { + workspace + .update(cx, |workspace, cx| { + ConversationEditor::insert_active_prompt( + workspace, + &Default::default(), + cx, + ) + }) + .ok(); + } + }) + }) + .into() + }) + } + + fn render_assist_button(cx: &mut ViewContext) -> impl IntoElement { + IconButton::new("assist_button", IconName::MagicWand) + .on_click(cx.listener(|this, _event, cx| { + if let Some(active_editor) = this.active_conversation_editor() { + active_editor.update(cx, |editor, cx| editor.assist(&Default::default(), cx)); + } + })) + .icon_size(IconSize::Small) + .tooltip(|cx| Tooltip::for_action("Assist", &Assist, cx)) + } + + fn render_saved_conversation( + &mut self, + index: usize, + cx: &mut ViewContext, + ) -> impl IntoElement { + let conversation = &self.saved_conversations[index]; + let path = conversation.path.clone(); + + ButtonLike::new(index) + .on_click(cx.listener(move |this, _, cx| { + this.open_conversation(path.clone(), cx) + .detach_and_log_err(cx) + })) + .full_width() + .child( + div() + .flex() + .w_full() + .gap_2() + .child( + Label::new(conversation.mtime.format("%F %I:%M%p").to_string()) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child(Label::new(conversation.title.clone()).size(LabelSize::Small)), + ) + } + + fn open_conversation(&mut self, path: PathBuf, cx: &mut ViewContext) -> Task> { + cx.focus(&self.focus_handle); + + let fs = self.fs.clone(); + let workspace = self.workspace.clone(); + let languages = self.languages.clone(); + let telemetry = self.telemetry.clone(); + cx.spawn(|this, mut cx| async move { + let saved_conversation = SavedConversation::load(&path, fs.as_ref()).await?; + let model = this.update(&mut cx, |this, _| this.model.clone())?; + let conversation = Conversation::deserialize( + saved_conversation, 
+ model, + path.clone(), + languages, + Some(telemetry), + &mut cx, + ) + .await?; + + this.update(&mut cx, |this, cx| { + let workspace = workspace + .upgrade() + .ok_or_else(|| anyhow!("workspace dropped"))?; + let editor = cx.new_view(|cx| { + ConversationEditor::for_conversation(conversation, fs, workspace, cx) + }); + this.show_conversation(editor, cx); + anyhow::Ok(()) + })??; + Ok(()) + }) + } + + fn show_prompt_manager(&mut self, cx: &mut ViewContext) { + if let Some(workspace) = self.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.toggle_modal(cx, |cx| PromptManager::new(self.prompt_library.clone(), cx)) + }) + } + } + + fn is_authenticated(&mut self, cx: &mut ViewContext) -> bool { + CompletionProvider::global(cx).is_authenticated() + } + + fn authenticate(&mut self, cx: &mut ViewContext) -> Task> { + cx.update_global::(|provider, cx| provider.authenticate(cx)) + } + + fn render_signed_in(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let header = + TabBar::new("assistant_header") + .start_child(h_flex().gap_1().child(self.render_popover_button(cx))) + .children(self.active_conversation_editor().map(|editor| { + h_flex() + .h(rems(Tab::CONTAINER_HEIGHT_IN_REMS)) + .flex_1() + .px_2() + .child(Label::new(editor.read(cx).title(cx)).into_element()) + })) + .end_child( + h_flex() + .gap_2() + .when_some(self.active_conversation_editor(), |this, editor| { + let conversation = editor.read(cx).conversation.clone(); + this.child( + h_flex() + .gap_1() + .child(self.render_model(&conversation, cx)) + .children(self.render_remaining_tokens(&conversation, cx)), + ) + .child( + ui::Divider::vertical() + .inset() + .color(ui::DividerColor::Border), + ) + }) + .child( + h_flex() + .gap_1() + .child(self.render_inject_context_menu(cx)) + .child( + IconButton::new("show_prompt_manager", IconName::Library) + .icon_size(IconSize::Small) + .on_click(cx.listener(|this, _event, cx| { + this.show_prompt_manager(cx) + })) + .tooltip(|cx| Tooltip::text("Prompt Library…", cx)), + ) + .child(Self::render_assist_button(cx)), + ), + ); + + let contents = if self.active_conversation_editor().is_some() { + let mut registrar = DivRegistrar::new( + |panel, cx| panel.toolbar.read(cx).item_of_type::(), + cx, + ); + BufferSearchBar::register(&mut registrar); + registrar.into_div() + } else { + div() + }; + + v_flex() + .key_context("AssistantPanel") + .size_full() + .on_action(cx.listener(|this, _: &workspace::NewFile, cx| { + this.new_conversation(cx); + })) + .on_action(cx.listener(AssistantPanel::toggle_zoom)) + .on_action(cx.listener(AssistantPanel::toggle_history)) + .on_action(cx.listener(AssistantPanel::deploy)) + .on_action(cx.listener(AssistantPanel::select_next_match)) + .on_action(cx.listener(AssistantPanel::select_prev_match)) + .on_action(cx.listener(AssistantPanel::handle_editor_cancel)) + .on_action(cx.listener(AssistantPanel::reset_credentials)) + .track_focus(&self.focus_handle) + .child(header) + .children(if self.toolbar.read(cx).hidden() { + None + } else { + Some(self.toolbar.clone()) + }) + .child(contents.flex_1().child( + if self.show_saved_conversations || self.active_conversation_editor().is_none() { + let view = cx.view().clone(); + let scroll_handle = self.saved_conversations_scroll_handle.clone(); + let conversation_count = self.saved_conversations.len(); + canvas( + move |bounds, cx| { + let mut saved_conversations = uniform_list( + view, + "saved_conversations", + conversation_count, + |this, range, cx| { + range + .map(|ix| 
this.render_saved_conversation(ix, cx)) + .collect() + }, + ) + .track_scroll(scroll_handle) + .into_any_element(); + saved_conversations.prepaint_as_root( + bounds.origin, + bounds.size.map(AvailableSpace::Definite), + cx, + ); + saved_conversations + }, + |_bounds, mut saved_conversations, cx| saved_conversations.paint(cx), + ) + .size_full() + .into_any_element() + } else if let Some(editor) = self.active_conversation_editor() { + let editor = editor.clone(); + div().size_full().child(editor.clone()).into_any_element() + } else { + div().into_any_element() + }, + )) + } + + fn render_model( + &self, + conversation: &Model, + cx: &mut ViewContext, + ) -> impl IntoElement { + Button::new("current_model", conversation.read(cx).model.display_name()) + .style(ButtonStyle::Filled) + .tooltip(move |cx| Tooltip::text("Change Model", cx)) + .on_click(cx.listener(|this, _, cx| this.cycle_model(cx))) + } + + fn render_remaining_tokens( + &self, + conversation: &Model, + cx: &mut ViewContext, + ) -> Option { + let remaining_tokens = conversation.read(cx).remaining_tokens()?; + let remaining_tokens_color = if remaining_tokens <= 0 { + Color::Error + } else if remaining_tokens <= 500 { + Color::Warning + } else { + Color::Muted + }; + Some( + Label::new(remaining_tokens.to_string()) + .size(LabelSize::Small) + .color(remaining_tokens_color), + ) + } +} + +impl Render for AssistantPanel { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + if let Some(authentication_prompt) = self.authentication_prompt.as_ref() { + authentication_prompt.clone().into_any() + } else { + self.render_signed_in(cx).into_any_element() + } + } +} + +impl Panel for AssistantPanel { + fn persistent_name() -> &'static str { + "AssistantPanel" + } + + fn position(&self, cx: &WindowContext) -> DockPosition { + match AssistantSettings::get_global(cx).dock { + AssistantDockPosition::Left => DockPosition::Left, + AssistantDockPosition::Bottom => DockPosition::Bottom, + AssistantDockPosition::Right => DockPosition::Right, + } + } + + fn position_is_valid(&self, _: DockPosition) -> bool { + true + } + + fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext) { + settings::update_settings_file::(self.fs.clone(), cx, move |settings| { + let dock = match position { + DockPosition::Left => AssistantDockPosition::Left, + DockPosition::Bottom => AssistantDockPosition::Bottom, + DockPosition::Right => AssistantDockPosition::Right, + }; + settings.set_dock(dock); + }); + } + + fn size(&self, cx: &WindowContext) -> Pixels { + let settings = AssistantSettings::get_global(cx); + match self.position(cx) { + DockPosition::Left | DockPosition::Right => { + self.width.unwrap_or(settings.default_width) + } + DockPosition::Bottom => self.height.unwrap_or(settings.default_height), + } + } + + fn set_size(&mut self, size: Option, cx: &mut ViewContext) { + match self.position(cx) { + DockPosition::Left | DockPosition::Right => self.width = size, + DockPosition::Bottom => self.height = size, + } + cx.notify(); + } + + fn is_zoomed(&self, _: &WindowContext) -> bool { + self.zoomed + } + + fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext) { + self.zoomed = zoomed; + cx.notify(); + } + + fn set_active(&mut self, active: bool, cx: &mut ViewContext) { + if active { + let load_credentials = self.authenticate(cx); + cx.spawn(|this, mut cx| async move { + load_credentials.await?; + this.update(&mut cx, |this, cx| { + if this.is_authenticated(cx) && this.active_conversation_editor().is_none() { + 
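+                        // The panel became active with valid credentials and no open conversation, so start a fresh one.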
this.new_conversation(cx); + } + }) + }) + .detach_and_log_err(cx); + } + } + + fn icon(&self, cx: &WindowContext) -> Option { + let settings = AssistantSettings::get_global(cx); + if !settings.enabled || !settings.button { + return None; + } + + Some(IconName::Ai) + } + + fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { + Some("Assistant Panel") + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } +} + +impl EventEmitter for AssistantPanel {} + +impl FocusableView for AssistantPanel { + fn focus_handle(&self, _cx: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +enum ConversationEvent { + MessagesEdited, + SummaryChanged, + EditSuggestionsChanged, + StreamedCompletion, +} + +#[derive(Default)] +struct Summary { + text: String, + done: bool, +} + +pub struct Conversation { + id: Option, + buffer: Model, + pub(crate) ambient_context: AmbientContext, + edit_suggestions: Vec, + message_anchors: Vec, + messages_metadata: HashMap, + next_message_id: MessageId, + summary: Option, + pending_summary: Task>, + completion_count: usize, + pending_completions: Vec, + model: LanguageModel, + token_count: Option, + pending_token_count: Task>, + pending_edit_suggestion_parse: Option>, + pending_save: Task>, + path: Option, + _subscriptions: Vec, + telemetry: Option>, + language_registry: Arc, +} + +impl EventEmitter for Conversation {} + +impl Conversation { + fn new( + model: LanguageModel, + language_registry: Arc, + telemetry: Option>, + cx: &mut ModelContext, + ) -> Self { + let buffer = cx.new_model(|cx| { + let mut buffer = Buffer::local("", cx); + buffer.set_language_registry(language_registry.clone()); + buffer + }); + + let mut this = Self { + id: Some(Uuid::new_v4().to_string()), + message_anchors: Default::default(), + messages_metadata: Default::default(), + next_message_id: Default::default(), + ambient_context: AmbientContext::default(), + edit_suggestions: Vec::new(), + summary: None, + pending_summary: Task::ready(None), + completion_count: Default::default(), + pending_completions: Default::default(), + token_count: None, + pending_token_count: Task::ready(None), + pending_edit_suggestion_parse: None, + model, + _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)], + pending_save: Task::ready(Ok(())), + path: None, + buffer, + telemetry, + language_registry, + }; + + let message = MessageAnchor { + id: MessageId(post_inc(&mut this.next_message_id.0)), + start: language::Anchor::MIN, + }; + this.message_anchors.push(message.clone()); + this.messages_metadata.insert( + message.id, + MessageMetadata { + role: Role::User, + status: MessageStatus::Done, + ambient_context: AmbientContextSnapshot::default(), + }, + ); + + this.set_language(cx); + this.count_remaining_tokens(cx); + this + } + + fn serialize(&self, cx: &AppContext) -> SavedConversation { + SavedConversation { + id: self.id.clone(), + zed: "conversation".into(), + version: SavedConversation::VERSION.into(), + text: self.buffer.read(cx).text(), + message_metadata: self.messages_metadata.clone(), + messages: self + .messages(cx) + .map(|message| SavedMessage { + id: message.id, + start: message.offset_range.start, + }) + .collect(), + summary: self + .summary + .as_ref() + .map(|summary| summary.text.clone()) + .unwrap_or_default(), + } + } + + async fn deserialize( + saved_conversation: SavedConversation, + model: LanguageModel, + path: PathBuf, + language_registry: Arc, + telemetry: Option>, + cx: &mut AsyncAppContext, + ) -> Result> { + let id = match 
saved_conversation.id { + Some(id) => Some(id), + None => Some(Uuid::new_v4().to_string()), + }; + + let markdown = language_registry.language_for_name("Markdown"); + let mut message_anchors = Vec::new(); + let mut next_message_id = MessageId(0); + let buffer = cx.new_model(|cx| { + let mut buffer = Buffer::local(saved_conversation.text, cx); + for message in saved_conversation.messages { + message_anchors.push(MessageAnchor { + id: message.id, + start: buffer.anchor_before(message.start), + }); + next_message_id = cmp::max(next_message_id, MessageId(message.id.0 + 1)); + } + buffer.set_language_registry(language_registry.clone()); + cx.spawn(|buffer, mut cx| async move { + let markdown = markdown.await?; + buffer.update(&mut cx, |buffer: &mut Buffer, cx| { + buffer.set_language(Some(markdown), cx) + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + buffer + })?; + + cx.new_model(move |cx| { + let mut this = Self { + id, + message_anchors, + messages_metadata: saved_conversation.message_metadata, + next_message_id, + ambient_context: AmbientContext::default(), + edit_suggestions: Vec::new(), + summary: Some(Summary { + text: saved_conversation.summary, + done: true, + }), + pending_summary: Task::ready(None), + completion_count: Default::default(), + pending_completions: Default::default(), + token_count: None, + pending_edit_suggestion_parse: None, + pending_token_count: Task::ready(None), + model, + _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)], + pending_save: Task::ready(Ok(())), + path: Some(path), + buffer, + telemetry, + language_registry, + }; + this.set_language(cx); + this.reparse_edit_suggestions(cx); + this.count_remaining_tokens(cx); + this + }) + } + + fn set_language(&mut self, cx: &mut ModelContext) { + let markdown = self.language_registry.language_for_name("Markdown"); + cx.spawn(|this, mut cx| async move { + let markdown = markdown.await?; + this.update(&mut cx, |this, cx| { + this.buffer + .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx)); + }) + }) + .detach_and_log_err(cx); + } + + fn toggle_recent_buffers(&mut self, cx: &mut ModelContext) { + self.ambient_context.recent_buffers.enabled = !self.ambient_context.recent_buffers.enabled; + match self.ambient_context.recent_buffers.update(cx) { + ContextUpdated::Updating => {} + ContextUpdated::Disabled => { + self.count_remaining_tokens(cx); + } + } + } + + fn toggle_current_project_context( + &mut self, + fs: Arc, + project: WeakModel, + cx: &mut ModelContext, + ) { + self.ambient_context.current_project.enabled = + !self.ambient_context.current_project.enabled; + match self.ambient_context.current_project.update(fs, project, cx) { + ContextUpdated::Updating => {} + ContextUpdated::Disabled => { + self.count_remaining_tokens(cx); + } + } + } + + fn set_recent_buffers( + &mut self, + buffers: impl IntoIterator>, + cx: &mut ModelContext, + ) { + self.ambient_context.recent_buffers.buffers.clear(); + self.ambient_context + .recent_buffers + .buffers + .extend(buffers.into_iter().map(|buffer| RecentBuffer { + buffer: buffer.downgrade(), + _subscription: cx.observe(&buffer, |this, _, cx| { + match this.ambient_context.recent_buffers.update(cx) { + ContextUpdated::Updating => {} + ContextUpdated::Disabled => { + this.count_remaining_tokens(cx); + } + } + }), + })); + match self.ambient_context.recent_buffers.update(cx) { + ContextUpdated::Updating => {} + ContextUpdated::Disabled => { + self.count_remaining_tokens(cx); + } + } + } + + fn handle_buffer_event( + &mut self, + _: 
Model, + event: &language::Event, + cx: &mut ModelContext, + ) { + if *event == language::Event::Edited { + self.count_remaining_tokens(cx); + self.reparse_edit_suggestions(cx); + cx.emit(ConversationEvent::MessagesEdited); + } + } + + pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext) { + let request = self.to_completion_request(cx); + self.pending_token_count = cx.spawn(|this, mut cx| { + async move { + cx.background_executor() + .timer(Duration::from_millis(200)) + .await; + + let token_count = cx + .update(|cx| CompletionProvider::global(cx).count_tokens(request, cx))? + .await?; + + this.update(&mut cx, |this, cx| { + this.token_count = Some(token_count); + cx.notify() + })?; + anyhow::Ok(()) + } + .log_err() + }); + } + + fn reparse_edit_suggestions(&mut self, cx: &mut ModelContext) { + self.pending_edit_suggestion_parse = Some(cx.spawn(|this, mut cx| async move { + cx.background_executor() + .timer(Duration::from_millis(200)) + .await; + + this.update(&mut cx, |this, cx| { + this.reparse_edit_suggestions_in_range(0..this.buffer.read(cx).len(), cx); + }) + .ok(); + })); + } + + fn reparse_edit_suggestions_in_range( + &mut self, + range: Range, + cx: &mut ModelContext, + ) { + self.buffer.update(cx, |buffer, _| { + let range_start = buffer.anchor_before(range.start); + let range_end = buffer.anchor_after(range.end); + let start_ix = self + .edit_suggestions + .binary_search_by(|probe| { + probe + .source_range + .end + .cmp(&range_start, buffer) + .then(Ordering::Greater) + }) + .unwrap_err(); + let end_ix = self + .edit_suggestions + .binary_search_by(|probe| { + probe + .source_range + .start + .cmp(&range_end, buffer) + .then(Ordering::Less) + }) + .unwrap_err(); + + let mut new_edit_suggestions = Vec::new(); + let mut message_lines = buffer.as_rope().chunks_in_range(range).lines(); + while let Some(suggestion) = parse_next_edit_suggestion(&mut message_lines) { + let start_anchor = buffer.anchor_after(suggestion.outer_range.start); + let end_anchor = buffer.anchor_before(suggestion.outer_range.end); + new_edit_suggestions.push(EditSuggestion { + source_range: start_anchor..end_anchor, + full_path: suggestion.path, + }); + } + self.edit_suggestions + .splice(start_ix..end_ix, new_edit_suggestions); + }); + cx.emit(ConversationEvent::EditSuggestionsChanged); + cx.notify(); + } + + fn remaining_tokens(&self) -> Option { + Some(self.model.max_token_count() as isize - self.token_count? 
as isize) + } + + fn set_model(&mut self, model: LanguageModel, cx: &mut ModelContext) { + self.model = model; + self.count_remaining_tokens(cx); + } + + fn assist( + &mut self, + selected_messages: HashSet, + cx: &mut ModelContext, + ) -> Vec { + let mut user_messages = Vec::new(); + + let last_message_id = if let Some(last_message_id) = + self.message_anchors.iter().rev().find_map(|message| { + message + .start + .is_valid(self.buffer.read(cx)) + .then_some(message.id) + }) { + last_message_id + } else { + return Default::default(); + }; + + let mut should_assist = false; + for selected_message_id in selected_messages { + let selected_message_role = + if let Some(metadata) = self.messages_metadata.get(&selected_message_id) { + metadata.role + } else { + continue; + }; + + if selected_message_role == Role::Assistant { + if let Some(user_message) = self.insert_message_after( + selected_message_id, + Role::User, + MessageStatus::Done, + cx, + ) { + user_messages.push(user_message); + } + } else { + should_assist = true; + } + } + + if should_assist { + if !CompletionProvider::global(cx).is_authenticated() { + log::info!("completion provider has no credentials"); + return Default::default(); + } + + let request = self.to_completion_request(cx); + let stream = CompletionProvider::global(cx).complete(request); + let assistant_message = self + .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx) + .unwrap(); + + // Queue up the user's next reply. + let user_message = self + .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx) + .unwrap(); + user_messages.push(user_message); + + let task = cx.spawn({ + |this, mut cx| async move { + let assistant_message_id = assistant_message.id; + let mut response_latency = None; + let stream_completion = async { + let request_start = Instant::now(); + let mut messages = stream.await?; + + while let Some(message) = messages.next().await { + if response_latency.is_none() { + response_latency = Some(request_start.elapsed()); + } + let text = message?; + + this.update(&mut cx, |this, cx| { + let message_ix = this + .message_anchors + .iter() + .position(|message| message.id == assistant_message_id)?; + let message_range = this.buffer.update(cx, |buffer, cx| { + let message_start_offset = + this.message_anchors[message_ix].start.to_offset(buffer); + let message_old_end_offset = this.message_anchors + [message_ix + 1..] 
+ .iter() + .find(|message| message.start.is_valid(buffer)) + .map_or(buffer.len(), |message| { + message.start.to_offset(buffer).saturating_sub(1) + }); + let message_new_end_offset = + message_old_end_offset + text.len(); + buffer.edit( + [(message_old_end_offset..message_old_end_offset, text)], + None, + cx, + ); + message_start_offset..message_new_end_offset + }); + this.reparse_edit_suggestions_in_range(message_range, cx); + cx.emit(ConversationEvent::StreamedCompletion); + + Some(()) + })?; + smol::future::yield_now().await; + } + + this.update(&mut cx, |this, cx| { + this.pending_completions + .retain(|completion| completion.id != this.completion_count); + this.summarize(cx); + })?; + + anyhow::Ok(()) + }; + + let result = stream_completion.await; + + this.update(&mut cx, |this, cx| { + if let Some(metadata) = + this.messages_metadata.get_mut(&assistant_message.id) + { + let error_message = result + .err() + .map(|error| error.to_string().trim().to_string()); + if let Some(error_message) = error_message.as_ref() { + metadata.status = + MessageStatus::Error(SharedString::from(error_message.clone())); + } else { + metadata.status = MessageStatus::Done; + } + + if let Some(telemetry) = this.telemetry.as_ref() { + telemetry.report_assistant_event( + this.id.clone(), + AssistantKind::Panel, + this.model.telemetry_id(), + response_latency, + error_message, + ); + } + + cx.emit(ConversationEvent::MessagesEdited); + } + }) + .ok(); + } + }); + + self.pending_completions.push(PendingCompletion { + id: post_inc(&mut self.completion_count), + _task: task, + }); + } + + user_messages + } + + fn to_completion_request(&self, cx: &mut ModelContext) -> LanguageModelRequest { + let edits_system_prompt = LanguageModelRequestMessage { + role: Role::System, + content: include_str!("./system_prompts/edits.md").to_string(), + }; + + let recent_buffers_context = self.ambient_context.recent_buffers.to_message(); + let current_project_context = self.ambient_context.current_project.to_message(); + + let messages = Some(edits_system_prompt) + .into_iter() + .chain(recent_buffers_context) + .chain(current_project_context) + .chain( + self.messages(cx) + .filter(|message| matches!(message.status, MessageStatus::Done)) + .map(|message| message.to_request_message(self.buffer.read(cx))), + ); + + LanguageModelRequest { + model: self.model.clone(), + messages: messages.collect(), + stop: vec![], + temperature: 1.0, + } + } + + fn cancel_last_assist(&mut self) -> bool { + self.pending_completions.pop().is_some() + } + + fn cycle_message_roles(&mut self, ids: HashSet, cx: &mut ModelContext) { + for id in ids { + if let Some(metadata) = self.messages_metadata.get_mut(&id) { + metadata.role.cycle(); + cx.emit(ConversationEvent::MessagesEdited); + cx.notify(); + } + } + } + + fn insert_message_after( + &mut self, + message_id: MessageId, + role: Role, + status: MessageStatus, + cx: &mut ModelContext, + ) -> Option { + if let Some(prev_message_ix) = self + .message_anchors + .iter() + .position(|message| message.id == message_id) + { + // Find the next valid message after the one we were given. 
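+            // A message counts as valid if its start anchor still resolves in the buffer; anchors of deleted messages are skipped.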
+ let mut next_message_ix = prev_message_ix + 1; + while let Some(next_message) = self.message_anchors.get(next_message_ix) { + if next_message.start.is_valid(self.buffer.read(cx)) { + break; + } + next_message_ix += 1; + } + + let start = self.buffer.update(cx, |buffer, cx| { + let offset = self + .message_anchors + .get(next_message_ix) + .map_or(buffer.len(), |message| message.start.to_offset(buffer) - 1); + buffer.edit([(offset..offset, "\n")], None, cx); + buffer.anchor_before(offset + 1) + }); + let message = MessageAnchor { + id: MessageId(post_inc(&mut self.next_message_id.0)), + start, + }; + self.message_anchors + .insert(next_message_ix, message.clone()); + self.messages_metadata.insert( + message.id, + MessageMetadata { + role, + status, + ambient_context: self.ambient_context.snapshot(), + }, + ); + cx.emit(ConversationEvent::MessagesEdited); + Some(message) + } else { + None + } + } + + fn split_message( + &mut self, + range: Range, + cx: &mut ModelContext, + ) -> (Option, Option) { + let start_message = self.message_for_offset(range.start, cx); + let end_message = self.message_for_offset(range.end, cx); + if let Some((start_message, end_message)) = start_message.zip(end_message) { + // Prevent splitting when range spans multiple messages. + if start_message.id != end_message.id { + return (None, None); + } + + let message = start_message; + let role = message.role; + let mut edited_buffer = false; + + let mut suffix_start = None; + if range.start > message.offset_range.start && range.end < message.offset_range.end - 1 + { + if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') { + suffix_start = Some(range.end + 1); + } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') { + suffix_start = Some(range.end); + } + } + + let suffix = if let Some(suffix_start) = suffix_start { + MessageAnchor { + id: MessageId(post_inc(&mut self.next_message_id.0)), + start: self.buffer.read(cx).anchor_before(suffix_start), + } + } else { + self.buffer.update(cx, |buffer, cx| { + buffer.edit([(range.end..range.end, "\n")], None, cx); + }); + edited_buffer = true; + MessageAnchor { + id: MessageId(post_inc(&mut self.next_message_id.0)), + start: self.buffer.read(cx).anchor_before(range.end + 1), + } + }; + + self.message_anchors + .insert(message.index_range.end + 1, suffix.clone()); + self.messages_metadata.insert( + suffix.id, + MessageMetadata { + role, + status: MessageStatus::Done, + ambient_context: message.ambient_context.clone(), + }, + ); + + let new_messages = + if range.start == range.end || range.start == message.offset_range.start { + (None, Some(suffix)) + } else { + let mut prefix_end = None; + if range.start > message.offset_range.start + && range.end < message.offset_range.end - 1 + { + if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') { + prefix_end = Some(range.start + 1); + } else if self.buffer.read(cx).reversed_chars_at(range.start).next() + == Some('\n') + { + prefix_end = Some(range.start); + } + } + + let selection = if let Some(prefix_end) = prefix_end { + cx.emit(ConversationEvent::MessagesEdited); + MessageAnchor { + id: MessageId(post_inc(&mut self.next_message_id.0)), + start: self.buffer.read(cx).anchor_before(prefix_end), + } + } else { + self.buffer.update(cx, |buffer, cx| { + buffer.edit([(range.start..range.start, "\n")], None, cx) + }); + edited_buffer = true; + MessageAnchor { + id: MessageId(post_inc(&mut self.next_message_id.0)), + start: self.buffer.read(cx).anchor_before(range.end + 1), + } + }; + + 
self.message_anchors + .insert(message.index_range.end + 1, selection.clone()); + self.messages_metadata.insert( + selection.id, + MessageMetadata { + role, + status: MessageStatus::Done, + ambient_context: message.ambient_context, + }, + ); + (Some(selection), Some(suffix)) + }; + + if !edited_buffer { + cx.emit(ConversationEvent::MessagesEdited); + } + new_messages + } else { + (None, None) + } + } + + fn summarize(&mut self, cx: &mut ModelContext) { + if self.message_anchors.len() >= 2 && self.summary.is_none() { + if !CompletionProvider::global(cx).is_authenticated() { + return; + } + + let messages = self + .messages(cx) + .take(2) + .map(|message| message.to_request_message(self.buffer.read(cx))) + .chain(Some(LanguageModelRequestMessage { + role: Role::User, + content: "Summarize the conversation into a short title without punctuation" + .into(), + })); + let request = LanguageModelRequest { + model: self.model.clone(), + messages: messages.collect(), + stop: vec![], + temperature: 1.0, + }; + + let stream = CompletionProvider::global(cx).complete(request); + self.pending_summary = cx.spawn(|this, mut cx| { + async move { + let mut messages = stream.await?; + + while let Some(message) = messages.next().await { + let text = message?; + this.update(&mut cx, |this, cx| { + this.summary + .get_or_insert(Default::default()) + .text + .push_str(&text); + cx.emit(ConversationEvent::SummaryChanged); + })?; + } + + this.update(&mut cx, |this, cx| { + if let Some(summary) = this.summary.as_mut() { + summary.done = true; + cx.emit(ConversationEvent::SummaryChanged); + } + })?; + + anyhow::Ok(()) + } + .log_err() + }); + } + } + + fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option { + self.messages_for_offsets([offset], cx).pop() + } + + fn messages_for_offsets( + &self, + offsets: impl IntoIterator, + cx: &AppContext, + ) -> Vec { + let mut result = Vec::new(); + + let mut messages = self.messages(cx).peekable(); + let mut offsets = offsets.into_iter().peekable(); + let mut current_message = messages.next(); + while let Some(offset) = offsets.next() { + // Locate the message that contains the offset. + while current_message.as_ref().map_or(false, |message| { + !message.offset_range.contains(&offset) && messages.peek().is_some() + }) { + current_message = messages.next(); + } + let Some(message) = current_message.as_ref() else { + break; + }; + + // Skip offsets that are in the same message. 
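+            // This ensures each message is pushed at most once, even when several offsets fall inside it.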
+ while offsets.peek().map_or(false, |offset| { + message.offset_range.contains(offset) || messages.peek().is_none() + }) { + offsets.next(); + } + + result.push(message.clone()); + } + result + } + + fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { + let buffer = self.buffer.read(cx); + let mut message_anchors = self.message_anchors.iter().enumerate().peekable(); + iter::from_fn(move || { + if let Some((start_ix, message_anchor)) = message_anchors.next() { + let metadata = self.messages_metadata.get(&message_anchor.id)?; + let message_start = message_anchor.start.to_offset(buffer); + let mut message_end = None; + let mut end_ix = start_ix; + while let Some((_, next_message)) = message_anchors.peek() { + if next_message.start.is_valid(buffer) { + message_end = Some(next_message.start); + break; + } else { + end_ix += 1; + message_anchors.next(); + } + } + let message_end = message_end + .unwrap_or(language::Anchor::MAX) + .to_offset(buffer); + return Some(Message { + index_range: start_ix..end_ix, + offset_range: message_start..message_end, + id: message_anchor.id, + anchor: message_anchor.start, + role: metadata.role, + status: metadata.status.clone(), + ambient_context: metadata.ambient_context.clone(), + }); + } + None + }) + } + + fn save( + &mut self, + debounce: Option, + fs: Arc, + cx: &mut ModelContext, + ) { + self.pending_save = cx.spawn(|this, mut cx| async move { + if let Some(debounce) = debounce { + cx.background_executor().timer(debounce).await; + } + + let (old_path, summary) = this.read_with(&cx, |this, _| { + let path = this.path.clone(); + let summary = if let Some(summary) = this.summary.as_ref() { + if summary.done { + Some(summary.text.clone()) + } else { + None + } + } else { + None + }; + (path, summary) + })?; + + if let Some(summary) = summary { + let conversation = this.read_with(&cx, |this, cx| this.serialize(cx))?; + let path = if let Some(old_path) = old_path { + old_path + } else { + let mut discriminant = 1; + let mut new_path; + loop { + new_path = CONVERSATIONS_DIR.join(&format!( + "{} - {}.zed.json", + summary.trim(), + discriminant + )); + if fs.is_file(&new_path).await { + discriminant += 1; + } else { + break; + } + } + new_path + }; + + fs.create_dir(CONVERSATIONS_DIR.as_ref()).await?; + fs.atomic_write(path.clone(), serde_json::to_string(&conversation).unwrap()) + .await?; + this.update(&mut cx, |this, _| this.path = Some(path))?; + } + + Ok(()) + }); + } +} + +#[derive(Debug)] +enum EditParsingState { + None, + InOldText { + path: PathBuf, + start_offset: usize, + old_text_start_offset: usize, + }, + InNewText { + path: PathBuf, + start_offset: usize, + old_text_range: Range, + new_text_start_offset: usize, + }, +} + +#[derive(Clone, Debug, PartialEq)] +struct EditSuggestion { + source_range: Range, + full_path: PathBuf, +} + +struct ParsedEditSuggestion { + path: PathBuf, + outer_range: Range, + old_text_range: Range, + new_text_range: Range, +} + +fn parse_next_edit_suggestion(lines: &mut rope::Lines) -> Option { + let mut state = EditParsingState::None; + loop { + let offset = lines.offset(); + let message_line = lines.next()?; + match state { + EditParsingState::None => { + if let Some(rest) = message_line.strip_prefix("```edit ") { + let path = rest.trim(); + if !path.is_empty() { + state = EditParsingState::InOldText { + path: PathBuf::from(path), + start_offset: offset, + old_text_start_offset: lines.offset(), + }; + } + } + } + EditParsingState::InOldText { + path, + start_offset, + old_text_start_offset, + } => { + if 
message_line == "---" { + state = EditParsingState::InNewText { + path, + start_offset, + old_text_range: old_text_start_offset..offset, + new_text_start_offset: lines.offset(), + }; + } else { + state = EditParsingState::InOldText { + path, + start_offset, + old_text_start_offset, + }; + } + } + EditParsingState::InNewText { + path, + start_offset, + old_text_range, + new_text_start_offset, + } => { + if message_line == "```" { + return Some(ParsedEditSuggestion { + path, + outer_range: start_offset..offset + "```".len(), + old_text_range, + new_text_range: new_text_start_offset..offset, + }); + } else { + state = EditParsingState::InNewText { + path, + start_offset, + old_text_range, + new_text_start_offset, + }; + } + } + } + } +} + +struct PendingCompletion { + id: usize, + _task: Task<()>, +} + +enum ConversationEditorEvent { + TabContentChanged, +} + +#[derive(Copy, Clone, Debug, PartialEq)] +struct ScrollPosition { + offset_before_cursor: gpui::Point, + cursor: Anchor, +} + +struct ConversationEditor { + conversation: Model, + fs: Arc, + workspace: WeakView, + editor: View, + blocks: HashSet, + scroll_position: Option, + _subscriptions: Vec, +} + +impl ConversationEditor { + fn new( + model: LanguageModel, + language_registry: Arc, + fs: Arc, + workspace: View, + cx: &mut ViewContext, + ) -> Self { + let telemetry = workspace.read(cx).client().telemetry().clone(); + let conversation = + cx.new_model(|cx| Conversation::new(model, language_registry, Some(telemetry), cx)); + Self::for_conversation(conversation, fs, workspace, cx) + } + + fn for_conversation( + conversation: Model, + fs: Arc, + workspace: View, + cx: &mut ViewContext, + ) -> Self { + let editor = cx.new_view(|cx| { + let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx); + editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); + editor.set_show_gutter(false, cx); + editor.set_show_wrap_guides(false, cx); + editor + }); + + let _subscriptions = vec![ + cx.observe(&conversation, |_, _, cx| cx.notify()), + cx.subscribe(&conversation, Self::handle_conversation_event), + cx.subscribe(&editor, Self::handle_editor_event), + cx.subscribe(&workspace, Self::handle_workspace_event), + ]; + + let mut this = Self { + conversation, + editor, + blocks: Default::default(), + scroll_position: None, + fs, + workspace: workspace.downgrade(), + _subscriptions, + }; + this.update_recent_editors(cx); + this.update_message_headers(cx); + this + } + + fn assist(&mut self, _: &Assist, cx: &mut ViewContext) { + let cursors = self.cursors(cx); + + let user_messages = self.conversation.update(cx, |conversation, cx| { + let selected_messages = conversation + .messages_for_offsets(cursors, cx) + .into_iter() + .map(|message| message.id) + .collect(); + conversation.assist(selected_messages, cx) + }); + let new_selections = user_messages + .iter() + .map(|message| { + let cursor = message + .start + .to_offset(self.conversation.read(cx).buffer.read(cx)); + cursor..cursor + }) + .collect::>(); + if !new_selections.is_empty() { + self.editor.update(cx, |editor, cx| { + editor.change_selections( + Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)), + cx, + |selections| selections.select_ranges(new_selections), + ); + }); + // Avoid scrolling to the new cursor position so the assistant's output is stable. 
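+            // Clearing scroll_position prevents the StreamedCompletion handler from re-anchoring the scroll to this cursor.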
+ cx.defer(|this, _| this.scroll_position = None); + } + } + + fn cancel_last_assist(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { + if !self + .conversation + .update(cx, |conversation, _| conversation.cancel_last_assist()) + { + cx.propagate(); + } + } + + fn cycle_message_role(&mut self, _: &CycleMessageRole, cx: &mut ViewContext) { + let cursors = self.cursors(cx); + self.conversation.update(cx, |conversation, cx| { + let messages = conversation + .messages_for_offsets(cursors, cx) + .into_iter() + .map(|message| message.id) + .collect(); + conversation.cycle_message_roles(messages, cx) + }); + } + + fn cursors(&self, cx: &AppContext) -> Vec { + let selections = self.editor.read(cx).selections.all::(cx); + selections + .into_iter() + .map(|selection| selection.head()) + .collect() + } + + fn handle_conversation_event( + &mut self, + _: Model, + event: &ConversationEvent, + cx: &mut ViewContext, + ) { + match event { + ConversationEvent::MessagesEdited => { + self.update_message_headers(cx); + self.conversation.update(cx, |conversation, cx| { + conversation.save(Some(Duration::from_millis(500)), self.fs.clone(), cx); + }); + } + ConversationEvent::EditSuggestionsChanged => { + self.editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let excerpt_id = *buffer.as_singleton().unwrap().0; + let conversation = self.conversation.read(cx); + let highlighted_rows = conversation + .edit_suggestions + .iter() + .map(|suggestion| { + let start = buffer + .anchor_in_excerpt(excerpt_id, suggestion.source_range.start) + .unwrap(); + let end = buffer + .anchor_in_excerpt(excerpt_id, suggestion.source_range.end) + .unwrap(); + start..=end + }) + .collect::>(); + + editor.clear_row_highlights::(); + for range in highlighted_rows { + editor.highlight_rows::( + range, + Some( + cx.theme() + .colors() + .editor_document_highlight_read_background, + ), + false, + cx, + ); + } + }); + } + ConversationEvent::SummaryChanged => { + cx.emit(ConversationEditorEvent::TabContentChanged); + self.conversation.update(cx, |conversation, cx| { + conversation.save(None, self.fs.clone(), cx); + }); + } + ConversationEvent::StreamedCompletion => { + self.editor.update(cx, |editor, cx| { + if let Some(scroll_position) = self.scroll_position { + let snapshot = editor.snapshot(cx); + let cursor_point = scroll_position.cursor.to_display_point(&snapshot); + let scroll_top = + cursor_point.row().as_f32() - scroll_position.offset_before_cursor.y; + editor.set_scroll_position( + point(scroll_position.offset_before_cursor.x, scroll_top), + cx, + ); + } + }); + } + } + } + + fn handle_editor_event( + &mut self, + _: View, + event: &EditorEvent, + cx: &mut ViewContext, + ) { + match event { + EditorEvent::ScrollPositionChanged { autoscroll, .. } => { + let cursor_scroll_position = self.cursor_scroll_position(cx); + if *autoscroll { + self.scroll_position = cursor_scroll_position; + } else if self.scroll_position != cursor_scroll_position { + self.scroll_position = None; + } + } + EditorEvent::SelectionsChanged { .. 
} => { + self.scroll_position = self.cursor_scroll_position(cx); + } + _ => {} + } + } + + fn handle_workspace_event( + &mut self, + _: View, + event: &WorkspaceEvent, + cx: &mut ViewContext, + ) { + match event { + WorkspaceEvent::ActiveItemChanged + | WorkspaceEvent::ItemAdded + | WorkspaceEvent::ItemRemoved + | WorkspaceEvent::PaneAdded(_) + | WorkspaceEvent::PaneRemoved => self.update_recent_editors(cx), + _ => {} + } + } + + fn update_recent_editors(&mut self, cx: &mut ViewContext) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + + let mut timestamps_by_entity_id = HashMap::default(); + for pane in workspace.read(cx).panes() { + let pane = pane.read(cx); + for entry in pane.activation_history() { + timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp); + } + } + + let mut timestamps_by_buffer = HashMap::default(); + for editor in workspace.read(cx).items_of_type::(cx) { + let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() else { + continue; + }; + + let new_timestamp = timestamps_by_entity_id + .get(&editor.entity_id()) + .copied() + .unwrap_or_default(); + let timestamp = timestamps_by_buffer.entry(buffer).or_insert(new_timestamp); + *timestamp = cmp::max(*timestamp, new_timestamp); + } + + let mut recent_buffers = timestamps_by_buffer.into_iter().collect::>(); + recent_buffers.sort_unstable_by_key(|(_, timestamp)| *timestamp); + if recent_buffers.len() > MAX_RECENT_BUFFERS { + let excess = recent_buffers.len() - MAX_RECENT_BUFFERS; + recent_buffers.drain(..excess); + } + + self.conversation.update(cx, |conversation, cx| { + conversation + .set_recent_buffers(recent_buffers.into_iter().map(|(buffer, _)| buffer), cx); + }); + } + + fn cursor_scroll_position(&self, cx: &mut ViewContext) -> Option { + self.editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let cursor = editor.selections.newest_anchor().head(); + let cursor_row = cursor + .to_display_point(&snapshot.display_snapshot) + .row() + .as_f32(); + let scroll_position = editor + .scroll_manager + .anchor() + .scroll_position(&snapshot.display_snapshot); + + let scroll_bottom = scroll_position.y + editor.visible_line_count().unwrap_or(0.); + if (scroll_position.y..scroll_bottom).contains(&cursor_row) { + Some(ScrollPosition { + cursor, + offset_before_cursor: point(scroll_position.x, cursor_row - scroll_position.y), + }) + } else { + None + } + }) + } + + fn update_message_headers(&mut self, cx: &mut ViewContext) { + let project = self + .workspace + .update(cx, |workspace, _cx| workspace.project().downgrade()) + .unwrap(); + + self.editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let excerpt_id = *buffer.as_singleton().unwrap().0; + let old_blocks = std::mem::take(&mut self.blocks); + let new_blocks = self + .conversation + .read(cx) + .messages(cx) + .enumerate() + .map(|(ix, message)| BlockProperties { + position: buffer + .anchor_in_excerpt(excerpt_id, message.anchor) + .unwrap(), + height: 2, + style: BlockStyle::Sticky, + render: Box::new({ + let fs = self.fs.clone(); + let project = project.clone(); + let conversation = self.conversation.clone(); + move |cx| { + let message_id = message.id; + let sender = ButtonLike::new("role") + .style(ButtonStyle::Filled) + .child(match message.role { + Role::User => Label::new("You").color(Color::Default), + Role::Assistant => Label::new("Assistant").color(Color::Info), + Role::System => Label::new("System").color(Color::Warning), + }) + .tooltip(|cx| { + 
Tooltip::with_meta( + "Toggle message role", + None, + "Available roles: You (User), Assistant, System", + cx, + ) + }) + .on_click({ + let conversation = conversation.clone(); + move |_, cx| { + conversation.update(cx, |conversation, cx| { + conversation.cycle_message_roles( + HashSet::from_iter(Some(message_id)), + cx, + ) + }) + } + }); + + h_flex() + .id(("message_header", message_id.0)) + .h_11() + .w_full() + .relative() + .gap_1() + .child(sender) + .children( + if let MessageStatus::Error(error) = message.status.clone() { + Some( + div() + .id("error") + .tooltip(move |cx| Tooltip::text(error.clone(), cx)) + .child(Icon::new(IconName::XCircle)), + ) + } else { + None + }, + ) + .children((ix == 0).then(|| { + div() + .h_flex() + .flex_1() + .justify_end() + .pr_4() + .gap_1() + .child( + IconButton::new("include_file", IconName::File) + .icon_size(IconSize::Small) + .selected( + conversation + .read(cx) + .ambient_context + .recent_buffers + .enabled, + ) + .on_click({ + let conversation = conversation.downgrade(); + move |_, cx| { + conversation + .update(cx, |conversation, cx| { + conversation + .toggle_recent_buffers(cx); + }) + .ok(); + } + }) + .tooltip(|cx| { + Tooltip::text("Include Open Files", cx) + }), + ) + .child( + IconButton::new( + "include_current_project", + IconName::FileTree, + ) + .icon_size(IconSize::Small) + .selected( + conversation + .read(cx) + .ambient_context + .current_project + .enabled, + ) + .on_click({ + let fs = fs.clone(); + let project = project.clone(); + let conversation = conversation.downgrade(); + move |_, cx| { + let fs = fs.clone(); + let project = project.clone(); + conversation + .update(cx, |conversation, cx| { + conversation + .toggle_current_project_context( + fs, project, cx, + ); + }) + .ok(); + } + }) + .tooltip( + |cx| Tooltip::text("Include Current Project", cx), + ), + ) + .into_any() + })) + .into_any_element() + } + }), + disposition: BlockDisposition::Above, + }) + .collect::>(); + + editor.remove_blocks(old_blocks, None, cx); + let ids = editor.insert_blocks(new_blocks, None, cx); + self.blocks = HashSet::from_iter(ids); + }); + } + + fn quote_selection( + workspace: &mut Workspace, + _: &QuoteSelection, + cx: &mut ViewContext, + ) { + let Some(panel) = workspace.panel::(cx) else { + return; + }; + let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + else { + return; + }; + + let editor = editor.read(cx); + let range = editor.selections.newest::(cx).range(); + let buffer = editor.buffer().read(cx).snapshot(cx); + let start_language = buffer.language_at(range.start); + let end_language = buffer.language_at(range.end); + let language_name = if start_language == end_language { + start_language.map(|language| language.code_fence_block_name()) + } else { + None + }; + let language_name = language_name.as_deref().unwrap_or(""); + + let selected_text = buffer.text_for_range(range).collect::(); + let text = if selected_text.is_empty() { + None + } else { + Some(if language_name == "markdown" { + selected_text + .lines() + .map(|line| format!("> {}", line)) + .collect::>() + .join("\n") + } else { + format!("```{language_name}\n{selected_text}\n```") + }) + }; + + // Activate the panel + if !panel.focus_handle(cx).contains_focused(cx) { + workspace.toggle_panel_focus::(cx); + } + + if let Some(text) = text { + panel.update(cx, |panel, cx| { + if let Some(conversation) = panel + .active_conversation_editor() + .cloned() + .or_else(|| panel.new_conversation(cx)) + { + conversation.update(cx, 
|conversation, cx| { + conversation + .editor + .update(cx, |editor, cx| editor.insert(&text, cx)) + }); + }; + }); + } + } + + fn insert_active_prompt( + workspace: &mut Workspace, + _: &InsertActivePrompt, + cx: &mut ViewContext, + ) { + let Some(panel) = workspace.panel::(cx) else { + return; + }; + + if !panel.focus_handle(cx).contains_focused(cx) { + workspace.toggle_panel_focus::(cx); + } + + if let Some(default_prompt) = panel.read(cx).prompt_library.clone().default_prompt() { + panel.update(cx, |panel, cx| { + if let Some(conversation) = panel + .active_conversation_editor() + .cloned() + .or_else(|| panel.new_conversation(cx)) + { + conversation.update(cx, |conversation, cx| { + conversation + .editor + .update(cx, |editor, cx| editor.insert(&default_prompt, cx)) + }); + }; + }); + }; + } + + fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext) { + let editor = self.editor.read(cx); + let conversation = self.conversation.read(cx); + if editor.selections.count() == 1 { + let selection = editor.selections.newest::(cx); + let mut copied_text = String::new(); + let mut spanned_messages = 0; + for message in conversation.messages(cx) { + if message.offset_range.start >= selection.range().end { + break; + } else if message.offset_range.end >= selection.range().start { + let range = cmp::max(message.offset_range.start, selection.range().start) + ..cmp::min(message.offset_range.end, selection.range().end); + if !range.is_empty() { + spanned_messages += 1; + write!(&mut copied_text, "## {}\n\n", message.role).unwrap(); + for chunk in conversation.buffer.read(cx).text_for_range(range) { + copied_text.push_str(chunk); + } + copied_text.push('\n'); + } + } + } + + if spanned_messages > 1 { + cx.write_to_clipboard(ClipboardItem::new(copied_text)); + return; + } + } + + cx.propagate(); + } + + fn split(&mut self, _: &Split, cx: &mut ViewContext) { + self.conversation.update(cx, |conversation, cx| { + let selections = self.editor.read(cx).selections.disjoint_anchors(); + for selection in selections.as_ref() { + let buffer = self.editor.read(cx).buffer().read(cx).snapshot(cx); + let range = selection + .map(|endpoint| endpoint.to_offset(&buffer)) + .range(); + conversation.split_message(range, cx); + } + }); + } + + fn apply_edit(&mut self, _: &ApplyEdit, cx: &mut ViewContext) { + struct Edit { + old_text: String, + new_text: String, + } + + let conversation = self.conversation.read(cx); + let conversation_buffer = conversation.buffer.read(cx); + let conversation_buffer_snapshot = conversation_buffer.snapshot(); + + let selections = self.editor.read(cx).selections.disjoint_anchors(); + let mut selections = selections.iter().peekable(); + let selected_suggestions = conversation.edit_suggestions.iter().filter(|suggestion| { + while let Some(selection) = selections.peek() { + if selection + .end + .text_anchor + .cmp(&suggestion.source_range.start, conversation_buffer) + .is_lt() + { + selections.next(); + continue; + } + if selection + .start + .text_anchor + .cmp(&suggestion.source_range.end, conversation_buffer) + .is_gt() + { + break; + } + return true; + } + false + }); + + let mut suggestions_by_buffer = + HashMap::, (BufferSnapshot, Vec)>::default(); + for suggestion in selected_suggestions { + let offset = suggestion.source_range.start.to_offset(conversation_buffer); + if let Some(message) = conversation.message_for_offset(offset, cx) { + if let Some(buffer) = message + .ambient_context + .recent_buffers + .source_buffers + .iter() + .find(|source_buffer| { + 
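+                        // Only buffers captured in this message's ambient context are
+                        // candidates; the suggestion is applied to the one whose full path
+                        // matches `suggestion.full_path`.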
source_buffer.full_path.as_ref() == Some(&suggestion.full_path) + }) + { + if let Some(buffer) = buffer.model.upgrade() { + let (_, edits) = suggestions_by_buffer + .entry(buffer.clone()) + .or_insert_with(|| (buffer.read(cx).snapshot(), Vec::new())); + + let mut lines = conversation_buffer_snapshot + .as_rope() + .chunks_in_range( + suggestion + .source_range + .to_offset(&conversation_buffer_snapshot), + ) + .lines(); + if let Some(suggestion) = parse_next_edit_suggestion(&mut lines) { + let old_text = conversation_buffer_snapshot + .text_for_range(suggestion.old_text_range) + .collect(); + let new_text = conversation_buffer_snapshot + .text_for_range(suggestion.new_text_range) + .collect(); + edits.push(Edit { old_text, new_text }); + } + } + } + } + } + + cx.spawn(|this, mut cx| async move { + let edits_by_buffer = cx + .background_executor() + .spawn(async move { + let mut result = HashMap::default(); + for (buffer, (snapshot, suggestions)) in suggestions_by_buffer { + let edits = + result + .entry(buffer) + .or_insert(Vec::<(Range, _)>::new()); + for suggestion in suggestions { + let ranges = + fuzzy_search_lines(snapshot.as_rope(), &suggestion.old_text); + if let Some(range) = ranges.first() { + let edit_start = snapshot.anchor_after(range.start); + let edit_end = snapshot.anchor_before(range.end); + if let Err(ix) = edits.binary_search_by(|(range, _)| { + range.start.cmp(&edit_start, &snapshot) + }) { + edits.insert( + ix, + (edit_start..edit_end, suggestion.new_text.clone()), + ); + } + } else { + log::info!( + "assistant edit did not match any text in buffer {:?}", + &suggestion.old_text + ); + } + } + } + result + }) + .await; + + let mut project_transaction = ProjectTransaction::default(); + let (editor, workspace, title) = this.update(&mut cx, |this, cx| { + for (buffer_handle, edits) in edits_by_buffer { + buffer_handle.update(cx, |buffer, cx| { + buffer.start_transaction(); + buffer.edit( + edits, + Some(AutoindentMode::Block { + original_indent_columns: Vec::new(), + }), + cx, + ); + buffer.end_transaction(cx); + if let Some(transaction) = buffer.finalize_last_transaction() { + project_transaction + .0 + .insert(buffer_handle.clone(), transaction.clone()); + } + }); + } + + ( + this.editor.downgrade(), + this.workspace.clone(), + this.title(cx), + ) + })?; + + Editor::open_project_transaction( + &editor, + workspace, + project_transaction, + format!("Edits from {}", title), + cx, + ) + .await + }) + .detach_and_log_err(cx); + } + + fn save(&mut self, _: &Save, cx: &mut ViewContext) { + self.conversation.update(cx, |conversation, cx| { + conversation.save(None, self.fs.clone(), cx) + }); + } + + fn title(&self, cx: &AppContext) -> String { + self.conversation + .read(cx) + .summary + .as_ref() + .map(|summary| summary.text.clone()) + .unwrap_or_else(|| "New Conversation".into()) + } +} + +impl EventEmitter for ConversationEditor {} + +impl Render for ConversationEditor { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .key_context("ConversationEditor") + .capture_action(cx.listener(ConversationEditor::cancel_last_assist)) + .capture_action(cx.listener(ConversationEditor::save)) + .capture_action(cx.listener(ConversationEditor::copy)) + .capture_action(cx.listener(ConversationEditor::cycle_message_role)) + .on_action(cx.listener(ConversationEditor::assist)) + .on_action(cx.listener(ConversationEditor::split)) + .on_action(cx.listener(ConversationEditor::apply_edit)) + .size_full() + .v_flex() + .child( + div() + .flex_grow() + .pl_4() + 
.bg(cx.theme().colors().editor_background) + .child(self.editor.clone()), + ) + } +} + +impl FocusableView for ConversationEditor { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.editor.focus_handle(cx) + } +} + +#[derive(Clone, Debug)] +struct MessageAnchor { + id: MessageId, + start: language::Anchor, +} + +#[derive(Clone, Debug)] +pub struct Message { + offset_range: Range, + index_range: Range, + id: MessageId, + anchor: language::Anchor, + role: Role, + status: MessageStatus, + ambient_context: AmbientContextSnapshot, +} + +impl Message { + fn to_request_message(&self, buffer: &Buffer) -> LanguageModelRequestMessage { + let content = buffer + .text_for_range(self.offset_range.clone()) + .collect::(); + LanguageModelRequestMessage { + role: self.role, + content: content.trim_end().into(), + } + } +} + +enum InlineAssistantEvent { + Confirmed { + prompt: String, + include_conversation: bool, + }, + Canceled, + Dismissed, + IncludeConversationToggled { + include_conversation: bool, + }, +} + +struct InlineAssistant { + id: usize, + prompt_editor: View, + confirmed: bool, + show_include_conversation: bool, + include_conversation: bool, + measurements: Arc>, + prompt_history: VecDeque, + prompt_history_ix: Option, + pending_prompt: String, + codegen: Model, + _subscriptions: Vec, +} + +impl EventEmitter for InlineAssistant {} + +impl Render for InlineAssistant { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let measurements = *self.measurements.lock(); + h_flex() + .w_full() + .py_2() + .border_y_1() + .border_color(cx.theme().colors().border) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(Self::toggle_include_conversation)) + .on_action(cx.listener(Self::move_up)) + .on_action(cx.listener(Self::move_down)) + .child( + h_flex() + .justify_center() + .w(measurements.gutter_width) + .children(self.show_include_conversation.then(|| { + IconButton::new("include_conversation", IconName::Ai) + .on_click(cx.listener(|this, _, cx| { + this.toggle_include_conversation(&ToggleIncludeConversation, cx) + })) + .selected(self.include_conversation) + .tooltip(|cx| { + Tooltip::for_action( + "Include Conversation", + &ToggleIncludeConversation, + cx, + ) + }) + })) + .children(if let Some(error) = self.codegen.read(cx).error() { + let error_message = SharedString::from(error.to_string()); + Some( + div() + .id("error") + .tooltip(move |cx| Tooltip::text(error_message.clone(), cx)) + .child(Icon::new(IconName::XCircle).color(Color::Error)), + ) + } else { + None + }), + ) + .child( + h_flex() + .w_full() + .ml(measurements.anchor_x - measurements.gutter_width) + .child(self.render_prompt_editor(cx)), + ) + } +} + +impl FocusableView for InlineAssistant { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.prompt_editor.focus_handle(cx) + } +} + +impl InlineAssistant { + #[allow(clippy::too_many_arguments)] + fn new( + id: usize, + measurements: Arc>, + show_include_conversation: bool, + include_conversation: bool, + prompt_history: VecDeque, + codegen: Model, + cx: &mut ViewContext, + ) -> Self { + let prompt_editor = cx.new_view(|cx| { + let mut editor = Editor::single_line(cx); + let placeholder = match codegen.read(cx).kind() { + CodegenKind::Transform { .. } => "Enter transformation prompt…", + CodegenKind::Generate { .. 
} => "Enter generation prompt…", + }; + editor.set_placeholder_text(placeholder, cx); + editor + }); + cx.focus_view(&prompt_editor); + + let subscriptions = vec![ + cx.observe(&codegen, Self::handle_codegen_changed), + cx.subscribe(&prompt_editor, Self::handle_prompt_editor_events), + ]; + + Self { + id, + prompt_editor, + confirmed: false, + show_include_conversation, + include_conversation, + measurements, + prompt_history, + prompt_history_ix: None, + pending_prompt: String::new(), + codegen, + _subscriptions: subscriptions, + } + } + + fn handle_prompt_editor_events( + &mut self, + _: View, + event: &EditorEvent, + cx: &mut ViewContext, + ) { + if let EditorEvent::Edited = event { + self.pending_prompt = self.prompt_editor.read(cx).text(cx); + cx.notify(); + } + } + + fn handle_codegen_changed(&mut self, _: Model, cx: &mut ViewContext) { + let is_read_only = !self.codegen.read(cx).idle(); + self.prompt_editor.update(cx, |editor, cx| { + let was_read_only = editor.read_only(cx); + if was_read_only != is_read_only { + if is_read_only { + editor.set_read_only(true); + } else { + self.confirmed = false; + editor.set_read_only(false); + } + } + }); + cx.notify(); + } + + fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { + cx.emit(InlineAssistantEvent::Canceled); + } + + fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { + if self.confirmed { + cx.emit(InlineAssistantEvent::Dismissed); + } else { + let prompt = self.prompt_editor.read(cx).text(cx); + self.prompt_editor + .update(cx, |editor, _cx| editor.set_read_only(true)); + cx.emit(InlineAssistantEvent::Confirmed { + prompt, + include_conversation: self.include_conversation, + }); + self.confirmed = true; + cx.notify(); + } + } + + fn toggle_include_conversation( + &mut self, + _: &ToggleIncludeConversation, + cx: &mut ViewContext, + ) { + self.include_conversation = !self.include_conversation; + cx.emit(InlineAssistantEvent::IncludeConversationToggled { + include_conversation: self.include_conversation, + }); + cx.notify(); + } + + fn move_up(&mut self, _: &MoveUp, cx: &mut ViewContext) { + if let Some(ix) = self.prompt_history_ix { + if ix > 0 { + self.prompt_history_ix = Some(ix - 1); + let prompt = self.prompt_history[ix - 1].clone(); + self.set_prompt(&prompt, cx); + } + } else if !self.prompt_history.is_empty() { + self.prompt_history_ix = Some(self.prompt_history.len() - 1); + let prompt = self.prompt_history[self.prompt_history.len() - 1].clone(); + self.set_prompt(&prompt, cx); + } + } + + fn move_down(&mut self, _: &MoveDown, cx: &mut ViewContext) { + if let Some(ix) = self.prompt_history_ix { + if ix < self.prompt_history.len() - 1 { + self.prompt_history_ix = Some(ix + 1); + let prompt = self.prompt_history[ix + 1].clone(); + self.set_prompt(&prompt, cx); + } else { + self.prompt_history_ix = None; + let pending_prompt = self.pending_prompt.clone(); + self.set_prompt(&pending_prompt, cx); + } + } + } + + fn set_prompt(&mut self, prompt: &str, cx: &mut ViewContext) { + self.prompt_editor.update(cx, |editor, cx| { + editor.buffer().update(cx, |buffer, cx| { + let len = buffer.len(cx); + buffer.edit([(0..len, prompt)], None, cx); + }); + }); + } + + fn render_prompt_editor(&self, cx: &mut ViewContext) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: if self.prompt_editor.read(cx).read_only(cx) { + cx.theme().colors().text_disabled + } else { + cx.theme().colors().text + }, + font_family: settings.ui_font.family.clone(), + 
font_features: settings.ui_font.features.clone(), + font_size: rems(0.875).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: relative(1.3), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }; + EditorElement::new( + &self.prompt_editor, + EditorStyle { + background: cx.theme().colors().editor_background, + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } +} + +// This wouldn't need to exist if we could pass parameters when rendering child views. +#[derive(Copy, Clone, Default)] +struct BlockMeasurements { + anchor_x: Pixels, + gutter_width: Pixels, +} + +struct PendingInlineAssist { + editor: WeakView, + inline_assistant: Option<(BlockId, View)>, + codegen: Model, + _subscriptions: Vec, + project: WeakModel, +} + +fn merge_ranges(ranges: &mut Vec>, buffer: &MultiBufferSnapshot) { + ranges.sort_unstable_by(|a, b| { + a.start + .cmp(&b.start, buffer) + .then_with(|| b.end.cmp(&a.end, buffer)) + }); + + let mut ix = 0; + while ix + 1 < ranges.len() { + let b = ranges[ix + 1].clone(); + let a = &mut ranges[ix]; + if a.end.cmp(&b.start, buffer).is_gt() { + if a.end.cmp(&b.end, buffer).is_lt() { + a.end = b.end; + } + ranges.remove(ix + 1); + } else { + ix += 1; + } + } +} + +#[cfg(test)] +mod tests { + use std::path::Path; + + use super::*; + use crate::{FakeCompletionProvider, MessageId}; + use gpui::{AppContext, TestAppContext}; + use rope::Rope; + use settings::SettingsStore; + use unindent::Unindent; + + #[gpui::test] + fn test_inserting_and_removing_messages(cx: &mut AppContext) { + let settings_store = SettingsStore::test(cx); + cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default())); + cx.set_global(settings_store); + init(cx); + let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); + + let conversation = + cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, None, cx)); + let buffer = conversation.read(cx).buffer.clone(); + + let message_1 = conversation.read(cx).message_anchors[0].clone(); + assert_eq!( + messages(&conversation, cx), + vec![(message_1.id, Role::User, 0..0)] + ); + + let message_2 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx) + .unwrap() + }); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..1), + (message_2.id, Role::Assistant, 1..1) + ] + ); + + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "1"), (1..1, "2")], None, cx) + }); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..2), + (message_2.id, Role::Assistant, 2..3) + ] + ); + + let message_3 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) + .unwrap() + }); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..2), + (message_2.id, Role::Assistant, 2..4), + (message_3.id, Role::User, 4..4) + ] + ); + + let message_4 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) + .unwrap() + }); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..2), + (message_2.id, Role::Assistant, 2..4), + (message_4.id, Role::User, 4..5), + (message_3.id, Role::User, 5..5), + ] + ); + + buffer.update(cx, |buffer, cx| { + 
buffer.edit([(4..4, "C"), (5..5, "D")], None, cx) + }); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..2), + (message_2.id, Role::Assistant, 2..4), + (message_4.id, Role::User, 4..6), + (message_3.id, Role::User, 6..7), + ] + ); + + // Deleting across message boundaries merges the messages. + buffer.update(cx, |buffer, cx| buffer.edit([(1..4, "")], None, cx)); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..3), + (message_3.id, Role::User, 3..4), + ] + ); + + // Undoing the deletion should also undo the merge. + buffer.update(cx, |buffer, cx| buffer.undo(cx)); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..2), + (message_2.id, Role::Assistant, 2..4), + (message_4.id, Role::User, 4..6), + (message_3.id, Role::User, 6..7), + ] + ); + + // Redoing the deletion should also redo the merge. + buffer.update(cx, |buffer, cx| buffer.redo(cx)); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..3), + (message_3.id, Role::User, 3..4), + ] + ); + + // Ensure we can still insert after a merged message. + let message_5 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx) + .unwrap() + }); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..3), + (message_5.id, Role::System, 3..4), + (message_3.id, Role::User, 4..5) + ] + ); + } + + #[gpui::test] + fn test_message_splitting(cx: &mut AppContext) { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default())); + init(cx); + let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); + + let conversation = + cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, None, cx)); + let buffer = conversation.read(cx).buffer.clone(); + + let message_1 = conversation.read(cx).message_anchors[0].clone(); + assert_eq!( + messages(&conversation, cx), + vec![(message_1.id, Role::User, 0..0)] + ); + + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "aaa\nbbb\nccc\nddd\n")], None, cx) + }); + + let (_, message_2) = + conversation.update(cx, |conversation, cx| conversation.split_message(3..3, cx)); + let message_2 = message_2.unwrap(); + + // We recycle newlines in the middle of a split message + assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_2.id, Role::User, 4..16), + ] + ); + + let (_, message_3) = + conversation.update(cx, |conversation, cx| conversation.split_message(3..3, cx)); + let message_3 = message_3.unwrap(); + + // We don't recycle newlines at the end of a split message + assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_3.id, Role::User, 4..5), + (message_2.id, Role::User, 5..17), + ] + ); + + let (_, message_4) = + conversation.update(cx, |conversation, cx| conversation.split_message(9..9, cx)); + let message_4 = message_4.unwrap(); + assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_3.id, Role::User, 4..5), + (message_2.id, Role::User, 5..9), + (message_4.id, Role::User, 9..17), + ] + ); + + let (_, message_5) = + 
conversation.update(cx, |conversation, cx| conversation.split_message(9..9, cx)); + let message_5 = message_5.unwrap(); + assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\nddd\n"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_3.id, Role::User, 4..5), + (message_2.id, Role::User, 5..9), + (message_4.id, Role::User, 9..10), + (message_5.id, Role::User, 10..18), + ] + ); + + let (message_6, message_7) = conversation.update(cx, |conversation, cx| { + conversation.split_message(14..16, cx) + }); + let message_6 = message_6.unwrap(); + let message_7 = message_7.unwrap(); + assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\ndd\nd\n"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_3.id, Role::User, 4..5), + (message_2.id, Role::User, 5..9), + (message_4.id, Role::User, 9..10), + (message_5.id, Role::User, 10..14), + (message_6.id, Role::User, 14..17), + (message_7.id, Role::User, 17..19), + ] + ); + } + + #[gpui::test] + fn test_messages_for_offsets(cx: &mut AppContext) { + let settings_store = SettingsStore::test(cx); + cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default())); + cx.set_global(settings_store); + init(cx); + let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); + let conversation = + cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, None, cx)); + let buffer = conversation.read(cx).buffer.clone(); + + let message_1 = conversation.read(cx).message_anchors[0].clone(); + assert_eq!( + messages(&conversation, cx), + vec![(message_1.id, Role::User, 0..0)] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx)); + let message_2 = conversation + .update(cx, |conversation, cx| { + conversation.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx) + }) + .unwrap(); + buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbb")], None, cx)); + + let message_3 = conversation + .update(cx, |conversation, cx| { + conversation.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) + }) + .unwrap(); + buffer.update(cx, |buffer, cx| buffer.edit([(8..8, "ccc")], None, cx)); + + assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_2.id, Role::User, 4..8), + (message_3.id, Role::User, 8..11) + ] + ); + + assert_eq!( + message_ids_for_offsets(&conversation, &[0, 4, 9], cx), + [message_1.id, message_2.id, message_3.id] + ); + assert_eq!( + message_ids_for_offsets(&conversation, &[0, 1, 11], cx), + [message_1.id, message_3.id] + ); + + let message_4 = conversation + .update(cx, |conversation, cx| { + conversation.insert_message_after(message_3.id, Role::User, MessageStatus::Done, cx) + }) + .unwrap(); + assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\n"); + assert_eq!( + messages(&conversation, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_2.id, Role::User, 4..8), + (message_3.id, Role::User, 8..12), + (message_4.id, Role::User, 12..12) + ] + ); + assert_eq!( + message_ids_for_offsets(&conversation, &[0, 4, 8, 12], cx), + [message_1.id, message_2.id, message_3.id, message_4.id] + ); + + fn message_ids_for_offsets( + conversation: &Model, + offsets: &[usize], + cx: &AppContext, + ) -> Vec { + conversation + .read(cx) + .messages_for_offsets(offsets.iter().copied(), cx) + .into_iter() + .map(|message| message.id) + .collect() + } + } + + #[test] + fn 
test_parse_next_edit_suggestion() { + let text = " + some output: + + ```edit src/foo.rs + let a = 1; + let b = 2; + --- + let w = 1; + let x = 2; + let y = 3; + let z = 4; + ``` + + some more output: + + ```edit src/foo.rs + let c = 1; + --- + ``` + + and the conclusion. + " + .unindent(); + + let rope = Rope::from(text.as_str()); + let mut lines = rope.chunks().lines(); + let mut suggestions = vec![]; + while let Some(suggestion) = parse_next_edit_suggestion(&mut lines) { + suggestions.push(( + suggestion.path.clone(), + text[suggestion.old_text_range].to_string(), + text[suggestion.new_text_range].to_string(), + )); + } + + assert_eq!( + suggestions, + vec![ + ( + Path::new("src/foo.rs").into(), + [ + " let a = 1;", // + " let b = 2;", + "", + ] + .join("\n"), + [ + " let w = 1;", + " let x = 2;", + " let y = 3;", + " let z = 4;", + "", + ] + .join("\n"), + ), + ( + Path::new("src/foo.rs").into(), + [ + " let c = 1;", // + "", + ] + .join("\n"), + String::new(), + ) + ] + ); + } + + #[gpui::test] + async fn test_serialization(cx: &mut TestAppContext) { + let settings_store = cx.update(SettingsStore::test); + cx.set_global(settings_store); + cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default())); + cx.update(init); + let registry = Arc::new(LanguageRegistry::test(cx.executor())); + let conversation = cx.new_model(|cx| { + Conversation::new(LanguageModel::default(), registry.clone(), None, cx) + }); + let buffer = conversation.read_with(cx, |conversation, _| conversation.buffer.clone()); + let message_0 = + conversation.read_with(cx, |conversation, _| conversation.message_anchors[0].id); + let message_1 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_0, Role::Assistant, MessageStatus::Done, cx) + .unwrap() + }); + let message_2 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx) + .unwrap() + }); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "a"), (1..1, "b\nc")], None, cx); + buffer.finalize_last_transaction(); + }); + let _message_3 = conversation.update(cx, |conversation, cx| { + conversation + .insert_message_after(message_2.id, Role::System, MessageStatus::Done, cx) + .unwrap() + }); + buffer.update(cx, |buffer, cx| buffer.undo(cx)); + assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "a\nb\nc\n"); + assert_eq!( + cx.read(|cx| messages(&conversation, cx)), + [ + (message_0, Role::User, 0..2), + (message_1.id, Role::Assistant, 2..6), + (message_2.id, Role::System, 6..6), + ] + ); + + let deserialized_conversation = Conversation::deserialize( + conversation.read_with(cx, |conversation, cx| conversation.serialize(cx)), + LanguageModel::default(), + Default::default(), + registry.clone(), + None, + &mut cx.to_async(), + ) + .await + .unwrap(); + let deserialized_buffer = + deserialized_conversation.read_with(cx, |conversation, _| conversation.buffer.clone()); + assert_eq!( + deserialized_buffer.read_with(cx, |buffer, _| buffer.text()), + "a\nb\nc\n" + ); + assert_eq!( + cx.read(|cx| messages(&deserialized_conversation, cx)), + [ + (message_0, Role::User, 0..2), + (message_1.id, Role::Assistant, 2..6), + (message_2.id, Role::System, 6..6), + ] + ); + } + + fn messages( + conversation: &Model, + cx: &AppContext, + ) -> Vec<(MessageId, Role, Range)> { + conversation + .read(cx) + .messages(cx) + .map(|message| (message.id, message.role, message.offset_range)) + .collect() + } +} diff --git 
a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs new file mode 100644 index 0000000..31e32a2 --- /dev/null +++ b/crates/assistant/src/assistant_settings.rs @@ -0,0 +1,508 @@ +use std::fmt; + +pub use anthropic::Model as AnthropicModel; +use gpui::Pixels; +pub use open_ai::Model as OpenAiModel; +use schemars::{ + schema::{InstanceType, Metadata, Schema, SchemaObject}, + JsonSchema, +}; +use serde::{ + de::{self, Visitor}, + Deserialize, Deserializer, Serialize, Serializer, +}; +use settings::{Settings, SettingsSources}; + +#[derive(Clone, Debug, Default, PartialEq)] +pub enum ZedDotDevModel { + Gpt3Point5Turbo, + Gpt4, + Gpt4Turbo, + #[default] + Gpt4Omni, + Claude3Opus, + Claude3Sonnet, + Claude3Haiku, + Custom(String), +} + +impl Serialize for ZedDotDevModel { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.id()) + } +} + +impl<'de> Deserialize<'de> for ZedDotDevModel { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ZedDotDevModelVisitor; + + impl<'de> Visitor<'de> for ZedDotDevModelVisitor { + type Value = ZedDotDevModel; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string for a ZedDotDevModel variant or a custom model") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + match value { + "gpt-3.5-turbo" => Ok(ZedDotDevModel::Gpt3Point5Turbo), + "gpt-4" => Ok(ZedDotDevModel::Gpt4), + "gpt-4-turbo-preview" => Ok(ZedDotDevModel::Gpt4Turbo), + "gpt-4o" => Ok(ZedDotDevModel::Gpt4Omni), + _ => Ok(ZedDotDevModel::Custom(value.to_owned())), + } + } + } + + deserializer.deserialize_str(ZedDotDevModelVisitor) + } +} + +impl JsonSchema for ZedDotDevModel { + fn schema_name() -> String { + "ZedDotDevModel".to_owned() + } + + fn json_schema(_generator: &mut schemars::gen::SchemaGenerator) -> Schema { + let variants = vec![ + "gpt-3.5-turbo".to_owned(), + "gpt-4".to_owned(), + "gpt-4-turbo-preview".to_owned(), + "gpt-4o".to_owned(), + ]; + Schema::Object(SchemaObject { + instance_type: Some(InstanceType::String.into()), + enum_values: Some(variants.into_iter().map(|s| s.into()).collect()), + metadata: Some(Box::new(Metadata { + title: Some("ZedDotDevModel".to_owned()), + default: Some(serde_json::json!("gpt-4-turbo-preview")), + examples: vec![ + serde_json::json!("gpt-3.5-turbo"), + serde_json::json!("gpt-4"), + serde_json::json!("gpt-4-turbo-preview"), + serde_json::json!("custom-model-name"), + ], + ..Default::default() + })), + ..Default::default() + }) + } +} + +impl ZedDotDevModel { + pub fn id(&self) -> &str { + match self { + Self::Gpt3Point5Turbo => "gpt-3.5-turbo", + Self::Gpt4 => "gpt-4", + Self::Gpt4Turbo => "gpt-4-turbo-preview", + Self::Gpt4Omni => "gpt-4o", + Self::Claude3Opus => "claude-3-opus", + Self::Claude3Sonnet => "claude-3-sonnet", + Self::Claude3Haiku => "claude-3-haiku", + Self::Custom(id) => id, + } + } + + pub fn display_name(&self) -> &str { + match self { + Self::Gpt3Point5Turbo => "GPT 3.5 Turbo", + Self::Gpt4 => "GPT 4", + Self::Gpt4Turbo => "GPT 4 Turbo", + Self::Gpt4Omni => "GPT 4 Omni", + Self::Claude3Opus => "Claude 3 Opus", + Self::Claude3Sonnet => "Claude 3 Sonnet", + Self::Claude3Haiku => "Claude 3 Haiku", + Self::Custom(id) => id.as_str(), + } + } + + pub fn max_token_count(&self) -> usize { + match self { + Self::Gpt3Point5Turbo => 2048, + Self::Gpt4 => 4096, + Self::Gpt4Turbo | Self::Gpt4Omni => 128000, + Self::Claude3Opus | 
Self::Claude3Sonnet | Self::Claude3Haiku => 200000, + Self::Custom(_) => 4096, // TODO: Make this configurable + } + } +} + +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum AssistantDockPosition { + Left, + #[default] + Right, + Bottom, +} + +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +#[serde(tag = "name", rename_all = "snake_case")] +pub enum AssistantProvider { + #[serde(rename = "zed.dev")] + ZedDotDev { + #[serde(default)] + default_model: ZedDotDevModel, + }, + #[serde(rename = "openai")] + OpenAi { + #[serde(default)] + default_model: OpenAiModel, + #[serde(default = "open_ai_url")] + api_url: String, + #[serde(default)] + low_speed_timeout_in_seconds: Option, + }, + #[serde(rename = "anthropic")] + Anthropic { + #[serde(default)] + default_model: AnthropicModel, + #[serde(default = "anthropic_api_url")] + api_url: String, + #[serde(default)] + low_speed_timeout_in_seconds: Option, + }, +} + +impl Default for AssistantProvider { + fn default() -> Self { + Self::ZedDotDev { + default_model: ZedDotDevModel::default(), + } + } +} + +fn open_ai_url() -> String { + open_ai::OPEN_AI_API_URL.to_string() +} + +fn anthropic_api_url() -> String { + anthropic::ANTHROPIC_API_URL.to_string() +} + +#[derive(Default, Debug, Deserialize, Serialize)] +pub struct AssistantSettings { + pub enabled: bool, + pub button: bool, + pub dock: AssistantDockPosition, + pub default_width: Pixels, + pub default_height: Pixels, + pub provider: AssistantProvider, +} + +/// Assistant panel settings +#[derive(Clone, Serialize, Deserialize, Debug)] +#[serde(untagged)] +pub enum AssistantSettingsContent { + Versioned(VersionedAssistantSettingsContent), + Legacy(LegacyAssistantSettingsContent), +} + +impl JsonSchema for AssistantSettingsContent { + fn schema_name() -> String { + VersionedAssistantSettingsContent::schema_name() + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema { + VersionedAssistantSettingsContent::json_schema(gen) + } + + fn is_referenceable() -> bool { + VersionedAssistantSettingsContent::is_referenceable() + } +} + +impl Default for AssistantSettingsContent { + fn default() -> Self { + Self::Versioned(VersionedAssistantSettingsContent::default()) + } +} + +impl AssistantSettingsContent { + fn upgrade(&self) -> AssistantSettingsContentV1 { + match self { + AssistantSettingsContent::Versioned(settings) => match settings { + VersionedAssistantSettingsContent::V1(settings) => settings.clone(), + }, + AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV1 { + enabled: None, + button: settings.button, + dock: settings.dock, + default_width: settings.default_width, + default_height: settings.default_height, + provider: if let Some(open_ai_api_url) = settings.openai_api_url.as_ref() { + Some(AssistantProvider::OpenAi { + default_model: settings.default_open_ai_model.clone().unwrap_or_default(), + api_url: open_ai_api_url.clone(), + low_speed_timeout_in_seconds: None, + }) + } else { + settings.default_open_ai_model.clone().map(|open_ai_model| { + AssistantProvider::OpenAi { + default_model: open_ai_model, + api_url: open_ai_url(), + low_speed_timeout_in_seconds: None, + } + }) + }, + }, + } + } + + pub fn set_dock(&mut self, dock: AssistantDockPosition) { + match self { + AssistantSettingsContent::Versioned(settings) => match settings { + VersionedAssistantSettingsContent::V1(settings) => { + settings.dock = Some(dock); + } + }, + 
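+            // Settings files that predate the `version` field deserialize into the
+            // `Legacy` variant and are updated in place here rather than migrated.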
            AssistantSettingsContent::Legacy(settings) => {
+                settings.dock = Some(dock);
+            }
+        }
+    }
+}
+
+#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
+#[serde(tag = "version")]
+pub enum VersionedAssistantSettingsContent {
+    #[serde(rename = "1")]
+    V1(AssistantSettingsContentV1),
+}
+
+impl Default for VersionedAssistantSettingsContent {
+    fn default() -> Self {
+        Self::V1(AssistantSettingsContentV1 {
+            enabled: None,
+            button: None,
+            dock: None,
+            default_width: None,
+            default_height: None,
+            provider: None,
+        })
+    }
+}
+
+#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
+pub struct AssistantSettingsContentV1 {
+    /// Whether the Assistant is enabled.
+    ///
+    /// Default: true
+    enabled: Option<bool>,
+    /// Whether to show the assistant panel button in the status bar.
+    ///
+    /// Default: true
+    button: Option<bool>,
+    /// Where to dock the assistant.
+    ///
+    /// Default: right
+    dock: Option<AssistantDockPosition>,
+    /// Default width in pixels when the assistant is docked to the left or right.
+    ///
+    /// Default: 640
+    default_width: Option<f32>,
+    /// Default height in pixels when the assistant is docked to the bottom.
+    ///
+    /// Default: 320
+    default_height: Option<f32>,
+    /// The provider of the assistant service.
+    ///
+    /// This can either be the internal `zed.dev` service or an external `openai` service,
+    /// each with their respective default models and configurations.
+    provider: Option<AssistantProvider>,
+}
+
+#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
+pub struct LegacyAssistantSettingsContent {
+    /// Whether to show the assistant panel button in the status bar.
+    ///
+    /// Default: true
+    pub button: Option<bool>,
+    /// Where to dock the assistant.
+    ///
+    /// Default: right
+    pub dock: Option<AssistantDockPosition>,
+    /// Default width in pixels when the assistant is docked to the left or right.
+    ///
+    /// Default: 640
+    pub default_width: Option<f32>,
+    /// Default height in pixels when the assistant is docked to the bottom.
+    ///
+    /// Default: 320
+    pub default_height: Option<f32>,
+    /// The default OpenAI model to use when starting new conversations.
+    ///
+    /// Default: gpt-4-1106-preview
+    pub default_open_ai_model: Option<OpenAiModel>,
+    /// OpenAI API base URL to use when starting new conversations.
+ /// + /// Default: https://api.openai.com/v1 + pub openai_api_url: Option, +} + +impl Settings for AssistantSettings { + const KEY: Option<&'static str> = Some("assistant"); + + type FileContent = AssistantSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + let mut settings = AssistantSettings::default(); + + for value in sources.defaults_and_customizations() { + let value = value.upgrade(); + merge(&mut settings.enabled, value.enabled); + merge(&mut settings.button, value.button); + merge(&mut settings.dock, value.dock); + merge( + &mut settings.default_width, + value.default_width.map(Into::into), + ); + merge( + &mut settings.default_height, + value.default_height.map(Into::into), + ); + if let Some(provider) = value.provider.clone() { + match (&mut settings.provider, provider) { + ( + AssistantProvider::ZedDotDev { default_model }, + AssistantProvider::ZedDotDev { + default_model: default_model_override, + }, + ) => { + *default_model = default_model_override; + } + ( + AssistantProvider::OpenAi { + default_model, + api_url, + low_speed_timeout_in_seconds, + }, + AssistantProvider::OpenAi { + default_model: default_model_override, + api_url: api_url_override, + low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override, + }, + ) => { + *default_model = default_model_override; + *api_url = api_url_override; + *low_speed_timeout_in_seconds = low_speed_timeout_in_seconds_override; + } + (merged, provider_override) => { + *merged = provider_override; + } + } + } + } + + Ok(settings) + } +} + +fn merge(target: &mut T, value: Option) { + if let Some(value) = value { + *target = value; + } +} + +#[cfg(test)] +mod tests { + use gpui::{AppContext, UpdateGlobal}; + use settings::SettingsStore; + + use super::*; + + #[gpui::test] + fn test_deserialize_assistant_settings(cx: &mut AppContext) { + let store = settings::SettingsStore::test(cx); + cx.set_global(store); + + // Settings default to gpt-4-turbo. + AssistantSettings::register(cx); + assert_eq!( + AssistantSettings::get_global(cx).provider, + AssistantProvider::OpenAi { + default_model: OpenAiModel::FourOmni, + api_url: open_ai_url(), + low_speed_timeout_in_seconds: None, + } + ); + + // Ensure backward-compatibility. + SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings( + r#"{ + "assistant": { + "openai_api_url": "test-url", + } + }"#, + cx, + ) + .unwrap(); + }); + assert_eq!( + AssistantSettings::get_global(cx).provider, + AssistantProvider::OpenAi { + default_model: OpenAiModel::FourOmni, + api_url: "test-url".into(), + low_speed_timeout_in_seconds: None, + } + ); + SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings( + r#"{ + "assistant": { + "default_open_ai_model": "gpt-4-0613" + } + }"#, + cx, + ) + .unwrap(); + }); + assert_eq!( + AssistantSettings::get_global(cx).provider, + AssistantProvider::OpenAi { + default_model: OpenAiModel::Four, + api_url: open_ai_url(), + low_speed_timeout_in_seconds: None, + } + ); + + // The new version supports setting a custom model when using zed.dev. 
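+        // Any model string the deserializer doesn't recognize as a gpt-* id falls back to
+        // `ZedDotDevModel::Custom`, which is what the assertion below relies on.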
+ SettingsStore::update_global(cx, |store, cx| { + store + .set_user_settings( + r#"{ + "assistant": { + "version": "1", + "provider": { + "name": "zed.dev", + "default_model": "custom" + } + } + }"#, + cx, + ) + .unwrap(); + }); + assert_eq!( + AssistantSettings::get_global(cx).provider, + AssistantProvider::ZedDotDev { + default_model: ZedDotDevModel::Custom("custom".into()) + } + ); + } +} diff --git a/crates/assistant/src/codegen.rs b/crates/assistant/src/codegen.rs new file mode 100644 index 0000000..8483a2a --- /dev/null +++ b/crates/assistant/src/codegen.rs @@ -0,0 +1,696 @@ +use crate::{ + streaming_diff::{Hunk, StreamingDiff}, + CompletionProvider, LanguageModelRequest, +}; +use anyhow::Result; +use client::telemetry::Telemetry; +use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}; +use futures::{channel::mpsc, SinkExt, Stream, StreamExt}; +use gpui::{EventEmitter, Model, ModelContext, Task}; +use language::{Rope, TransactionId}; +use multi_buffer::MultiBufferRow; +use std::{cmp, future, ops::Range, sync::Arc, time::Instant}; + +pub enum Event { + Finished, + Undone, +} + +#[derive(Clone)] +pub enum CodegenKind { + Transform { range: Range }, + Generate { position: Anchor }, +} + +pub struct Codegen { + buffer: Model, + snapshot: MultiBufferSnapshot, + kind: CodegenKind, + last_equal_ranges: Vec>, + transaction_id: Option, + error: Option, + generation: Task<()>, + idle: bool, + telemetry: Option>, + _subscription: gpui::Subscription, +} + +impl EventEmitter for Codegen {} + +impl Codegen { + pub fn new( + buffer: Model, + kind: CodegenKind, + telemetry: Option>, + cx: &mut ModelContext, + ) -> Self { + let snapshot = buffer.read(cx).snapshot(cx); + Self { + buffer: buffer.clone(), + snapshot, + kind, + last_equal_ranges: Default::default(), + transaction_id: Default::default(), + error: Default::default(), + idle: true, + generation: Task::ready(()), + telemetry, + _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), + } + } + + fn handle_buffer_event( + &mut self, + _buffer: Model, + event: &multi_buffer::Event, + cx: &mut ModelContext, + ) { + if let multi_buffer::Event::TransactionUndone { transaction_id } = event { + if self.transaction_id == Some(*transaction_id) { + self.transaction_id = None; + self.generation = Task::ready(()); + cx.emit(Event::Undone); + } + } + } + + pub fn range(&self) -> Range { + match &self.kind { + CodegenKind::Transform { range } => range.clone(), + CodegenKind::Generate { position } => position.bias_left(&self.snapshot)..*position, + } + } + + pub fn kind(&self) -> &CodegenKind { + &self.kind + } + + pub fn last_equal_ranges(&self) -> &[Range] { + &self.last_equal_ranges + } + + pub fn idle(&self) -> bool { + self.idle + } + + pub fn error(&self) -> Option<&anyhow::Error> { + self.error.as_ref() + } + + pub fn start(&mut self, prompt: LanguageModelRequest, cx: &mut ModelContext) { + let range = self.range(); + let snapshot = self.snapshot.clone(); + let selected_text = snapshot + .text_for_range(range.start..range.end) + .collect::(); + + let selection_start = range.start.to_point(&snapshot); + let suggested_line_indent = snapshot + .suggested_indents(selection_start.row..selection_start.row + 1, cx) + .into_values() + .next() + .unwrap_or_else(|| snapshot.indent_size_for_line(MultiBufferRow(selection_start.row))); + + let model_telemetry_id = prompt.model.telemetry_id(); + let response = CompletionProvider::global(cx).complete(prompt); + let telemetry = self.telemetry.clone(); + self.generation = cx.spawn(|this, 
mut cx| { + async move { + let generate = async { + let mut edit_start = range.start.to_offset(&snapshot); + + let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1); + let diff = cx.background_executor().spawn(async move { + let mut response_latency = None; + let request_start = Instant::now(); + let diff = async { + let chunks = strip_invalid_spans_from_codeblock(response.await?); + futures::pin_mut!(chunks); + let mut diff = StreamingDiff::new(selected_text.to_string()); + + let mut new_text = String::new(); + let mut base_indent = None; + let mut line_indent = None; + let mut first_line = true; + + while let Some(chunk) = chunks.next().await { + if response_latency.is_none() { + response_latency = Some(request_start.elapsed()); + } + let chunk = chunk?; + + let mut lines = chunk.split('\n').peekable(); + while let Some(line) = lines.next() { + new_text.push_str(line); + if line_indent.is_none() { + if let Some(non_whitespace_ch_ix) = + new_text.find(|ch: char| !ch.is_whitespace()) + { + line_indent = Some(non_whitespace_ch_ix); + base_indent = base_indent.or(line_indent); + + let line_indent = line_indent.unwrap(); + let base_indent = base_indent.unwrap(); + let indent_delta = + line_indent as i32 - base_indent as i32; + let mut corrected_indent_len = cmp::max( + 0, + suggested_line_indent.len as i32 + indent_delta, + ) + as usize; + if first_line { + corrected_indent_len = corrected_indent_len + .saturating_sub( + selection_start.column as usize, + ); + } + + let indent_char = suggested_line_indent.char(); + let mut indent_buffer = [0; 4]; + let indent_str = + indent_char.encode_utf8(&mut indent_buffer); + new_text.replace_range( + ..line_indent, + &indent_str.repeat(corrected_indent_len), + ); + } + } + + if line_indent.is_some() { + hunks_tx.send(diff.push_new(&new_text)).await?; + new_text.clear(); + } + + if lines.peek().is_some() { + hunks_tx.send(diff.push_new("\n")).await?; + line_indent = None; + first_line = false; + } + } + } + hunks_tx.send(diff.push_new(&new_text)).await?; + hunks_tx.send(diff.finish()).await?; + + anyhow::Ok(()) + }; + + let error_message = diff.await.err().map(|error| error.to_string()); + if let Some(telemetry) = telemetry { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + model_telemetry_id, + response_latency, + error_message, + ); + } + }); + + while let Some(hunks) = hunks_rx.next().await { + this.update(&mut cx, |this, cx| { + this.last_equal_ranges.clear(); + + let transaction = this.buffer.update(cx, |buffer, cx| { + // Avoid grouping assistant edits with user edits. + buffer.finalize_last_transaction(cx); + + buffer.start_transaction(cx); + buffer.edit( + hunks.into_iter().filter_map(|hunk| match hunk { + Hunk::Insert { text } => { + let edit_start = snapshot.anchor_after(edit_start); + Some((edit_start..edit_start, text)) + } + Hunk::Remove { len } => { + let edit_end = edit_start + len; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + Some((edit_range, String::new())) + } + Hunk::Keep { len } => { + let edit_end = edit_start + len; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + this.last_equal_ranges.push(edit_range); + None + } + }), + None, + cx, + ); + + buffer.end_transaction(cx) + }); + + if let Some(transaction) = transaction { + if let Some(first_transaction) = this.transaction_id { + // Group all assistant edits into the first transaction. 
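+                            // Merging into the first transaction keeps the entire generation
+                            // undoable as a single step, and `transaction_id` remains valid for
+                            // the `TransactionUndone` check in `handle_buffer_event`.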
+ this.buffer.update(cx, |buffer, cx| { + buffer.merge_transactions( + transaction, + first_transaction, + cx, + ) + }); + } else { + this.transaction_id = Some(transaction); + this.buffer.update(cx, |buffer, cx| { + buffer.finalize_last_transaction(cx) + }); + } + } + + cx.notify(); + })?; + } + + diff.await; + + anyhow::Ok(()) + }; + + let result = generate.await; + this.update(&mut cx, |this, cx| { + this.last_equal_ranges.clear(); + this.idle = true; + if let Err(error) = result { + this.error = Some(error); + } + cx.emit(Event::Finished); + cx.notify(); + }) + .ok(); + } + }); + self.error.take(); + self.idle = false; + cx.notify(); + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + if let Some(transaction_id) = self.transaction_id { + self.buffer + .update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx)); + } + } +} + +fn strip_invalid_spans_from_codeblock( + stream: impl Stream>, +) -> impl Stream> { + let mut first_line = true; + let mut buffer = String::new(); + let mut starts_with_markdown_codeblock = false; + let mut includes_start_or_end_span = false; + stream.filter_map(move |chunk| { + let chunk = match chunk { + Ok(chunk) => chunk, + Err(err) => return future::ready(Some(Err(err))), + }; + buffer.push_str(&chunk); + + if buffer.len() > "<|S|".len() && buffer.starts_with("<|S|") { + includes_start_or_end_span = true; + + buffer = buffer + .strip_prefix("<|S|>") + .or_else(|| buffer.strip_prefix("<|S|")) + .unwrap_or(&buffer) + .to_string(); + } else if buffer.ends_with("|E|>") { + includes_start_or_end_span = true; + } else if buffer.starts_with("<|") + || buffer.starts_with("<|S") + || buffer.starts_with("<|S|") + || buffer.ends_with('|') + || buffer.ends_with("|E") + || buffer.ends_with("|E|") + { + return future::ready(None); + } + + if first_line { + if buffer.is_empty() || buffer == "`" || buffer == "``" { + return future::ready(None); + } else if buffer.starts_with("```") { + starts_with_markdown_codeblock = true; + if let Some(newline_ix) = buffer.find('\n') { + buffer.replace_range(..newline_ix + 1, ""); + first_line = false; + } else { + return future::ready(None); + } + } + } + + let mut text = buffer.to_string(); + if starts_with_markdown_codeblock { + text = text + .strip_suffix("\n```\n") + .or_else(|| text.strip_suffix("\n```")) + .or_else(|| text.strip_suffix("\n``")) + .or_else(|| text.strip_suffix("\n`")) + .or_else(|| text.strip_suffix('\n')) + .unwrap_or(&text) + .to_string(); + } + + if includes_start_or_end_span { + text = text + .strip_suffix("|E|>") + .or_else(|| text.strip_suffix("E|>")) + .or_else(|| text.strip_prefix("|>")) + .or_else(|| text.strip_prefix('>')) + .unwrap_or(&text) + .to_string(); + }; + + if text.contains('\n') { + first_line = false; + } + + let remainder = buffer.split_off(text.len()); + let result = if buffer.is_empty() { + None + } else { + Some(Ok(buffer.clone())) + }; + + buffer = remainder; + future::ready(result) + }) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use crate::FakeCompletionProvider; + + use super::*; + use futures::stream::{self}; + use gpui::{Context, TestAppContext}; + use indoc::indoc; + use language::{ + language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher, + Point, + }; + use rand::prelude::*; + use serde::Serialize; + use settings::SettingsStore; + + #[derive(Serialize)] + pub struct DummyCompletionRequest { + pub name: String, + } + + #[gpui::test(iterations = 10)] + async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: 
StdRng) { + let provider = FakeCompletionProvider::default(); + cx.set_global(cx.update(SettingsStore::test)); + cx.set_global(CompletionProvider::Fake(provider.clone())); + cx.update(language_settings::init); + + let text = indoc! {" + fn main() { + let x = 0; + for _ in 0..10 { + x += 1; + } + } + "}; + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let range = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5)) + }); + let codegen = cx.new_model(|cx| { + Codegen::new(buffer.clone(), CodegenKind::Transform { range }, None, cx) + }); + + let request = LanguageModelRequest::default(); + codegen.update(cx, |codegen, cx| codegen.start(request, cx)); + + let mut new_text = concat!( + " let mut x = 0;\n", + " while x < 10 {\n", + " x += 1;\n", + " }", + ); + while !new_text.is_empty() { + let max_len = cmp::min(new_text.len(), 10); + let len = rng.gen_range(1..=max_len); + let (chunk, suffix) = new_text.split_at(len); + provider.send_completion(chunk.into()); + new_text = suffix; + cx.background_executor.run_until_parked(); + } + provider.finish_completion(); + cx.background_executor.run_until_parked(); + + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! {" + fn main() { + let mut x = 0; + while x < 10 { + x += 1; + } + } + "} + ); + } + + #[gpui::test(iterations = 10)] + async fn test_autoindent_when_generating_past_indentation( + cx: &mut TestAppContext, + mut rng: StdRng, + ) { + let provider = FakeCompletionProvider::default(); + cx.set_global(CompletionProvider::Fake(provider.clone())); + cx.set_global(cx.update(SettingsStore::test)); + cx.update(language_settings::init); + + let text = indoc! {" + fn main() { + le + } + "}; + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let position = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 6)) + }); + let codegen = cx.new_model(|cx| { + Codegen::new(buffer.clone(), CodegenKind::Generate { position }, None, cx) + }); + + let request = LanguageModelRequest::default(); + codegen.update(cx, |codegen, cx| codegen.start(request, cx)); + + let mut new_text = concat!( + "t mut x = 0;\n", + "while x < 10 {\n", + " x += 1;\n", + "}", // + ); + while !new_text.is_empty() { + let max_len = cmp::min(new_text.len(), 10); + let len = rng.gen_range(1..=max_len); + let (chunk, suffix) = new_text.split_at(len); + provider.send_completion(chunk.into()); + new_text = suffix; + cx.background_executor.run_until_parked(); + } + provider.finish_completion(); + cx.background_executor.run_until_parked(); + + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! 
{" + fn main() { + let mut x = 0; + while x < 10 { + x += 1; + } + } + "} + ); + } + + #[gpui::test(iterations = 10)] + async fn test_autoindent_when_generating_before_indentation( + cx: &mut TestAppContext, + mut rng: StdRng, + ) { + let provider = FakeCompletionProvider::default(); + cx.set_global(CompletionProvider::Fake(provider.clone())); + cx.set_global(cx.update(SettingsStore::test)); + cx.update(language_settings::init); + + let text = concat!( + "fn main() {\n", + " \n", + "}\n" // + ); + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let position = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 2)) + }); + let codegen = cx.new_model(|cx| { + Codegen::new(buffer.clone(), CodegenKind::Generate { position }, None, cx) + }); + + let request = LanguageModelRequest::default(); + codegen.update(cx, |codegen, cx| codegen.start(request, cx)); + + let mut new_text = concat!( + "let mut x = 0;\n", + "while x < 10 {\n", + " x += 1;\n", + "}", // + ); + while !new_text.is_empty() { + let max_len = cmp::min(new_text.len(), 10); + let len = rng.gen_range(1..=max_len); + let (chunk, suffix) = new_text.split_at(len); + provider.send_completion(chunk.into()); + new_text = suffix; + cx.background_executor.run_until_parked(); + } + provider.finish_completion(); + cx.background_executor.run_until_parked(); + + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! {" + fn main() { + let mut x = 0; + while x < 10 { + x += 1; + } + } + "} + ); + } + + #[gpui::test] + async fn test_strip_invalid_spans_from_codeblock() { + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("Lorem ipsum dolor", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum dolor" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum dolor" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum dolor" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```\n", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum dolor" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks( + "```html\n```js\nLorem ipsum dolor\n```\n```", + 2 + )) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "```js\nLorem ipsum dolor\n```" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("``\nLorem ipsum dolor\n```", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "``\nLorem ipsum dolor\n```" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("<|S|Lorem ipsum|E|>", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum" + ); + + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("<|S|>Lorem ipsum", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum" + ); + + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("```\n<|S|>Lorem ipsum\n```", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum" + ); + assert_eq!( + strip_invalid_spans_from_codeblock(chunks("```\n<|S|Lorem ipsum|E|>\n```", 2)) + .map(|chunk| chunk.unwrap()) + .collect::() + .await, + "Lorem ipsum" + ); + fn chunks(text: &str, size: usize) -> impl 
Stream> { + stream::iter( + text.chars() + .collect::>() + .chunks(size) + .map(|chunk| Ok(chunk.iter().collect::())) + .collect::>(), + ) + } + } + + fn rust_lang() -> Language { + Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_indents_query( + r#" + (call_expression) @indent + (field_expression) @indent + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap() + } +} diff --git a/crates/assistant/src/completion_provider.rs b/crates/assistant/src/completion_provider.rs new file mode 100644 index 0000000..a2c60d7 --- /dev/null +++ b/crates/assistant/src/completion_provider.rs @@ -0,0 +1,252 @@ +mod anthropic; +#[cfg(test)] +mod fake; +mod open_ai; +mod zed; + +pub use anthropic::*; +#[cfg(test)] +pub use fake::*; +pub use open_ai::*; +pub use zed::*; + +use crate::{ + assistant_settings::{AssistantProvider, AssistantSettings}, + LanguageModel, LanguageModelRequest, +}; +use anyhow::Result; +use client::Client; +use futures::{future::BoxFuture, stream::BoxStream}; +use gpui::{AnyView, AppContext, BorrowAppContext, Task, WindowContext}; +use settings::{Settings, SettingsStore}; +use std::sync::Arc; +use std::time::Duration; + +pub fn init(client: Arc, cx: &mut AppContext) { + let mut settings_version = 0; + let provider = match &AssistantSettings::get_global(cx).provider { + AssistantProvider::ZedDotDev { default_model } => { + CompletionProvider::ZedDotDev(ZedDotDevCompletionProvider::new( + default_model.clone(), + client.clone(), + settings_version, + cx, + )) + } + AssistantProvider::OpenAi { + default_model, + api_url, + low_speed_timeout_in_seconds, + } => CompletionProvider::OpenAi(OpenAiCompletionProvider::new( + default_model.clone(), + api_url.clone(), + client.http_client(), + low_speed_timeout_in_seconds.map(Duration::from_secs), + settings_version, + )), + AssistantProvider::Anthropic { + default_model, + api_url, + low_speed_timeout_in_seconds, + } => CompletionProvider::Anthropic(AnthropicCompletionProvider::new( + default_model.clone(), + api_url.clone(), + client.http_client(), + low_speed_timeout_in_seconds.map(Duration::from_secs), + settings_version, + )), + }; + cx.set_global(provider); + + cx.observe_global::(move |cx| { + settings_version += 1; + cx.update_global::(|provider, cx| { + match (&mut *provider, &AssistantSettings::get_global(cx).provider) { + ( + CompletionProvider::OpenAi(provider), + AssistantProvider::OpenAi { + default_model, + api_url, + low_speed_timeout_in_seconds, + }, + ) => { + provider.update( + default_model.clone(), + api_url.clone(), + low_speed_timeout_in_seconds.map(Duration::from_secs), + settings_version, + ); + } + ( + CompletionProvider::Anthropic(provider), + AssistantProvider::Anthropic { + default_model, + api_url, + low_speed_timeout_in_seconds, + }, + ) => { + provider.update( + default_model.clone(), + api_url.clone(), + low_speed_timeout_in_seconds.map(Duration::from_secs), + settings_version, + ); + } + ( + CompletionProvider::ZedDotDev(provider), + AssistantProvider::ZedDotDev { default_model }, + ) => { + provider.update(default_model.clone(), settings_version); + } + (_, AssistantProvider::ZedDotDev { default_model }) => { + *provider = CompletionProvider::ZedDotDev(ZedDotDevCompletionProvider::new( + default_model.clone(), + client.clone(), + settings_version, + cx, + )); + } + ( + _, + AssistantProvider::OpenAi { + 
default_model, + api_url, + low_speed_timeout_in_seconds, + }, + ) => { + *provider = CompletionProvider::OpenAi(OpenAiCompletionProvider::new( + default_model.clone(), + api_url.clone(), + client.http_client(), + low_speed_timeout_in_seconds.map(Duration::from_secs), + settings_version, + )); + } + ( + _, + AssistantProvider::Anthropic { + default_model, + api_url, + low_speed_timeout_in_seconds, + }, + ) => { + *provider = CompletionProvider::Anthropic(AnthropicCompletionProvider::new( + default_model.clone(), + api_url.clone(), + client.http_client(), + low_speed_timeout_in_seconds.map(Duration::from_secs), + settings_version, + )); + } + } + }) + }) + .detach(); +} + +pub enum CompletionProvider { + OpenAi(OpenAiCompletionProvider), + Anthropic(AnthropicCompletionProvider), + ZedDotDev(ZedDotDevCompletionProvider), + #[cfg(test)] + Fake(FakeCompletionProvider), +} + +impl gpui::Global for CompletionProvider {} + +impl CompletionProvider { + pub fn global(cx: &AppContext) -> &Self { + cx.global::() + } + + pub fn settings_version(&self) -> usize { + match self { + CompletionProvider::OpenAi(provider) => provider.settings_version(), + CompletionProvider::Anthropic(provider) => provider.settings_version(), + CompletionProvider::ZedDotDev(provider) => provider.settings_version(), + #[cfg(test)] + CompletionProvider::Fake(_) => unimplemented!(), + } + } + + pub fn is_authenticated(&self) -> bool { + match self { + CompletionProvider::OpenAi(provider) => provider.is_authenticated(), + CompletionProvider::Anthropic(provider) => provider.is_authenticated(), + CompletionProvider::ZedDotDev(provider) => provider.is_authenticated(), + #[cfg(test)] + CompletionProvider::Fake(_) => true, + } + } + + pub fn authenticate(&self, cx: &AppContext) -> Task> { + match self { + CompletionProvider::OpenAi(provider) => provider.authenticate(cx), + CompletionProvider::Anthropic(provider) => provider.authenticate(cx), + CompletionProvider::ZedDotDev(provider) => provider.authenticate(cx), + #[cfg(test)] + CompletionProvider::Fake(_) => Task::ready(Ok(())), + } + } + + pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView { + match self { + CompletionProvider::OpenAi(provider) => provider.authentication_prompt(cx), + CompletionProvider::Anthropic(provider) => provider.authentication_prompt(cx), + CompletionProvider::ZedDotDev(provider) => provider.authentication_prompt(cx), + #[cfg(test)] + CompletionProvider::Fake(_) => unimplemented!(), + } + } + + pub fn reset_credentials(&self, cx: &AppContext) -> Task> { + match self { + CompletionProvider::OpenAi(provider) => provider.reset_credentials(cx), + CompletionProvider::Anthropic(provider) => provider.reset_credentials(cx), + CompletionProvider::ZedDotDev(_) => Task::ready(Ok(())), + #[cfg(test)] + CompletionProvider::Fake(_) => Task::ready(Ok(())), + } + } + + pub fn default_model(&self) -> LanguageModel { + match self { + CompletionProvider::OpenAi(provider) => LanguageModel::OpenAi(provider.default_model()), + CompletionProvider::Anthropic(provider) => { + LanguageModel::Anthropic(provider.default_model()) + } + CompletionProvider::ZedDotDev(provider) => { + LanguageModel::ZedDotDev(provider.default_model()) + } + #[cfg(test)] + CompletionProvider::Fake(_) => unimplemented!(), + } + } + + pub fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + match self { + CompletionProvider::OpenAi(provider) => provider.count_tokens(request, cx), + CompletionProvider::Anthropic(provider) => 
provider.count_tokens(request, cx), + CompletionProvider::ZedDotDev(provider) => provider.count_tokens(request, cx), + #[cfg(test)] + CompletionProvider::Fake(_) => unimplemented!(), + } + } + + pub fn complete( + &self, + request: LanguageModelRequest, + ) -> BoxFuture<'static, Result>>> { + match self { + CompletionProvider::OpenAi(provider) => provider.complete(request), + CompletionProvider::Anthropic(provider) => provider.complete(request), + CompletionProvider::ZedDotDev(provider) => provider.complete(request), + #[cfg(test)] + CompletionProvider::Fake(provider) => provider.complete(), + } + } +} diff --git a/crates/assistant/src/completion_provider/anthropic.rs b/crates/assistant/src/completion_provider/anthropic.rs new file mode 100644 index 0000000..a203949 --- /dev/null +++ b/crates/assistant/src/completion_provider/anthropic.rs @@ -0,0 +1,327 @@ +use crate::count_open_ai_tokens; +use crate::{ + assistant_settings::AnthropicModel, CompletionProvider, LanguageModel, LanguageModelRequest, + Role, +}; +use anthropic::{stream_completion, Request, RequestMessage, Role as AnthropicRole}; +use anyhow::{anyhow, Result}; +use editor::{Editor, EditorElement, EditorStyle}; +use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt}; +use gpui::{AnyView, AppContext, FontStyle, FontWeight, Task, TextStyle, View, WhiteSpace}; +use http::HttpClient; +use settings::Settings; +use std::time::Duration; +use std::{env, sync::Arc}; +use theme::ThemeSettings; +use ui::prelude::*; +use util::ResultExt; + +pub struct AnthropicCompletionProvider { + api_key: Option, + api_url: String, + default_model: AnthropicModel, + http_client: Arc, + low_speed_timeout: Option, + settings_version: usize, +} + +impl AnthropicCompletionProvider { + pub fn new( + default_model: AnthropicModel, + api_url: String, + http_client: Arc, + low_speed_timeout: Option, + settings_version: usize, + ) -> Self { + Self { + api_key: None, + api_url, + default_model, + http_client, + low_speed_timeout, + settings_version, + } + } + + pub fn update( + &mut self, + default_model: AnthropicModel, + api_url: String, + low_speed_timeout: Option, + settings_version: usize, + ) { + self.default_model = default_model; + self.api_url = api_url; + self.low_speed_timeout = low_speed_timeout; + self.settings_version = settings_version; + } + + pub fn settings_version(&self) -> usize { + self.settings_version + } + + pub fn is_authenticated(&self) -> bool { + self.api_key.is_some() + } + + pub fn authenticate(&self, cx: &AppContext) -> Task> { + if self.is_authenticated() { + Task::ready(Ok(())) + } else { + let api_url = self.api_url.clone(); + cx.spawn(|mut cx| async move { + let api_key = if let Ok(api_key) = env::var("ANTHROPIC_API_KEY") { + api_key + } else { + let (_, api_key) = cx + .update(|cx| cx.read_credentials(&api_url))? + .await? + .ok_or_else(|| anyhow!("credentials not found"))?; + String::from_utf8(api_key)? 
+ }; + cx.update_global::(|provider, _cx| { + if let CompletionProvider::Anthropic(provider) = provider { + provider.api_key = Some(api_key); + } + }) + }) + } + } + + pub fn reset_credentials(&self, cx: &AppContext) -> Task> { + let delete_credentials = cx.delete_credentials(&self.api_url); + cx.spawn(|mut cx| async move { + delete_credentials.await.log_err(); + cx.update_global::(|provider, _cx| { + if let CompletionProvider::Anthropic(provider) = provider { + provider.api_key = None; + } + }) + }) + } + + pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView { + cx.new_view(|cx| AuthenticationPrompt::new(self.api_url.clone(), cx)) + .into() + } + + pub fn default_model(&self) -> AnthropicModel { + self.default_model.clone() + } + + pub fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + count_open_ai_tokens(request, cx.background_executor()) + } + + pub fn complete( + &self, + request: LanguageModelRequest, + ) -> BoxFuture<'static, Result>>> { + let request = self.to_anthropic_request(request); + + let http_client = self.http_client.clone(); + let api_key = self.api_key.clone(); + let api_url = self.api_url.clone(); + let low_speed_timeout = self.low_speed_timeout; + async move { + let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let request = stream_completion( + http_client.as_ref(), + &api_url, + &api_key, + request, + low_speed_timeout, + ); + let response = request.await?; + let stream = response + .filter_map(|response| async move { + match response { + Ok(response) => match response { + anthropic::ResponseEvent::ContentBlockStart { + content_block, .. + } => match content_block { + anthropic::ContentBlock::Text { text } => Some(Ok(text)), + }, + anthropic::ResponseEvent::ContentBlockDelta { delta, .. 
} => { + match delta { + anthropic::TextDelta::TextDelta { text } => Some(Ok(text)), + } + } + _ => None, + }, + Err(error) => Some(Err(error)), + } + }) + .boxed(); + Ok(stream) + } + .boxed() + } + + fn to_anthropic_request(&self, request: LanguageModelRequest) -> Request { + let model = match request.model { + LanguageModel::Anthropic(model) => model, + _ => self.default_model(), + }; + + let mut system_message = String::new(); + + let mut messages: Vec = Vec::new(); + for message in request.messages { + if message.content.is_empty() { + continue; + } + + match message.role { + Role::User | Role::Assistant => { + let role = match message.role { + Role::User => AnthropicRole::User, + Role::Assistant => AnthropicRole::Assistant, + _ => unreachable!(), + }; + + if let Some(last_message) = messages.last_mut() { + if last_message.role == role { + last_message.content.push_str("\n\n"); + last_message.content.push_str(&message.content); + continue; + } + } + + messages.push(RequestMessage { + role, + content: message.content, + }); + } + Role::System => { + if !system_message.is_empty() { + system_message.push_str("\n\n"); + } + system_message.push_str(&message.content); + } + } + } + + Request { + model, + messages, + stream: true, + system: system_message, + max_tokens: 4092, + } + } +} + +struct AuthenticationPrompt { + api_key: View, + api_url: String, +} + +impl AuthenticationPrompt { + fn new(api_url: String, cx: &mut WindowContext) -> Self { + Self { + api_key: cx.new_view(|cx| { + let mut editor = Editor::single_line(cx); + editor.set_placeholder_text( + "sk-000000000000000000000000000000000000000000000000", + cx, + ); + editor + }), + api_url, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { + let api_key = self.api_key.read(cx).text(cx); + if api_key.is_empty() { + return; + } + + let write_credentials = cx.write_credentials(&self.api_url, "Bearer", api_key.as_bytes()); + cx.spawn(|_, mut cx| async move { + write_credentials.await?; + cx.update_global::(|provider, _cx| { + if let CompletionProvider::Anthropic(provider) = provider { + provider.api_key = Some(api_key); + } + }) + }) + .detach_and_log_err(cx); + } + + fn render_api_key_editor(&self, cx: &mut ViewContext) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: rems(0.875).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: relative(1.3), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }; + EditorElement::new( + &self.api_key, + EditorStyle { + background: cx.theme().colors().editor_background, + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } +} + +impl Render for AuthenticationPrompt { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + const INSTRUCTIONS: [&str; 4] = [ + "To use the assistant panel or inline assistant, you need to add your Anthropic API key.", + "You can create an API key at: https://console.anthropic.com/settings/keys", + "", + "Paste your Anthropic API key below and hit enter to use the assistant:", + ]; + + v_flex() + .p_4() + .size_full() + .on_action(cx.listener(Self::save_api_key)) + .children( + INSTRUCTIONS.map(|instruction| Label::new(instruction).size(LabelSize::Small)), + ) + .child( + h_flex() + .w_full() + 
.my_2() + .px_2() + .py_1() + .bg(cx.theme().colors().editor_background) + .rounded_md() + .child(self.render_api_key_editor(cx)), + ) + .child( + Label::new( + "You can also assign the ANTHROPIC_API_KEY environment variable and restart Zed.", + ) + .size(LabelSize::Small), + ) + .child( + h_flex() + .gap_2() + .child(Label::new("Click on").size(LabelSize::Small)) + .child(Icon::new(IconName::Ai).size(IconSize::XSmall)) + .child( + Label::new("in the status bar to close this panel.").size(LabelSize::Small), + ), + ) + .into_any() + } +} diff --git a/crates/assistant/src/completion_provider/fake.rs b/crates/assistant/src/completion_provider/fake.rs new file mode 100644 index 0000000..9c06796 --- /dev/null +++ b/crates/assistant/src/completion_provider/fake.rs @@ -0,0 +1,29 @@ +use anyhow::Result; +use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt}; +use std::sync::Arc; + +#[derive(Clone, Default)] +pub struct FakeCompletionProvider { + current_completion_tx: Arc>>>, +} + +impl FakeCompletionProvider { + pub fn complete(&self) -> BoxFuture<'static, Result>>> { + let (tx, rx) = mpsc::unbounded(); + *self.current_completion_tx.lock() = Some(tx); + async move { Ok(rx.map(Ok).boxed()) }.boxed() + } + + pub fn send_completion(&self, chunk: String) { + self.current_completion_tx + .lock() + .as_ref() + .unwrap() + .unbounded_send(chunk) + .unwrap(); + } + + pub fn finish_completion(&self) { + self.current_completion_tx.lock().take(); + } +} diff --git a/crates/assistant/src/completion_provider/open_ai.rs b/crates/assistant/src/completion_provider/open_ai.rs new file mode 100644 index 0000000..536ce88 --- /dev/null +++ b/crates/assistant/src/completion_provider/open_ai.rs @@ -0,0 +1,341 @@ +use crate::assistant_settings::ZedDotDevModel; +use crate::{ + assistant_settings::OpenAiModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role, +}; +use anyhow::{anyhow, Result}; +use editor::{Editor, EditorElement, EditorStyle}; +use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt}; +use gpui::{AnyView, AppContext, FontStyle, FontWeight, Task, TextStyle, View, WhiteSpace}; +use http::HttpClient; +use open_ai::{stream_completion, Request, RequestMessage, Role as OpenAiRole}; +use settings::Settings; +use std::time::Duration; +use std::{env, sync::Arc}; +use theme::ThemeSettings; +use ui::prelude::*; +use util::ResultExt; + +pub struct OpenAiCompletionProvider { + api_key: Option, + api_url: String, + default_model: OpenAiModel, + http_client: Arc, + low_speed_timeout: Option, + settings_version: usize, +} + +impl OpenAiCompletionProvider { + pub fn new( + default_model: OpenAiModel, + api_url: String, + http_client: Arc, + low_speed_timeout: Option, + settings_version: usize, + ) -> Self { + Self { + api_key: None, + api_url, + default_model, + http_client, + low_speed_timeout, + settings_version, + } + } + + pub fn update( + &mut self, + default_model: OpenAiModel, + api_url: String, + low_speed_timeout: Option, + settings_version: usize, + ) { + self.default_model = default_model; + self.api_url = api_url; + self.low_speed_timeout = low_speed_timeout; + self.settings_version = settings_version; + } + + pub fn settings_version(&self) -> usize { + self.settings_version + } + + pub fn is_authenticated(&self) -> bool { + self.api_key.is_some() + } + + pub fn authenticate(&self, cx: &AppContext) -> Task> { + if self.is_authenticated() { + Task::ready(Ok(())) + } else { + let api_url = self.api_url.clone(); + cx.spawn(|mut cx| async move { + 
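+ // Prefer the OPENAI_API_KEY environment variable; otherwise fall back to
+ // credentials previously saved for this API URL.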
let api_key = if let Ok(api_key) = env::var("OPENAI_API_KEY") { + api_key + } else { + let (_, api_key) = cx + .update(|cx| cx.read_credentials(&api_url))? + .await? + .ok_or_else(|| anyhow!("credentials not found"))?; + String::from_utf8(api_key)? + }; + cx.update_global::(|provider, _cx| { + if let CompletionProvider::OpenAi(provider) = provider { + provider.api_key = Some(api_key); + } + }) + }) + } + } + + pub fn reset_credentials(&self, cx: &AppContext) -> Task> { + let delete_credentials = cx.delete_credentials(&self.api_url); + cx.spawn(|mut cx| async move { + delete_credentials.await.log_err(); + cx.update_global::(|provider, _cx| { + if let CompletionProvider::OpenAi(provider) = provider { + provider.api_key = None; + } + }) + }) + } + + pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView { + cx.new_view(|cx| AuthenticationPrompt::new(self.api_url.clone(), cx)) + .into() + } + + pub fn default_model(&self) -> OpenAiModel { + self.default_model.clone() + } + + pub fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + count_open_ai_tokens(request, cx.background_executor()) + } + + pub fn complete( + &self, + request: LanguageModelRequest, + ) -> BoxFuture<'static, Result>>> { + let request = self.to_open_ai_request(request); + + let http_client = self.http_client.clone(); + let api_key = self.api_key.clone(); + let api_url = self.api_url.clone(); + let low_speed_timeout = self.low_speed_timeout; + async move { + let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let request = stream_completion( + http_client.as_ref(), + &api_url, + &api_key, + request, + low_speed_timeout, + ); + let response = request.await?; + let stream = response + .filter_map(|response| async move { + match response { + Ok(mut response) => Some(Ok(response.choices.pop()?.delta.content?)), + Err(error) => Some(Err(error)), + } + }) + .boxed(); + Ok(stream) + } + .boxed() + } + + fn to_open_ai_request(&self, request: LanguageModelRequest) -> Request { + let model = match request.model { + LanguageModel::OpenAi(model) => model, + _ => self.default_model(), + }; + + Request { + model, + messages: request + .messages + .into_iter() + .map(|msg| match msg.role { + Role::User => RequestMessage::User { + content: msg.content, + }, + Role::Assistant => RequestMessage::Assistant { + content: Some(msg.content), + tool_calls: Vec::new(), + }, + Role::System => RequestMessage::System { + content: msg.content, + }, + }) + .collect(), + stream: true, + stop: request.stop, + temperature: request.temperature, + tools: Vec::new(), + tool_choice: None, + } + } +} + +pub fn count_open_ai_tokens( + request: LanguageModelRequest, + background_executor: &gpui::BackgroundExecutor, +) -> BoxFuture<'static, Result> { + background_executor + .spawn(async move { + let messages = request + .messages + .into_iter() + .map(|message| tiktoken_rs::ChatCompletionRequestMessage { + role: match message.role { + Role::User => "user".into(), + Role::Assistant => "assistant".into(), + Role::System => "system".into(), + }, + content: Some(message.content), + name: None, + function_call: None, + }) + .collect::>(); + + match request.model { + LanguageModel::Anthropic(_) + | LanguageModel::ZedDotDev(ZedDotDevModel::Claude3Opus) + | LanguageModel::ZedDotDev(ZedDotDevModel::Claude3Sonnet) + | LanguageModel::ZedDotDev(ZedDotDevModel::Claude3Haiku) => { + // Tiktoken doesn't yet support these models, so we manually use the + // same tokenizer as GPT-4. 
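+ // The resulting counts are therefore only an approximation for Claude models.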
+ tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) + } + _ => tiktoken_rs::num_tokens_from_messages(request.model.id(), &messages), + } + }) + .boxed() +} + +impl From for open_ai::Role { + fn from(val: Role) -> Self { + match val { + Role::User => OpenAiRole::User, + Role::Assistant => OpenAiRole::Assistant, + Role::System => OpenAiRole::System, + } + } +} + +struct AuthenticationPrompt { + api_key: View, + api_url: String, +} + +impl AuthenticationPrompt { + fn new(api_url: String, cx: &mut WindowContext) -> Self { + Self { + api_key: cx.new_view(|cx| { + let mut editor = Editor::single_line(cx); + editor.set_placeholder_text( + "sk-000000000000000000000000000000000000000000000000", + cx, + ); + editor + }), + api_url, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { + let api_key = self.api_key.read(cx).text(cx); + if api_key.is_empty() { + return; + } + + let write_credentials = cx.write_credentials(&self.api_url, "Bearer", api_key.as_bytes()); + cx.spawn(|_, mut cx| async move { + write_credentials.await?; + cx.update_global::(|provider, _cx| { + if let CompletionProvider::OpenAi(provider) = provider { + provider.api_key = Some(api_key); + } + }) + }) + .detach_and_log_err(cx); + } + + fn render_api_key_editor(&self, cx: &mut ViewContext) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: rems(0.875).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: relative(1.3), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }; + EditorElement::new( + &self.api_key, + EditorStyle { + background: cx.theme().colors().editor_background, + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } +} + +impl Render for AuthenticationPrompt { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + const INSTRUCTIONS: [&str; 6] = [ + "To use the assistant panel or inline assistant, you need to add your OpenAI API key.", + " - You can create an API key at: platform.openai.com/api-keys", + " - Make sure your OpenAI account has credits", + " - Having a subscription for another service like GitHub Copilot won't work.", + "", + "Paste your OpenAI API key below and hit enter to use the assistant:", + ]; + + v_flex() + .p_4() + .size_full() + .on_action(cx.listener(Self::save_api_key)) + .children( + INSTRUCTIONS.map(|instruction| Label::new(instruction).size(LabelSize::Small)), + ) + .child( + h_flex() + .w_full() + .my_2() + .px_2() + .py_1() + .bg(cx.theme().colors().editor_background) + .rounded_md() + .child(self.render_api_key_editor(cx)), + ) + .child( + Label::new( + "You can also assign the OPENAI_API_KEY environment variable and restart Zed.", + ) + .size(LabelSize::Small), + ) + .child( + h_flex() + .gap_2() + .child(Label::new("Click on").size(LabelSize::Small)) + .child(Icon::new(IconName::Ai).size(IconSize::XSmall)) + .child( + Label::new("in the status bar to close this panel.").size(LabelSize::Small), + ), + ) + .into_any() + } +} diff --git a/crates/assistant/src/completion_provider/zed.rs b/crates/assistant/src/completion_provider/zed.rs new file mode 100644 index 0000000..8fa1498 --- /dev/null +++ b/crates/assistant/src/completion_provider/zed.rs @@ -0,0 +1,178 @@ +use crate::{ + 
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider, LanguageModel, + LanguageModelRequest, +}; +use anyhow::{anyhow, Result}; +use client::{proto, Client}; +use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt, TryFutureExt}; +use gpui::{AnyView, AppContext, Task}; +use std::{future, sync::Arc}; +use ui::prelude::*; + +pub struct ZedDotDevCompletionProvider { + client: Arc, + default_model: ZedDotDevModel, + settings_version: usize, + status: client::Status, + _maintain_client_status: Task<()>, +} + +impl ZedDotDevCompletionProvider { + pub fn new( + default_model: ZedDotDevModel, + client: Arc, + settings_version: usize, + cx: &mut AppContext, + ) -> Self { + let mut status_rx = client.status(); + let status = *status_rx.borrow(); + let maintain_client_status = cx.spawn(|mut cx| async move { + while let Some(status) = status_rx.next().await { + let _ = cx.update_global::(|provider, _cx| { + if let CompletionProvider::ZedDotDev(provider) = provider { + provider.status = status; + } else { + unreachable!() + } + }); + } + }); + Self { + client, + default_model, + settings_version, + status, + _maintain_client_status: maintain_client_status, + } + } + + pub fn update(&mut self, default_model: ZedDotDevModel, settings_version: usize) { + self.default_model = default_model; + self.settings_version = settings_version; + } + + pub fn settings_version(&self) -> usize { + self.settings_version + } + + pub fn default_model(&self) -> ZedDotDevModel { + self.default_model.clone() + } + + pub fn is_authenticated(&self) -> bool { + self.status.is_connected() + } + + pub fn authenticate(&self, cx: &AppContext) -> Task> { + let client = self.client.clone(); + cx.spawn(move |cx| async move { client.authenticate_and_connect(true, &cx).await }) + } + + pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView { + cx.new_view(|_cx| AuthenticationPrompt).into() + } + + pub fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + match request.model { + LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4) + | LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4Turbo) + | LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4Omni) + | LanguageModel::ZedDotDev(ZedDotDevModel::Gpt3Point5Turbo) => { + count_open_ai_tokens(request, cx.background_executor()) + } + LanguageModel::ZedDotDev( + ZedDotDevModel::Claude3Opus + | ZedDotDevModel::Claude3Sonnet + | ZedDotDevModel::Claude3Haiku, + ) => { + // Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation. 
+ count_open_ai_tokens(request, cx.background_executor()) + } + LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => { + let request = self.client.request(proto::CountTokensWithLanguageModel { + model, + messages: request + .messages + .iter() + .map(|message| message.to_proto()) + .collect(), + }); + async move { + let response = request.await?; + Ok(response.token_count as usize) + } + .boxed() + } + _ => future::ready(Err(anyhow!("invalid model"))).boxed(), + } + } + + pub fn complete( + &self, + request: LanguageModelRequest, + ) -> BoxFuture<'static, Result>>> { + let request = proto::CompleteWithLanguageModel { + model: request.model.id().to_string(), + messages: request + .messages + .iter() + .map(|message| message.to_proto()) + .collect(), + stop: request.stop, + temperature: request.temperature, + tools: Vec::new(), + tool_choice: None, + }; + + self.client + .request_stream(request) + .map_ok(|stream| { + stream + .filter_map(|response| async move { + match response { + Ok(mut response) => Some(Ok(response.choices.pop()?.delta?.content?)), + Err(error) => Some(Err(error)), + } + }) + .boxed() + }) + .boxed() + } +} + +struct AuthenticationPrompt; + +impl Render for AuthenticationPrompt { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + const LABEL: &str = "Generate and analyze code with language models. You can dialog with the assistant in this panel or transform code inline."; + + v_flex().gap_6().p_4().child(Label::new(LABEL)).child( + v_flex() + .gap_2() + .child( + Button::new("sign_in", "Sign in") + .icon_color(Color::Muted) + .icon(IconName::Github) + .icon_position(IconPosition::Start) + .style(ButtonStyle::Filled) + .full_width() + .on_click(|_, cx| { + CompletionProvider::global(cx) + .authenticate(cx) + .detach_and_log_err(cx); + }), + ) + .child( + div().flex().w_full().items_center().child( + Label::new("Sign in to enable collaboration.") + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ) + } +} diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs new file mode 100644 index 0000000..3490b13 --- /dev/null +++ b/crates/assistant/src/prompt_library.rs @@ -0,0 +1,454 @@ +use fs::Fs; +use futures::StreamExt; +use gpui::{AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Render}; +use parking_lot::RwLock; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::sync::Arc; +use ui::{prelude::*, Checkbox, ModalHeader}; +use util::{paths::PROMPTS_DIR, ResultExt}; +use workspace::ModalView; + +pub struct PromptLibraryState { + /// The default prompt all assistant contexts will start with + _system_prompt: String, + /// All [UserPrompt]s loaded into the library + prompts: HashMap, + /// Prompts included in the default prompt + default_prompts: Vec, + /// Prompts that have a pending update that hasn't been applied yet + _updateable_prompts: Vec, + /// Prompts that have been changed since they were loaded + /// and can be reverted to their original state + _revertable_prompts: Vec, + version: usize, +} + +pub struct PromptLibrary { + state: RwLock, +} + +impl Default for PromptLibrary { + fn default() -> Self { + Self::new() + } +} + +impl PromptLibrary { + fn new() -> Self { + Self { + state: RwLock::new(PromptLibraryState { + _system_prompt: String::new(), + prompts: HashMap::new(), + default_prompts: Vec::new(), + _updateable_prompts: Vec::new(), + _revertable_prompts: Vec::new(), + version: 0, + }), + } + } + + pub async fn init(fs: Arc) -> anyhow::Result { 
+ let prompt_library = PromptLibrary::new(); + prompt_library.load_prompts(fs)?; + Ok(prompt_library) + } + + fn load_prompts(&self, fs: Arc) -> anyhow::Result<()> { + let prompts = futures::executor::block_on(UserPrompt::list(fs))?; + let prompts_with_ids = prompts + .clone() + .into_iter() + .map(|prompt| { + let id = uuid::Uuid::new_v4().to_string(); + (id, prompt) + }) + .collect::>(); + let mut state = self.state.write(); + state.prompts.extend(prompts_with_ids); + state.version += 1; + + Ok(()) + } + + pub fn default_prompt(&self) -> Option { + let state = self.state.read(); + + if state.default_prompts.is_empty() { + None + } else { + Some(self.join_default_prompts()) + } + } + + pub fn add_prompt_to_default(&self, prompt_id: String) -> anyhow::Result<()> { + let mut state = self.state.write(); + + if !state.default_prompts.contains(&prompt_id) && state.prompts.contains_key(&prompt_id) { + state.default_prompts.push(prompt_id); + state.version += 1; + } + + Ok(()) + } + + pub fn remove_prompt_from_default(&self, prompt_id: String) -> anyhow::Result<()> { + let mut state = self.state.write(); + + state.default_prompts.retain(|id| id != &prompt_id); + state.version += 1; + Ok(()) + } + + fn join_default_prompts(&self) -> String { + let state = self.state.read(); + let active_prompt_ids = state.default_prompts.to_vec(); + + active_prompt_ids + .iter() + .filter_map(|id| state.prompts.get(id).map(|p| p.prompt.clone())) + .collect::>() + .join("\n\n---\n\n") + } + + #[allow(unused)] + pub fn prompts(&self) -> Vec { + let state = self.state.read(); + state.prompts.values().cloned().collect() + } + + pub fn prompts_with_ids(&self) -> Vec<(String, UserPrompt)> { + let state = self.state.read(); + state + .prompts + .iter() + .map(|(id, prompt)| (id.clone(), prompt.clone())) + .collect() + } + + pub fn _default_prompts(&self) -> Vec { + let state = self.state.read(); + state + .default_prompts + .iter() + .filter_map(|id| state.prompts.get(id).cloned()) + .collect() + } + + pub fn default_prompt_ids(&self) -> Vec { + let state = self.state.read(); + state.default_prompts.clone() + } +} + +/// A custom prompt that can be loaded into the prompt library +/// +/// Example: +/// +/// ```json +/// { +/// "title": "Foo", +/// "version": "1.0", +/// "author": "Jane Kim ", +/// "languages": ["*"], // or ["rust", "python", "javascript"] etc... 
+/// "prompt": "bar" +/// } +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub struct UserPrompt { + version: String, + title: String, + author: String, + languages: Vec, + prompt: String, +} + +impl UserPrompt { + async fn list(fs: Arc) -> anyhow::Result> { + fs.create_dir(&PROMPTS_DIR).await?; + + let mut paths = fs.read_dir(&PROMPTS_DIR).await?; + let mut prompts = Vec::new(); + + while let Some(path_result) = paths.next().await { + let path = match path_result { + Ok(p) => p, + Err(e) => { + eprintln!("Error reading path: {:?}", e); + continue; + } + }; + + if path.extension() == Some(std::ffi::OsStr::new("json")) { + match fs.load(&path).await { + Ok(content) => { + let user_prompt: UserPrompt = + serde_json::from_str(&content).map_err(|e| { + anyhow::anyhow!("Failed to deserialize UserPrompt: {}", e) + })?; + + prompts.push(user_prompt); + } + Err(e) => eprintln!("Failed to load file {}: {}", path.display(), e), + } + } + } + + Ok(prompts) + } +} + +pub struct PromptManager { + focus_handle: FocusHandle, + prompt_library: Arc, + active_prompt: Option, +} + +impl PromptManager { + pub fn new(prompt_library: Arc, cx: &mut WindowContext) -> Self { + let focus_handle = cx.focus_handle(); + Self { + focus_handle, + prompt_library, + active_prompt: None, + } + } + + pub fn set_active_prompt(&mut self, prompt_id: Option) { + self.active_prompt = prompt_id; + } + + fn dismiss(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { + cx.emit(DismissEvent); + } +} + +impl Render for PromptManager { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let prompt_library = self.prompt_library.clone(); + let prompts = prompt_library + .clone() + .prompts_with_ids() + .clone() + .into_iter() + .collect::>(); + + let active_prompt = self.active_prompt.as_ref().and_then(|id| { + prompt_library + .prompts_with_ids() + .iter() + .find(|(prompt_id, _)| prompt_id == id) + .map(|(_, prompt)| prompt.clone()) + }); + + v_flex() + .key_context("PromptManager") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::dismiss)) + .elevation_3(cx) + .size_full() + .flex_none() + .w(rems(54.)) + .h(rems(40.)) + .overflow_hidden() + .child( + ModalHeader::new("prompt-manager-header") + .child(Headline::new("Prompt Library").size(HeadlineSize::Small)) + .show_dismiss_button(true), + ) + .child( + h_flex() + .flex_grow() + .overflow_hidden() + .border_t_1() + .border_color(cx.theme().colors().border) + .child( + div() + .id("prompt-preview") + .overflow_y_scroll() + .h_full() + .min_w_64() + .max_w_1_2() + .child( + v_flex() + .justify_start() + .py(Spacing::Medium.rems(cx)) + .px(Spacing::Large.rems(cx)) + .bg(cx.theme().colors().surface_background) + .when_else( + !prompts.is_empty(), + |with_items| { + with_items.children(prompts.into_iter().map( + |(id, prompt)| { + let prompt_library = prompt_library.clone(); + let prompt = prompt.clone(); + let prompt_id = id.clone(); + let shared_string_id: SharedString = + id.clone().into(); + + let default_prompt_ids = + prompt_library.clone().default_prompt_ids(); + let is_default = + default_prompt_ids.contains(&id); + // We'll use this for conditionally enabled prompts + // like those loaded only for certain languages + let is_conditional = false; + let selection = + match (is_default, is_conditional) { + (_, true) => Selection::Indeterminate, + (true, _) => Selection::Selected, + (false, _) => Selection::Unselected, + }; + + v_flex() + .id(ElementId::Name( + format!("prompt-{}", shared_string_id) + .into(), + )) + 
.p(Spacing::Small.rems(cx)) + + .on_click(cx.listener({ + let prompt_id = prompt_id.clone(); + move |this, _event, _cx| { + this.set_active_prompt(Some( + prompt_id.clone(), + )); + } + })) + .child( + h_flex() + .justify_between() + .child( + h_flex() + .gap(Spacing::Large.rems(cx)) + .child( + Checkbox::new( + shared_string_id, + selection, + ) + .on_click(move |_, _cx| { + if is_default { + prompt_library + .clone() + .remove_prompt_from_default( + prompt_id.clone(), + ) + .log_err(); + } else { + prompt_library + .clone() + .add_prompt_to_default( + prompt_id.clone(), + ) + .log_err(); + } + }), + ) + .child(Label::new( + prompt.title, + )), + ) + .child(div()), + ) + }, + )) + }, + |no_items| { + no_items.child( + Label::new("No prompts").color(Color::Placeholder), + ) + }, + ), + ), + ) + .child( + div() + .id("prompt-preview") + .overflow_y_scroll() + .border_l_1() + .border_color(cx.theme().colors().border) + .size_full() + .flex_none() + .child( + v_flex() + .justify_start() + .py(Spacing::Medium.rems(cx)) + .px(Spacing::Large.rems(cx)) + .gap(Spacing::Large.rems(cx)) + .when_else( + active_prompt.is_some(), + |with_prompt| { + let active_prompt = active_prompt.as_ref().unwrap(); + with_prompt + .child( + v_flex() + .gap_0p5() + .child( + Headline::new( + active_prompt.title.clone(), + ) + .size(HeadlineSize::XSmall), + ) + .child( + h_flex() + .child( + Label::new( + active_prompt + .author + .clone(), + ) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new( + if active_prompt + .languages + .is_empty() + || active_prompt + .languages[0] + == "*" + { + " · Global".to_string() + } else { + format!( + " · {}", + active_prompt + .languages + .join(", ") + ) + }, + ) + .size(LabelSize::XSmall) + .color(Color::Muted), + ), + ), + ) + .child( + div() + .w_full() + .max_w(rems(30.)) + .text_ui(cx) + .child(active_prompt.prompt.clone()), + ) + }, + |without_prompt| { + without_prompt.justify_center().items_center().child( + Label::new("Select a prompt to view details.") + .color(Color::Placeholder), + ) + }, + ), + ), + ), + ) + } +} + +impl EventEmitter for PromptManager {} +impl ModalView for PromptManager {} + +impl FocusableView for PromptManager { + fn focus_handle(&self, _cx: &AppContext) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs new file mode 100644 index 0000000..80dfc45 --- /dev/null +++ b/crates/assistant/src/prompts.rs @@ -0,0 +1,95 @@ +use language::BufferSnapshot; +use std::{fmt::Write, ops::Range}; + +pub fn generate_content_prompt( + user_prompt: String, + language_name: Option<&str>, + buffer: BufferSnapshot, + range: Range, + project_name: Option, +) -> anyhow::Result { + let mut prompt = String::new(); + + let content_type = match language_name { + None | Some("Markdown" | "Plain Text") => { + writeln!(prompt, "You are an expert engineer.")?; + "Text" + } + Some(language_name) => { + writeln!(prompt, "You are an expert {language_name} engineer.")?; + writeln!( + prompt, + "Your answer MUST always and only be valid {}.", + language_name + )?; + "Code" + } + }; + + if let Some(project_name) = project_name { + writeln!( + prompt, + "You are currently working inside the '{project_name}' project in code editor Zed." + )?; + } + + // Include file content. 
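+ // The whole buffer is embedded in the prompt, with the user's selection wrapped
+ // in <|START| ... |END|> spans, or a single <|START|> marker at the cursor when
+ // the selection is empty (e.g. `fn main() { <|START|let x = 0;|END|> }`).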
+ for chunk in buffer.text_for_range(0..range.start) { + prompt.push_str(chunk); + } + + if range.is_empty() { + prompt.push_str("<|START|>"); + } else { + prompt.push_str("<|START|"); + } + + for chunk in buffer.text_for_range(range.clone()) { + prompt.push_str(chunk); + } + + if !range.is_empty() { + prompt.push_str("|END|>"); + } + + for chunk in buffer.text_for_range(range.end..buffer.len()) { + prompt.push_str(chunk); + } + + prompt.push('\n'); + + if range.is_empty() { + writeln!( + prompt, + "Assume the cursor is located where the `<|START|>` span is." + ) + .unwrap(); + writeln!( + prompt, + "{content_type} can't be replaced, so assume your answer will be inserted at the cursor.", + ) + .unwrap(); + writeln!( + prompt, + "Generate {content_type} based on the users prompt: {user_prompt}", + ) + .unwrap(); + } else { + writeln!(prompt, "Modify the user's selected {content_type} based upon the users prompt: '{user_prompt}'").unwrap(); + writeln!(prompt, "You must reply with only the adjusted {content_type} (within the '<|START|' and '|END|>' spans) not the entire file.").unwrap(); + writeln!( + prompt, + "Double check that you only return code and not the '<|START|' and '|END|'> spans" + ) + .unwrap(); + } + + writeln!(prompt, "Never make remarks about the output.").unwrap(); + writeln!( + prompt, + "Do not return anything else, except the generated {content_type}." + ) + .unwrap(); + + Ok(prompt) +} diff --git a/crates/assistant/src/saved_conversation.rs b/crates/assistant/src/saved_conversation.rs new file mode 100644 index 0000000..ac6c925 --- /dev/null +++ b/crates/assistant/src/saved_conversation.rs @@ -0,0 +1,126 @@ +use crate::{assistant_settings::OpenAiModel, MessageId, MessageMetadata}; +use anyhow::{anyhow, Result}; +use collections::HashMap; +use fs::Fs; +use futures::StreamExt; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use std::{ + cmp::Reverse, + ffi::OsStr, + path::{Path, PathBuf}, + sync::Arc, +}; +use util::paths::CONVERSATIONS_DIR; + +#[derive(Serialize, Deserialize)] +pub struct SavedMessage { + pub id: MessageId, + pub start: usize, +} + +#[derive(Serialize, Deserialize)] +pub struct SavedConversation { + pub id: Option, + pub zed: String, + pub version: String, + pub text: String, + pub messages: Vec, + pub message_metadata: HashMap, + pub summary: String, +} + +impl SavedConversation { + pub const VERSION: &'static str = "0.2.0"; + + pub async fn load(path: &Path, fs: &dyn Fs) -> Result { + let saved_conversation = fs.load(path).await?; + let saved_conversation_json = + serde_json::from_str::(&saved_conversation)?; + match saved_conversation_json + .get("version") + .ok_or_else(|| anyhow!("version not found"))? 
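+ // Dispatch on the stored "version" string: current files deserialize directly,
+ // 0.1.0 files are migrated, and unknown versions are rejected.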
+ { + serde_json::Value::String(version) => match version.as_str() { + Self::VERSION => Ok(serde_json::from_value::(saved_conversation_json)?), + "0.1.0" => { + let saved_conversation = + serde_json::from_value::(saved_conversation_json)?; + Ok(Self { + id: saved_conversation.id, + zed: saved_conversation.zed, + version: saved_conversation.version, + text: saved_conversation.text, + messages: saved_conversation.messages, + message_metadata: saved_conversation.message_metadata, + summary: saved_conversation.summary, + }) + } + _ => Err(anyhow!( + "unrecognized saved conversation version: {}", + version + )), + }, + _ => Err(anyhow!("version not found on saved conversation")), + } + } +} + +#[derive(Serialize, Deserialize)] +struct SavedConversationV0_1_0 { + id: Option, + zed: String, + version: String, + text: String, + messages: Vec, + message_metadata: HashMap, + summary: String, + api_url: Option, + model: OpenAiModel, +} + +pub struct SavedConversationMetadata { + pub title: String, + pub path: PathBuf, + pub mtime: chrono::DateTime, +} + +impl SavedConversationMetadata { + pub async fn list(fs: Arc) -> Result> { + fs.create_dir(&CONVERSATIONS_DIR).await?; + + let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?; + let mut conversations = Vec::::new(); + while let Some(path) = paths.next().await { + let path = path?; + if path.extension() != Some(OsStr::new("json")) { + continue; + } + + let pattern = r" - \d+.zed.json$"; + let re = Regex::new(pattern).unwrap(); + + let metadata = fs.metadata(&path).await?; + if let Some((file_name, metadata)) = path + .file_name() + .and_then(|name| name.to_str()) + .zip(metadata) + { + // This is used to filter out conversations saved by the new assistant. + if !re.is_match(file_name) { + continue; + } + + let title = re.replace(file_name, ""); + conversations.push(Self { + title: title.into_owned(), + path, + mtime: metadata.mtime.into(), + }); + } + } + conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime)); + + Ok(conversations) + } +} diff --git a/crates/assistant/src/search.rs b/crates/assistant/src/search.rs new file mode 100644 index 0000000..f7b957b --- /dev/null +++ b/crates/assistant/src/search.rs @@ -0,0 +1,150 @@ +use language::Rope; +use std::ops::Range; + +/// Search the given buffer for the given substring, ignoring any differences +/// in line indentation between the query and the buffer. +/// +/// Returns a vector of ranges of byte offsets in the buffer corresponding +/// to the entire lines of the buffer. +pub fn fuzzy_search_lines(haystack: &Rope, needle: &str) -> Vec> { + let mut matches = Vec::new(); + let mut haystack_lines = haystack.chunks().lines(); + let mut haystack_line_start = 0; + while let Some(haystack_line) = haystack_lines.next() { + let next_haystack_line_start = haystack_line_start + haystack_line.len() + 1; + let mut trimmed_needle_lines = needle.lines().map(|line| line.trim()); + if Some(haystack_line.trim()) == trimmed_needle_lines.next() { + let match_start = haystack_line_start; + let mut match_end = next_haystack_line_start; + let matched = loop { + match (haystack_lines.next(), trimmed_needle_lines.next()) { + (Some(haystack_line), Some(needle_line)) => { + // Haystack line differs from needle line: not a match. + if haystack_line.trim() == needle_line { + match_end = haystack_lines.offset(); + } else { + break false; + } + } + // We exhausted the haystack but not the query: not a match. + (None, Some(_)) => break false, + // We exhausted the query: it's a match. 
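+ // (This arm also covers the case where haystack and needle end together.)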
+ (_, None) => break true, + } + }; + + if matched { + matches.push(match_start..match_end) + } + + // Advance to the next line. + haystack_lines.seek(next_haystack_line_start); + } + + haystack_line_start = next_haystack_line_start; + } + matches +} + +#[cfg(test)] +mod test { + use super::*; + use gpui::{AppContext, Context as _}; + use language::{Buffer, OffsetRangeExt}; + use unindent::Unindent as _; + use util::test::marked_text_ranges; + + #[gpui::test] + fn test_fuzzy_search_lines(cx: &mut AppContext) { + let (text, expected_ranges) = marked_text_ranges( + &r#" + fn main() { + if a() { + assert_eq!( + 1 + 2, + does_not_match, + ); + } + + println!("hi"); + + assert_eq!( + 1 + 2, + 3, + ); // this last line does not match + + « assert_eq!( + 1 + 2, + 3, + ); + » + + assert_eq!( + "something", + "else", + ); + + if b { + « assert_eq!( + 1 + 2, + 3, + ); + » } + } + "# + .unindent(), + false, + ); + + let buffer = cx.new_model(|cx| Buffer::local(&text, cx)); + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + + let actual_ranges = fuzzy_search_lines( + snapshot.as_rope(), + &" + assert_eq!( + 1 + 2, + 3, + ); + " + .unindent(), + ); + assert_eq!( + actual_ranges, + expected_ranges, + "actual: {:?}, expected: {:?}", + actual_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>(), + expected_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>() + ); + + let actual_ranges = fuzzy_search_lines( + snapshot.as_rope(), + &" + assert_eq!( + 1 + 2, + 3, + ); + " + .unindent(), + ); + assert_eq!( + actual_ranges, + expected_ranges, + "actual: {:?}, expected: {:?}", + actual_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>(), + expected_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>() + ); + } +} diff --git a/crates/assistant/src/streaming_diff.rs b/crates/assistant/src/streaming_diff.rs new file mode 100644 index 0000000..cba7758 --- /dev/null +++ b/crates/assistant/src/streaming_diff.rs @@ -0,0 +1,291 @@ +use collections::HashMap; +use ordered_float::OrderedFloat; +use std::{ + cmp, + fmt::{self, Debug}, + ops::Range, +}; + +struct Matrix { + cells: Vec, + rows: usize, + cols: usize, +} + +impl Matrix { + fn new() -> Self { + Self { + cells: Vec::new(), + rows: 0, + cols: 0, + } + } + + fn resize(&mut self, rows: usize, cols: usize) { + self.cells.resize(rows * cols, 0.); + self.rows = rows; + self.cols = cols; + } + + fn get(&self, row: usize, col: usize) -> f64 { + if row >= self.rows { + panic!("row out of bounds") + } + + if col >= self.cols { + panic!("col out of bounds") + } + self.cells[col * self.rows + row] + } + + fn set(&mut self, row: usize, col: usize, value: f64) { + if row >= self.rows { + panic!("row out of bounds") + } + + if col >= self.cols { + panic!("col out of bounds") + } + + self.cells[col * self.rows + row] = value; + } +} + +impl Debug for Matrix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + writeln!(f)?; + for i in 0..self.rows { + for j in 0..self.cols { + write!(f, "{:5}", self.get(i, j))?; + } + writeln!(f)?; + } + Ok(()) + } +} + +#[derive(Debug)] +pub enum Hunk { + Insert { text: String }, + Remove { len: usize }, + Keep { len: usize }, +} + +pub struct StreamingDiff { + old: Vec, + new: Vec, + scores: Matrix, + old_text_ix: usize, + new_text_ix: usize, + equal_runs: HashMap<(usize, usize), u32>, +} + +impl StreamingDiff { + const INSERTION_SCORE: f64 = -1.; + const DELETION_SCORE: f64 = -20.; + const EQUALITY_BASE: f64 = 1.8; + const 
MAX_EQUALITY_EXPONENT: i32 = 16; + + pub fn new(old: String) -> Self { + let old = old.chars().collect::>(); + let mut scores = Matrix::new(); + scores.resize(old.len() + 1, 1); + for i in 0..=old.len() { + scores.set(i, 0, i as f64 * Self::DELETION_SCORE); + } + Self { + old, + new: Vec::new(), + scores, + old_text_ix: 0, + new_text_ix: 0, + equal_runs: Default::default(), + } + } + + pub fn push_new(&mut self, text: &str) -> Vec { + self.new.extend(text.chars()); + self.scores.resize(self.old.len() + 1, self.new.len() + 1); + + for j in self.new_text_ix + 1..=self.new.len() { + self.scores.set(0, j, j as f64 * Self::INSERTION_SCORE); + for i in 1..=self.old.len() { + let insertion_score = self.scores.get(i, j - 1) + Self::INSERTION_SCORE; + let deletion_score = self.scores.get(i - 1, j) + Self::DELETION_SCORE; + let equality_score = if self.old[i - 1] == self.new[j - 1] { + let mut equal_run = self.equal_runs.get(&(i - 1, j - 1)).copied().unwrap_or(0); + equal_run += 1; + self.equal_runs.insert((i, j), equal_run); + + let exponent = cmp::min(equal_run as i32 / 4, Self::MAX_EQUALITY_EXPONENT); + self.scores.get(i - 1, j - 1) + Self::EQUALITY_BASE.powi(exponent) + } else { + f64::NEG_INFINITY + }; + + let score = insertion_score.max(deletion_score).max(equality_score); + self.scores.set(i, j, score); + } + } + + let mut max_score = f64::NEG_INFINITY; + let mut next_old_text_ix = self.old_text_ix; + let next_new_text_ix = self.new.len(); + for i in self.old_text_ix..=self.old.len() { + let score = self.scores.get(i, next_new_text_ix); + if score > max_score { + max_score = score; + next_old_text_ix = i; + } + } + + let hunks = self.backtrack(next_old_text_ix, next_new_text_ix); + self.old_text_ix = next_old_text_ix; + self.new_text_ix = next_new_text_ix; + hunks + } + + fn backtrack(&self, old_text_ix: usize, new_text_ix: usize) -> Vec { + let mut pending_insert: Option> = None; + let mut hunks = Vec::new(); + let mut i = old_text_ix; + let mut j = new_text_ix; + while (i, j) != (self.old_text_ix, self.new_text_ix) { + let insertion_score = if j > self.new_text_ix { + Some((i, j - 1)) + } else { + None + }; + let deletion_score = if i > self.old_text_ix { + Some((i - 1, j)) + } else { + None + }; + let equality_score = if i > self.old_text_ix && j > self.new_text_ix { + if self.old[i - 1] == self.new[j - 1] { + Some((i - 1, j - 1)) + } else { + None + } + } else { + None + }; + + let (prev_i, prev_j) = [insertion_score, deletion_score, equality_score] + .iter() + .max_by_key(|cell| cell.map(|(i, j)| OrderedFloat(self.scores.get(i, j)))) + .unwrap() + .unwrap(); + + if prev_i == i && prev_j == j - 1 { + if let Some(pending_insert) = pending_insert.as_mut() { + pending_insert.start = prev_j; + } else { + pending_insert = Some(prev_j..j); + } + } else { + if let Some(range) = pending_insert.take() { + hunks.push(Hunk::Insert { + text: self.new[range].iter().collect(), + }); + } + + let char_len = self.old[i - 1].len_utf8(); + if prev_i == i - 1 && prev_j == j { + if let Some(Hunk::Remove { len }) = hunks.last_mut() { + *len += char_len; + } else { + hunks.push(Hunk::Remove { len: char_len }) + } + } else if let Some(Hunk::Keep { len }) = hunks.last_mut() { + *len += char_len; + } else { + hunks.push(Hunk::Keep { len: char_len }) + } + } + + i = prev_i; + j = prev_j; + } + + if let Some(range) = pending_insert.take() { + hunks.push(Hunk::Insert { + text: self.new[range].iter().collect(), + }); + } + + hunks.reverse(); + hunks + } + + pub fn finish(self) -> Vec { + self.backtrack(self.old.len(), 
self.new.len()) + } +} + +#[cfg(test)] +mod tests { + use std::env; + + use super::*; + use rand::prelude::*; + + #[gpui::test(iterations = 100)] + fn test_random_diffs(mut rng: StdRng) { + let old_text_len = env::var("OLD_TEXT_LEN") + .map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable")) + .unwrap_or(10); + let new_text_len = env::var("NEW_TEXT_LEN") + .map(|i| i.parse().expect("invalid `NEW_TEXT_LEN` variable")) + .unwrap_or(10); + + let old = util::RandomCharIter::new(&mut rng) + .take(old_text_len) + .collect::(); + log::info!("old text: {:?}", old); + + let mut diff = StreamingDiff::new(old.clone()); + let mut hunks = Vec::new(); + let mut new_len = 0; + let mut new = String::new(); + while new_len < new_text_len { + let new_chunk_len = rng.gen_range(1..=new_text_len - new_len); + let new_chunk = util::RandomCharIter::new(&mut rng) + .take(new_len) + .collect::(); + log::info!("new chunk: {:?}", new_chunk); + new_len += new_chunk_len; + new.push_str(&new_chunk); + let new_hunks = diff.push_new(&new_chunk); + log::info!("hunks: {:?}", new_hunks); + hunks.extend(new_hunks); + } + let final_hunks = diff.finish(); + log::info!("final hunks: {:?}", final_hunks); + hunks.extend(final_hunks); + + log::info!("new text: {:?}", new); + let mut old_ix = 0; + let mut new_ix = 0; + let mut patched = String::new(); + for hunk in hunks { + match hunk { + Hunk::Keep { len } => { + assert_eq!(&old[old_ix..old_ix + len], &new[new_ix..new_ix + len]); + patched.push_str(&old[old_ix..old_ix + len]); + old_ix += len; + new_ix += len; + } + Hunk::Remove { len } => { + old_ix += len; + } + Hunk::Insert { text } => { + assert_eq!(text, &new[new_ix..new_ix + text.len()]); + patched.push_str(&text); + new_ix += text.len(); + } + } + } + assert_eq!(patched, new); + } +} diff --git a/crates/assistant/src/system_prompts/edits.md b/crates/assistant/src/system_prompts/edits.md new file mode 100644 index 0000000..f3b7733 --- /dev/null +++ b/crates/assistant/src/system_prompts/edits.md @@ -0,0 +1,86 @@ +When the user asks you to suggest edits for a buffer, use a strict template consisting of: + +* A markdown code block with the file path as the language identifier. +* The original code that should be replaced +* A separator line (`---`) +* The new text that should replace the original lines + +Each code block may only contain an edit for one single contiguous range of text. Use multiple code blocks for multiple edits. + +## Example + +If you have a buffer with the following lines: + +```path/to/file.rs +fn quicksort(arr: &mut [i32]) { + if arr.len() <= 1 { + return; + } + let pivot_index = partition(arr); + let (left, right) = arr.split_at_mut(pivot_index); + quicksort(left); + quicksort(&mut right[1..]); +} + +fn partition(arr: &mut [i32]) -> usize { + let last_index = arr.len() - 1; + let pivot = arr[last_index]; + let mut i = 0; + for j in 0..last_index { + if arr[j] <= pivot { + arr.swap(i, j); + i += 1; + } + } + arr.swap(i, last_index); + i +} +``` + +And you want to replace the for loop inside `partition`, output the following. + +```edit path/to/file.rs +for j in 0..last_index { + if arr[j] <= pivot { + arr.swap(i, j); + i += 1; + } +} +--- +let mut j = 0; +while j < last_index { + if arr[j] <= pivot { + arr.swap(i, j); + i += 1; + } + j += 1; +} +``` + +If you wanted to insert comments above the partition function, output the following: + +```edit path/to/file.rs +fn partition(arr: &mut [i32]) -> usize { +--- +// A helper function used for quicksort. 
+fn partition(arr: &mut [i32]) -> usize { +``` + +If you wanted to delete the partition function, output the following: + +```edit path/to/file.rs +fn partition(arr: &mut [i32]) -> usize { + let last_index = arr.len() - 1; + let pivot = arr[last_index]; + let mut i = 0; + for j in 0..last_index { + if arr[j] <= pivot { + arr.swap(i, j); + i += 1; + } + } + arr.swap(i, last_index); + i +} +--- +``` diff --git a/crates/assistant2/Cargo.toml b/crates/assistant2/Cargo.toml new file mode 100644 index 0000000..8df924d --- /dev/null +++ b/crates/assistant2/Cargo.toml @@ -0,0 +1,66 @@ +[package] +name = "assistant2" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/assistant2.rs" + +[features] +default = [] +stories = ["dep:story"] + +[dependencies] +anyhow.workspace = true +assistant_tooling.workspace = true +client.workspace = true +chrono.workspace = true +collections.workspace = true +editor.workspace = true +feature_flags.workspace = true +file_icons.workspace = true +fs.workspace = true +futures.workspace = true +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +log.workspace = true +markdown.workspace = true +open_ai.workspace = true +picker.workspace = true +project.workspace = true +regex.workspace = true +schemars.workspace = true +semantic_index.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +story = { workspace = true, optional = true } +theme.workspace = true +ui.workspace = true +util.workspace = true +unindent.workspace = true +workspace.workspace = true + +[dev-dependencies] +assets.workspace = true +editor = { workspace = true, features = ["test-support"] } +env_logger.workspace = true +gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +languages.workspace = true +markdown = { workspace = true, features = ["test-support"] } +node_runtime.workspace = true +project = { workspace = true, features = ["test-support"] } +rand.workspace = true +release_channel.workspace = true +settings = { workspace = true, features = ["test-support"] } +theme = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/assistant2/LICENSE-GPL b/crates/assistant2/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/assistant2/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/assistant2/evals/list-of-into-element.md b/crates/assistant2/evals/list-of-into-element.md new file mode 100644 index 0000000..fca5e1a --- /dev/null +++ b/crates/assistant2/evals/list-of-into-element.md @@ -0,0 +1 @@ +> Give me a comprehensive list of all the elements defined in my project using the following query: `impl Element for {}, impl Element for {}, impl IntoElement for {})` diff --git a/crates/assistant2/evals/new-gpui-element.md b/crates/assistant2/evals/new-gpui-element.md new file mode 100644 index 0000000..51452cb --- /dev/null +++ b/crates/assistant2/evals/new-gpui-element.md @@ -0,0 +1 @@ +> What are all the places we define a new gpui element in my project? 
(impl Element for {}) diff --git a/crates/assistant2/evals/settings-file.md b/crates/assistant2/evals/settings-file.md new file mode 100644 index 0000000..ff15f7d --- /dev/null +++ b/crates/assistant2/evals/settings-file.md @@ -0,0 +1,3 @@ +Use tools frequently, especially when referring to files and code. The Zed editor we're working in can show me files directly when you add annotations. Be concise in chat, bountiful in tool calling. + +Teach me everything you can about how zed loads settings. Please annotate the code inline. diff --git a/crates/assistant2/evals/what-is-the-assistant2-crate.md b/crates/assistant2/evals/what-is-the-assistant2-crate.md new file mode 100644 index 0000000..5d39684 --- /dev/null +++ b/crates/assistant2/evals/what-is-the-assistant2-crate.md @@ -0,0 +1 @@ +> Can you tell me what the assistant2 crate is for in my project? Tell me in 100 words or less. diff --git a/crates/assistant2/src/assistant2.rs b/crates/assistant2/src/assistant2.rs new file mode 100644 index 0000000..9ad1b7b --- /dev/null +++ b/crates/assistant2/src/assistant2.rs @@ -0,0 +1,1184 @@ +mod assistant_settings; +mod attachments; +mod completion_provider; +mod saved_conversation; +mod saved_conversations; +mod tools; +pub mod ui; + +use crate::saved_conversation::SavedConversationMetadata; +use crate::ui::UserOrAssistant; +use ::ui::{div, prelude::*, Color, Tooltip, ViewContext}; +use anyhow::{Context, Result}; +use assistant_tooling::{ + AttachmentRegistry, ProjectContext, ToolFunctionCall, ToolRegistry, UserAttachment, +}; +use attachments::ActiveEditorAttachmentTool; +use client::{proto, Client, UserStore}; +use collections::HashMap; +use completion_provider::*; +use editor::Editor; +use feature_flags::FeatureFlagAppExt as _; +use file_icons::FileIcons; +use fs::Fs; +use futures::{future::join_all, StreamExt}; +use gpui::{ + list, AnyElement, AppContext, AsyncWindowContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, ListAlignment, ListState, Model, ReadGlobal, Render, Task, UpdateGlobal, View, + WeakView, +}; +use language::{language_settings::SoftWrap, LanguageRegistry}; +use markdown::{Markdown, MarkdownStyle}; +use open_ai::{FunctionContent, ToolCall, ToolCallContent}; +use saved_conversation::{SavedAssistantMessagePart, SavedChatMessage, SavedConversation}; +use saved_conversations::SavedConversations; +use semantic_index::{CloudEmbeddingProvider, ProjectIndex, ProjectIndexDebugView, SemanticIndex}; +use serde::{Deserialize, Serialize}; +use settings::Settings; +use std::sync::Arc; +use tools::{AnnotationTool, CreateBufferTool, ProjectIndexTool}; +use ui::{ActiveFileButton, Composer, ProjectIndexButton}; +use util::paths::CONVERSATIONS_DIR; +use util::{maybe, paths::EMBEDDINGS_DIR, ResultExt}; +use workspace::{ + dock::{DockPosition, Panel, PanelEvent}, + Workspace, +}; + +pub use assistant_settings::AssistantSettings; + +const MAX_COMPLETION_CALLS_PER_SUBMISSION: usize = 5; + +#[derive(Eq, PartialEq, Copy, Clone, Deserialize)] +pub struct Submit(SubmitMode); + +/// There are multiple different ways to submit a model request, represented by this enum. +#[derive(Eq, PartialEq, Copy, Clone, Deserialize)] +pub enum SubmitMode { + /// Only include the conversation. + Simple, + /// Send the current file as context. + CurrentFile, + /// Search the codebase and send relevant excerpts. 
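+    ///
+    /// For illustration only (not part of this change): a keymap entry such as
+    /// `["assistant2::Submit", "Codebase"]` could dispatch this mode; the default
+    /// bindings are not shown in this diff.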
+ Codebase, +} + +gpui::actions!(assistant2, [Cancel, ToggleFocus, DebugProjectIndex,]); +gpui::impl_actions!(assistant2, [Submit]); + +pub fn init(client: Arc, cx: &mut AppContext) { + AssistantSettings::register(cx); + + cx.spawn(|mut cx| { + let client = client.clone(); + async move { + let embedding_provider = CloudEmbeddingProvider::new(client.clone()); + let semantic_index = SemanticIndex::new( + EMBEDDINGS_DIR.join("semantic-index-db.0.mdb"), + Arc::new(embedding_provider), + &mut cx, + ) + .await?; + cx.update(|cx| cx.set_global(semantic_index)) + } + }) + .detach(); + + cx.set_global(CompletionProvider::new(CloudCompletionProvider::new( + client, + ))); + + cx.observe_new_views( + |workspace: &mut Workspace, _cx: &mut ViewContext| { + workspace.register_action(|workspace, _: &ToggleFocus, cx| { + workspace.toggle_panel_focus::(cx); + }); + workspace.register_action(|workspace, _: &DebugProjectIndex, cx| { + if let Some(panel) = workspace.panel::(cx) { + let index = panel.read(cx).chat.read(cx).project_index.clone(); + let view = cx.new_view(|cx| ProjectIndexDebugView::new(index, cx)); + workspace.add_item_to_center(Box::new(view), cx); + } + }); + }, + ) + .detach(); +} + +pub fn enabled(cx: &AppContext) -> bool { + cx.is_staff() +} + +pub struct AssistantPanel { + chat: View, + width: Option, +} + +impl AssistantPanel { + pub fn load( + workspace: WeakView, + cx: AsyncWindowContext, + ) -> Task>> { + cx.spawn(|mut cx| async move { + let (app_state, project) = workspace.update(&mut cx, |workspace, _| { + (workspace.app_state().clone(), workspace.project().clone()) + })?; + + cx.new_view(|cx| { + let project_index = SemanticIndex::update_global(cx, |semantic_index, cx| { + semantic_index.project_index(project.clone(), cx) + }); + + // Used in tools to render file icons + cx.observe_global::(|_, cx| { + cx.notify(); + }) + .detach(); + + let mut tool_registry = ToolRegistry::new(); + tool_registry + .register(ProjectIndexTool::new(project_index.clone())) + .unwrap(); + tool_registry + .register(CreateBufferTool::new(workspace.clone(), project.clone())) + .unwrap(); + tool_registry + .register(AnnotationTool::new(workspace.clone(), project.clone())) + .unwrap(); + + let mut attachment_registry = AttachmentRegistry::new(); + attachment_registry + .register(ActiveEditorAttachmentTool::new(workspace.clone(), cx)); + + Self::new( + project.read(cx).fs().clone(), + app_state.languages.clone(), + Arc::new(tool_registry), + Arc::new(attachment_registry), + app_state.user_store.clone(), + project_index, + workspace, + cx, + ) + }) + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn new( + fs: Arc, + language_registry: Arc, + tool_registry: Arc, + attachment_registry: Arc, + user_store: Model, + project_index: Model, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + let chat = cx.new_view(|cx| { + AssistantChat::new( + fs, + language_registry, + tool_registry.clone(), + attachment_registry, + user_store, + project_index, + workspace, + cx, + ) + }); + + Self { width: None, chat } + } +} + +impl Render for AssistantPanel { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .size_full() + .v_flex() + .bg(cx.theme().colors().panel_background) + .child(self.chat.clone()) + } +} + +impl Panel for AssistantPanel { + fn persistent_name() -> &'static str { + "AssistantPanelv2" + } + + fn position(&self, _cx: &WindowContext) -> workspace::dock::DockPosition { + // todo!("Add a setting / use assistant settings") + DockPosition::Right + } + + fn 
position_is_valid(&self, position: workspace::dock::DockPosition) -> bool { + matches!(position, DockPosition::Right) + } + + fn set_position(&mut self, _: workspace::dock::DockPosition, _: &mut ViewContext) { + // Do nothing until we have a setting for this + } + + fn size(&self, _cx: &WindowContext) -> Pixels { + self.width.unwrap_or(px(400.)) + } + + fn set_size(&mut self, size: Option, cx: &mut ViewContext) { + self.width = size; + cx.notify(); + } + + fn icon(&self, _cx: &WindowContext) -> Option<::ui::IconName> { + Some(IconName::ZedAssistant) + } + + fn icon_tooltip(&self, _: &WindowContext) -> Option<&'static str> { + Some("Assistant Panel ✨") + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } +} + +impl EventEmitter for AssistantPanel {} + +impl FocusableView for AssistantPanel { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.chat.read(cx).composer_editor.read(cx).focus_handle(cx) + } +} + +pub struct AssistantChat { + model: String, + messages: Vec, + list_state: ListState, + fs: Arc, + language_registry: Arc, + composer_editor: View, + saved_conversations: View, + saved_conversations_open: bool, + project_index_button: View, + active_file_button: Option>, + user_store: Model, + next_message_id: MessageId, + collapsed_messages: HashMap, + editing_message: Option, + pending_completion: Option>, + tool_registry: Arc, + attachment_registry: Arc, + project_index: Model, + markdown_style: MarkdownStyle, +} + +struct EditingMessage { + id: MessageId, + body: View, +} + +impl AssistantChat { + #[allow(clippy::too_many_arguments)] + fn new( + fs: Arc, + language_registry: Arc, + tool_registry: Arc, + attachment_registry: Arc, + user_store: Model, + project_index: Model, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + let model = CompletionProvider::global(cx).default_model(); + let view = cx.view().downgrade(); + let list_state = ListState::new( + 0, + ListAlignment::Bottom, + px(1024.), + move |ix, cx: &mut WindowContext| { + view.update(cx, |this, cx| this.render_message(ix, cx)) + .unwrap() + }, + ); + + let project_index_button = cx.new_view(|cx| { + ProjectIndexButton::new(project_index.clone(), tool_registry.clone(), cx) + }); + + let active_file_button = match workspace.upgrade() { + Some(workspace) => { + Some(cx.new_view( + |cx| ActiveFileButton::new(attachment_registry.clone(), workspace, cx), // + )) + } + _ => None, + }; + + let saved_conversations = cx.new_view(|cx| SavedConversations::new(cx)); + cx.spawn({ + let fs = fs.clone(); + let saved_conversations = saved_conversations.downgrade(); + |_assistant_chat, mut cx| async move { + let saved_conversation_metadata = SavedConversationMetadata::list(fs).await?; + + cx.update(|cx| { + saved_conversations.update(cx, |this, cx| { + this.init(saved_conversation_metadata, cx); + }) + })??; + + anyhow::Ok(()) + } + }) + .detach_and_log_err(cx); + + Self { + model, + messages: Vec::new(), + composer_editor: cx.new_view(|cx| { + let mut editor = Editor::auto_height(80, cx); + editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); + editor.set_placeholder_text("Send a message…", cx); + editor + }), + saved_conversations, + saved_conversations_open: false, + list_state, + user_store, + fs, + language_registry, + project_index_button, + active_file_button, + project_index, + next_message_id: MessageId(0), + editing_message: None, + collapsed_messages: HashMap::default(), + pending_completion: None, + attachment_registry, + tool_registry, + markdown_style: MarkdownStyle { + code_block: 
gpui::TextStyleRefinement { + font_family: Some("Zed Mono".into()), + color: Some(cx.theme().colors().editor_foreground), + background_color: Some(cx.theme().colors().editor_background), + ..Default::default() + }, + inline_code: gpui::TextStyleRefinement { + font_family: Some("Zed Mono".into()), + // @nate: Could we add inline-code specific styles to the theme? + color: Some(cx.theme().colors().editor_foreground), + background_color: Some(cx.theme().colors().editor_background), + ..Default::default() + }, + rule_color: Color::Muted.color(cx), + block_quote_border_color: Color::Muted.color(cx), + block_quote: gpui::TextStyleRefinement { + color: Some(Color::Muted.color(cx)), + ..Default::default() + }, + link: gpui::TextStyleRefinement { + color: Some(Color::Accent.color(cx)), + underline: Some(gpui::UnderlineStyle { + thickness: px(1.), + color: Some(Color::Accent.color(cx)), + wavy: false, + }), + ..Default::default() + }, + syntax: cx.theme().syntax().clone(), + selection_background_color: { + let mut selection = cx.theme().players().local().selection; + selection.fade_out(0.7); + selection + }, + }, + } + } + + fn message_for_id(&self, id: MessageId) -> Option<&ChatMessage> { + self.messages.iter().find(|message| match message { + ChatMessage::User(message) => message.id == id, + ChatMessage::Assistant(message) => message.id == id, + }) + } + + fn toggle_saved_conversations(&mut self) { + self.saved_conversations_open = !self.saved_conversations_open; + } + + fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { + // If we're currently editing a message, cancel the edit. + if self.editing_message.take().is_some() { + cx.notify(); + return; + } + + if self.pending_completion.take().is_some() { + if let Some(ChatMessage::Assistant(grouping)) = self.messages.last() { + if grouping.messages.is_empty() { + self.pop_message(cx); + } + } + return; + } + + cx.propagate(); + } + + fn submit(&mut self, Submit(mode): &Submit, cx: &mut ViewContext) { + if self.composer_editor.focus_handle(cx).is_focused(cx) { + // Don't allow multiple concurrent completions. 
+ if self.pending_completion.is_some() { + cx.propagate(); + return; + } + + let message = self.composer_editor.update(cx, |composer_editor, cx| { + let text = composer_editor.text(cx); + let id = self.next_message_id.post_inc(); + let body = cx.new_view(|cx| { + Markdown::new( + text, + self.markdown_style.clone(), + Some(self.language_registry.clone()), + cx, + ) + }); + composer_editor.clear(cx); + + ChatMessage::User(UserMessage { + id, + body, + attachments: Vec::new(), + }) + }); + self.push_message(message, cx); + } else if let Some(editing_message) = self.editing_message.as_ref() { + let focus_handle = editing_message.body.focus_handle(cx); + if focus_handle.contains_focused(cx) { + if let Some(ChatMessage::User(user_message)) = + self.message_for_id(editing_message.id) + { + user_message.body.update(cx, |body, cx| { + body.reset(editing_message.body.read(cx).text(cx), cx); + }); + } + + self.truncate_messages(editing_message.id, cx); + + self.pending_completion.take(); + self.composer_editor.focus_handle(cx).focus(cx); + self.editing_message.take(); + } else { + log::error!("unexpected state: no user message editor is focused."); + return; + } + } else { + log::error!("unexpected state: no user message editor is focused."); + return; + } + + let mode = *mode; + self.pending_completion = Some(cx.spawn(move |this, mut cx| async move { + let attachments_task = this.update(&mut cx, |this, cx| { + let attachment_registry = this.attachment_registry.clone(); + attachment_registry.call_all_attachment_tools(cx) + }); + + let attachments = maybe!(async { + let attachments_task = attachments_task?; + let attachments = attachments_task.await?; + + anyhow::Ok(attachments) + }) + .await + .log_err() + .unwrap_or_default(); + + // Set the attachments to the _last_ user message + this.update(&mut cx, |this, _cx| { + if let Some(ChatMessage::User(message)) = this.messages.last_mut() { + message.attachments = attachments; + } + }) + .log_err(); + + Self::request_completion( + this.clone(), + mode, + MAX_COMPLETION_CALLS_PER_SUBMISSION, + &mut cx, + ) + .await + .log_err(); + + this.update(&mut cx, |this, _cx| { + this.pending_completion = None; + }) + .context("Failed to push new user message") + .log_err(); + })); + } + + async fn request_completion( + this: WeakView, + mode: SubmitMode, + limit: usize, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let mut call_count = 0; + loop { + let complete = async { + let (tool_definitions, model_name, messages) = this.update(cx, |this, cx| { + this.push_new_assistant_message(cx); + + let definitions = if call_count < limit + && matches!(mode, SubmitMode::Codebase | SubmitMode::Simple) + { + this.tool_registry.definitions() + } else { + Vec::new() + }; + call_count += 1; + + ( + definitions, + this.model.clone(), + this.completion_messages(cx), + ) + })?; + + let messages = messages.await?; + + let completion = cx.update(|cx| { + CompletionProvider::global(cx).complete( + model_name, + messages, + Vec::new(), + 1.0, + tool_definitions, + ) + }); + + let mut stream = completion?.await?; + while let Some(delta) = stream.next().await { + let delta = delta?; + this.update(cx, |this, cx| { + if let Some(ChatMessage::Assistant(AssistantMessage { messages, .. 
})) = + this.messages.last_mut() + { + if messages.is_empty() { + messages.push(AssistantMessagePart { + body: cx.new_view(|cx| { + Markdown::new( + "".into(), + this.markdown_style.clone(), + Some(this.language_registry.clone()), + cx, + ) + }), + tool_calls: Vec::new(), + }) + } + + let message = messages.last_mut().unwrap(); + + if let Some(content) = &delta.content { + message + .body + .update(cx, |message, cx| message.append(&content, cx)); + } + + for tool_call_delta in delta.tool_calls { + let index = tool_call_delta.index as usize; + if index >= message.tool_calls.len() { + message.tool_calls.resize_with(index + 1, Default::default); + } + let tool_call = &mut message.tool_calls[index]; + + if let Some(id) = &tool_call_delta.id { + tool_call.id.push_str(id); + } + + match tool_call_delta.variant { + Some(proto::tool_call_delta::Variant::Function( + tool_call_delta, + )) => { + this.tool_registry.update_tool_call( + tool_call, + tool_call_delta.name.as_deref(), + tool_call_delta.arguments.as_deref(), + cx, + ); + } + None => {} + } + } + + cx.notify(); + } else { + unreachable!() + } + })?; + } + + anyhow::Ok(()) + } + .await; + + let mut tool_tasks = Vec::new(); + this.update(cx, |this, cx| { + if let Some(ChatMessage::Assistant(AssistantMessage { + error: message_error, + messages, + .. + })) = this.messages.last_mut() + { + if let Err(error) = complete { + message_error.replace(SharedString::from(error.to_string())); + cx.notify(); + } else { + if let Some(current_message) = messages.last_mut() { + for tool_call in current_message.tool_calls.iter_mut() { + tool_tasks + .extend(this.tool_registry.execute_tool_call(tool_call, cx)); + } + } + } + } + })?; + + // This ends recursion on calling for responses after tools + if tool_tasks.is_empty() { + return Ok(()); + } + + join_all(tool_tasks.into_iter()).await; + } + } + + fn push_new_assistant_message(&mut self, cx: &mut ViewContext) { + // If the last message is a grouped assistant message, add to the grouped message + if let Some(ChatMessage::Assistant(AssistantMessage { messages, .. 
})) = + self.messages.last_mut() + { + messages.push(AssistantMessagePart { + body: cx.new_view(|cx| { + Markdown::new( + "".into(), + self.markdown_style.clone(), + Some(self.language_registry.clone()), + cx, + ) + }), + tool_calls: Vec::new(), + }); + return; + } + + let message = ChatMessage::Assistant(AssistantMessage { + id: self.next_message_id.post_inc(), + messages: vec![AssistantMessagePart { + body: cx.new_view(|cx| { + Markdown::new( + "".into(), + self.markdown_style.clone(), + Some(self.language_registry.clone()), + cx, + ) + }), + tool_calls: Vec::new(), + }], + error: None, + }); + self.push_message(message, cx); + } + + fn push_message(&mut self, message: ChatMessage, cx: &mut ViewContext) { + let old_len = self.messages.len(); + let focus_handle = Some(message.focus_handle(cx)); + self.messages.push(message); + self.list_state + .splice_focusable(old_len..old_len, focus_handle); + cx.notify(); + } + + fn pop_message(&mut self, cx: &mut ViewContext) { + if self.messages.is_empty() { + return; + } + + self.messages.pop(); + self.list_state + .splice(self.messages.len()..self.messages.len() + 1, 0); + cx.notify(); + } + + fn truncate_messages(&mut self, last_message_id: MessageId, cx: &mut ViewContext) { + if let Some(index) = self.messages.iter().position(|message| match message { + ChatMessage::User(message) => message.id == last_message_id, + ChatMessage::Assistant(message) => message.id == last_message_id, + }) { + self.list_state.splice(index + 1..self.messages.len(), 0); + self.messages.truncate(index + 1); + cx.notify(); + } + } + + fn is_message_collapsed(&self, id: &MessageId) -> bool { + self.collapsed_messages.get(id).copied().unwrap_or_default() + } + + fn toggle_message_collapsed(&mut self, id: MessageId) { + let entry = self.collapsed_messages.entry(id).or_insert(false); + *entry = !*entry; + } + + fn reset(&mut self) { + self.messages.clear(); + self.list_state.reset(0); + self.editing_message.take(); + self.collapsed_messages.clear(); + } + + fn new_conversation(&mut self, cx: &mut ViewContext) { + let messages = std::mem::take(&mut self.messages) + .into_iter() + .map(|message| self.serialize_message(message, cx)) + .collect::>(); + + self.reset(); + + let title = messages + .first() + .map(|message| match message { + SavedChatMessage::User { body, .. } => body.clone(), + SavedChatMessage::Assistant { messages, .. } => messages + .first() + .map(|message| message.body.to_string()) + .unwrap_or_default(), + }) + .unwrap_or_else(|| "A conversation with the assistant.".to_string()); + + let saved_conversation = SavedConversation { + version: "0.3.0".to_string(), + title, + messages, + }; + + let discriminant = 1; + + let path = CONVERSATIONS_DIR.join(&format!( + "{title} - {discriminant}.zed.{version}.json", + title = saved_conversation.title, + version = saved_conversation.version + )); + + cx.spawn({ + let fs = self.fs.clone(); + |_this, _cx| async move { + fs.create_dir(CONVERSATIONS_DIR.as_ref()).await?; + fs.atomic_write(path, serde_json::to_string(&saved_conversation)?) 
+ .await?; + + anyhow::Ok(()) + } + }) + .detach_and_log_err(cx); + } + + fn render_error( + &self, + error: Option, + _ix: usize, + cx: &mut ViewContext, + ) -> AnyElement { + let theme = cx.theme(); + + if let Some(error) = error { + div() + .py_1() + .px_2() + .mx_neg_1() + .rounded_md() + .border_1() + .border_color(theme.status().error_border) + // .bg(theme.status().error_background) + .text_color(theme.status().error) + .child(error.clone()) + .into_any_element() + } else { + div().into_any_element() + } + } + + fn render_message(&self, ix: usize, cx: &mut ViewContext) -> AnyElement { + let is_first = ix == 0; + let is_last = ix == self.messages.len().saturating_sub(1); + + let padding = Spacing::Large.rems(cx); + + // Whenever there's a run of assistant messages, group as one Assistant UI element + + match &self.messages[ix] { + ChatMessage::User(UserMessage { + id, + body, + attachments, + }) => div() + .id(SharedString::from(format!("message-{}-container", id.0))) + .when(is_first, |this| this.pt(padding)) + .map(|element| { + if let Some(editing_message) = self.editing_message.as_ref() { + if editing_message.id == *id { + return element.child(Composer::new( + editing_message.body.clone(), + self.project_index_button.clone(), + self.active_file_button.clone(), + crate::ui::ModelSelector::new( + cx.view().downgrade(), + self.model.clone(), + ) + .into_any_element(), + )); + } + } + + element + .on_click(cx.listener({ + let id = *id; + let body = body.clone(); + move |assistant_chat, event: &ClickEvent, cx| { + if event.up.click_count == 2 { + let body = cx.new_view(|cx| { + let mut editor = Editor::auto_height(80, cx); + let source = Arc::from(body.read(cx).source()); + editor.set_text(source, cx); + editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); + editor + }); + assistant_chat.editing_message = Some(EditingMessage { + id, + body: body.clone(), + }); + body.focus_handle(cx).focus(cx); + } + } + })) + .child( + crate::ui::ChatMessage::new( + *id, + UserOrAssistant::User(self.user_store.read(cx).current_user()), + // todo!(): clean up the vec usage + vec![ + body.clone().into_any_element(), + h_flex() + .gap_2() + .children( + attachments + .iter() + .map(|attachment| attachment.view.clone()), + ) + .into_any_element(), + ], + self.is_message_collapsed(id), + Box::new(cx.listener({ + let id = *id; + move |assistant_chat, _event, _cx| { + assistant_chat.toggle_message_collapsed(id) + } + })), + ) + // TODO: Wire up selections. + .selected(is_last), + ) + }) + .into_any(), + ChatMessage::Assistant(AssistantMessage { + id, + messages, + error, + .. + }) => { + let mut message_elements = Vec::new(); + + for message in messages { + if !message.body.read(cx).source().is_empty() { + message_elements.push(div().child(message.body.clone()).into_any()) + } + + let tools = message + .tool_calls + .iter() + .filter_map(|tool_call| self.tool_registry.render_tool_call(tool_call, cx)) + .collect::>(); + + if !tools.is_empty() { + message_elements.push(div().children(tools).into_any()) + } + } + + if message_elements.is_empty() { + message_elements.push(::ui::Label::new("Researching...").into_any_element()) + } + + div() + .when(is_first, |this| this.pt(padding)) + .child( + crate::ui::ChatMessage::new( + *id, + UserOrAssistant::Assistant, + message_elements, + self.is_message_collapsed(id), + Box::new(cx.listener({ + let id = *id; + move |assistant_chat, _event, _cx| { + assistant_chat.toggle_message_collapsed(id) + } + })), + ) + // TODO: Wire up selections. 
+ .selected(is_last), + ) + .child(self.render_error(error.clone(), ix, cx)) + .into_any() + } + } + } + + fn completion_messages(&self, cx: &mut WindowContext) -> Task>> { + let project_index = self.project_index.read(cx); + let project = project_index.project(); + let fs = project_index.fs(); + + let mut project_context = ProjectContext::new(project, fs); + let mut completion_messages = Vec::new(); + + for message in &self.messages { + match message { + ChatMessage::User(UserMessage { + body, attachments, .. + }) => { + for attachment in attachments { + if let Some(content) = attachment.generate(&mut project_context, cx) { + completion_messages.push(CompletionMessage::System { content }); + } + } + + // Show user's message last so that the assistant is grounded in the user's request + completion_messages.push(CompletionMessage::User { + content: body.read(cx).source().to_string(), + }); + } + ChatMessage::Assistant(AssistantMessage { messages, .. }) => { + for message in messages { + let body = message.body.clone(); + + if body.read(cx).source().is_empty() && message.tool_calls.is_empty() { + continue; + } + + let tool_calls_from_assistant = message + .tool_calls + .iter() + .map(|tool_call| ToolCall { + content: ToolCallContent::Function { + function: FunctionContent { + name: tool_call.name.clone(), + arguments: tool_call.arguments.clone(), + }, + }, + id: tool_call.id.clone(), + }) + .collect(); + + completion_messages.push(CompletionMessage::Assistant { + content: Some(body.read(cx).source().to_string()), + tool_calls: tool_calls_from_assistant, + }); + + for tool_call in &message.tool_calls { + // Every tool call _must_ have a result by ID, otherwise OpenAI will error. + let content = self.tool_registry.content_for_tool_call( + tool_call, + &mut project_context, + cx, + ); + completion_messages.push(CompletionMessage::Tool { + content, + tool_call_id: tool_call.id.clone(), + }); + } + } + } + } + } + + let system_message = project_context.generate_system_message(cx); + + cx.background_executor().spawn(async move { + let content = system_message.await?; + completion_messages.insert(0, CompletionMessage::System { content }); + Ok(completion_messages) + }) + } + + fn serialize_message( + &self, + message: ChatMessage, + cx: &mut ViewContext, + ) -> SavedChatMessage { + match message { + ChatMessage::User(message) => SavedChatMessage::User { + id: message.id, + body: message.body.read(cx).source().into(), + attachments: message + .attachments + .iter() + .map(|attachment| { + self.attachment_registry + .serialize_user_attachment(attachment) + }) + .collect(), + }, + ChatMessage::Assistant(message) => SavedChatMessage::Assistant { + id: message.id, + error: message.error, + messages: message + .messages + .iter() + .map(|message| SavedAssistantMessagePart { + body: message.body.read(cx).source().to_string().into(), + tool_calls: message + .tool_calls + .iter() + .filter_map(|tool_call| { + self.tool_registry + .serialize_tool_call(tool_call, cx) + .log_err() + }) + .collect(), + }) + .collect(), + }, + } + } +} + +impl Render for AssistantChat { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let header_height = Spacing::Small.rems(cx) * 2.0 + ButtonSize::Default.rems(); + + div() + .relative() + .flex_1() + .v_flex() + .key_context("AssistantChat") + .on_action(cx.listener(Self::submit)) + .on_action(cx.listener(Self::cancel)) + .text_color(Color::Default.color(cx)) + .child(list(self.list_state.clone()).flex_1().pt(header_height)) + .child( + h_flex() + .absolute() 
+ .top_0() + .justify_between() + .w_full() + .h(header_height) + .p(Spacing::Small.rems(cx)) + .child( + IconButton::new( + "toggle-saved-conversations", + if self.saved_conversations_open { + IconName::ChevronRight + } else { + IconName::ChevronLeft + }, + ) + .on_click(cx.listener(|this, _event, _cx| { + this.toggle_saved_conversations(); + })) + .tooltip(move |cx| Tooltip::text("Switch Conversations", cx)), + ) + .child( + h_flex() + .gap(Spacing::Large.rems(cx)) + .child( + IconButton::new("new-conversation", IconName::Plus) + .on_click(cx.listener(move |this, _event, cx| { + this.new_conversation(cx); + })) + .tooltip(move |cx| Tooltip::text("New Conversation", cx)), + ) + .child( + IconButton::new("assistant-menu", IconName::Menu) + .disabled(true) + .tooltip(move |cx| { + Tooltip::text( + "Coming soon – Assistant settings & controls", + cx, + ) + }), + ), + ), + ) + .when(self.saved_conversations_open, |element| { + element.child( + h_flex() + .absolute() + .top(header_height) + .w_full() + .child(self.saved_conversations.clone()), + ) + }) + .child(Composer::new( + self.composer_editor.clone(), + self.project_index_button.clone(), + self.active_file_button.clone(), + crate::ui::ModelSelector::new(cx.view().downgrade(), self.model.clone()) + .into_any_element(), + )) + } +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] +pub struct MessageId(usize); + +impl MessageId { + fn post_inc(&mut self) -> Self { + let id = *self; + self.0 += 1; + id + } +} + +enum ChatMessage { + User(UserMessage), + Assistant(AssistantMessage), +} + +impl ChatMessage { + fn focus_handle(&self, cx: &AppContext) -> Option { + match self { + ChatMessage::User(message) => Some(message.body.focus_handle(cx)), + ChatMessage::Assistant(_) => None, + } + } +} + +struct UserMessage { + pub id: MessageId, + pub body: View, + pub attachments: Vec, +} + +struct AssistantMessagePart { + pub body: View, + pub tool_calls: Vec, +} + +struct AssistantMessage { + pub id: MessageId, + pub messages: Vec, + pub error: Option, +} diff --git a/crates/assistant2/src/assistant_settings.rs b/crates/assistant2/src/assistant_settings.rs new file mode 100644 index 0000000..7d532fa --- /dev/null +++ b/crates/assistant2/src/assistant_settings.rs @@ -0,0 +1,26 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Default, Debug, Deserialize, Serialize, Clone)] +pub struct AssistantSettings { + pub enabled: bool, +} + +#[derive(Default, Debug, Deserialize, Serialize, Clone, JsonSchema)] +pub struct AssistantSettingsContent { + pub enabled: Option, +} + +impl Settings for AssistantSettings { + const KEY: Option<&'static str> = Some("assistant_v2"); + + type FileContent = AssistantSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + Ok(sources.json_merge().unwrap_or_else(|_| Default::default())) + } +} diff --git a/crates/assistant2/src/attachments.rs b/crates/assistant2/src/attachments.rs new file mode 100644 index 0000000..2187f85 --- /dev/null +++ b/crates/assistant2/src/attachments.rs @@ -0,0 +1,3 @@ +mod active_file; + +pub use active_file::*; diff --git a/crates/assistant2/src/attachments/active_file.rs b/crates/assistant2/src/attachments/active_file.rs new file mode 100644 index 0000000..744d926 --- /dev/null +++ b/crates/assistant2/src/attachments/active_file.rs @@ -0,0 +1,144 @@ +use std::{path::PathBuf, sync::Arc}; + +use anyhow::{anyhow, Result}; +use 
assistant_tooling::{AttachmentOutput, LanguageModelAttachment, ProjectContext}; +use editor::Editor; +use gpui::{Render, Task, View, WeakModel, WeakView}; +use language::Buffer; +use project::ProjectPath; +use serde::{Deserialize, Serialize}; +use ui::{prelude::*, ButtonLike, Tooltip, WindowContext}; +use util::maybe; +use workspace::Workspace; + +#[derive(Serialize, Deserialize)] +pub struct ActiveEditorAttachment { + #[serde(skip)] + buffer: Option>, + path: Option, +} + +pub struct FileAttachmentView { + project_path: Option, + buffer: Option>, + error: Option, +} + +impl Render for FileAttachmentView { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + if let Some(error) = &self.error { + return div().child(error.to_string()).into_any_element(); + } + + let filename: SharedString = self + .project_path + .as_ref() + .and_then(|p| p.path.file_name()?.to_str()) + .unwrap_or("Untitled") + .to_string() + .into(); + + ButtonLike::new("file-attachment") + .child( + h_flex() + .gap_1() + .bg(cx.theme().colors().editor_background) + .rounded_md() + .child(ui::Icon::new(IconName::File)) + .child(filename.clone()), + ) + .tooltip(move |cx| Tooltip::with_meta("File Attached", None, filename.clone(), cx)) + .into_any_element() + } +} + +impl AttachmentOutput for FileAttachmentView { + fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String { + if let Some(path) = &self.project_path { + project.add_file(path.clone()); + return format!("current file: {}", path.path.display()); + } + + if let Some(buffer) = self.buffer.as_ref().and_then(|buffer| buffer.upgrade()) { + return format!("current untitled buffer text:\n{}", buffer.read(cx).text()); + } + + String::new() + } +} + +pub struct ActiveEditorAttachmentTool { + workspace: WeakView, +} + +impl ActiveEditorAttachmentTool { + pub fn new(workspace: WeakView, _cx: &mut WindowContext) -> Self { + Self { workspace } + } +} + +impl LanguageModelAttachment for ActiveEditorAttachmentTool { + type Output = ActiveEditorAttachment; + type View = FileAttachmentView; + + fn name(&self) -> Arc { + "active-editor-attachment".into() + } + + fn run(&self, cx: &mut WindowContext) -> Task> { + Task::ready(maybe!({ + let active_buffer = self + .workspace + .update(cx, |workspace, cx| { + workspace + .active_item(cx) + .and_then(|item| Some(item.act_as::(cx)?.read(cx).buffer().clone())) + })? 
+ .ok_or_else(|| anyhow!("no active buffer"))?; + + let buffer = active_buffer.read(cx); + + if let Some(buffer) = buffer.as_singleton() { + let path = project::File::from_dyn(buffer.read(cx).file()) + .and_then(|file| file.worktree.read(cx).absolutize(&file.path).ok()); + return Ok(ActiveEditorAttachment { + buffer: Some(buffer.downgrade()), + path, + }); + } else { + Err(anyhow!("no active buffer")) + } + })) + } + + fn view( + &self, + output: Result, + cx: &mut WindowContext, + ) -> View { + let error; + let project_path; + let buffer; + match output { + Ok(output) => { + error = None; + let workspace = self.workspace.upgrade().unwrap(); + let project = workspace.read(cx).project(); + project_path = output + .path + .and_then(|path| project.read(cx).project_path_for_absolute_path(&path, cx)); + buffer = output.buffer; + } + Err(err) => { + error = Some(err); + buffer = None; + project_path = None; + } + } + cx.new_view(|_cx| FileAttachmentView { + project_path, + buffer, + error, + }) + } +} diff --git a/crates/assistant2/src/completion_provider.rs b/crates/assistant2/src/completion_provider.rs new file mode 100644 index 0000000..deb87de --- /dev/null +++ b/crates/assistant2/src/completion_provider.rs @@ -0,0 +1,179 @@ +use anyhow::Result; +use assistant_tooling::ToolFunctionDefinition; +use client::{proto, Client}; +use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt}; +use gpui::Global; +use std::sync::Arc; + +pub use open_ai::RequestMessage as CompletionMessage; + +#[derive(Clone)] +pub struct CompletionProvider(Arc); + +impl CompletionProvider { + pub fn new(backend: impl CompletionProviderBackend) -> Self { + Self(Arc::new(backend)) + } + + pub fn default_model(&self) -> String { + self.0.default_model() + } + + pub fn available_models(&self) -> Vec { + self.0.available_models() + } + + pub fn complete( + &self, + model: String, + messages: Vec, + stop: Vec, + temperature: f32, + tools: Vec, + ) -> BoxFuture<'static, Result>>> + { + self.0.complete(model, messages, stop, temperature, tools) + } +} + +impl Global for CompletionProvider {} + +pub trait CompletionProviderBackend: 'static { + fn default_model(&self) -> String; + fn available_models(&self) -> Vec; + fn complete( + &self, + model: String, + messages: Vec, + stop: Vec, + temperature: f32, + tools: Vec, + ) -> BoxFuture<'static, Result>>>; +} + +pub struct CloudCompletionProvider { + client: Arc, +} + +impl CloudCompletionProvider { + pub fn new(client: Arc) -> Self { + Self { client } + } +} + +impl CompletionProviderBackend for CloudCompletionProvider { + fn default_model(&self) -> String { + "gpt-4-turbo".into() + } + + fn available_models(&self) -> Vec { + vec!["gpt-4-turbo".into(), "gpt-4".into(), "gpt-3.5-turbo".into()] + } + + fn complete( + &self, + model: String, + messages: Vec, + stop: Vec, + temperature: f32, + tools: Vec, + ) -> BoxFuture<'static, Result>>> + { + let client = self.client.clone(); + let tools: Vec = tools + .iter() + .filter_map(|tool| { + Some(proto::ChatCompletionTool { + variant: Some(proto::chat_completion_tool::Variant::Function( + proto::chat_completion_tool::FunctionObject { + name: tool.name.clone(), + description: Some(tool.description.clone()), + parameters: Some(serde_json::to_string(&tool.parameters).ok()?), + }, + )), + }) + }) + .collect(); + + let tool_choice = match tools.is_empty() { + true => None, + false => Some("auto".into()), + }; + + async move { + let stream = client + .request_stream(proto::CompleteWithLanguageModel { + model, + messages: messages 
+ .into_iter() + .map(|message| match message { + CompletionMessage::Assistant { + content, + tool_calls, + } => proto::LanguageModelRequestMessage { + role: proto::LanguageModelRole::LanguageModelAssistant as i32, + content: content.unwrap_or_default(), + tool_call_id: None, + tool_calls: tool_calls + .into_iter() + .map(|tool_call| match tool_call.content { + open_ai::ToolCallContent::Function { function } => { + proto::ToolCall { + id: tool_call.id, + variant: Some(proto::tool_call::Variant::Function( + proto::tool_call::FunctionCall { + name: function.name, + arguments: function.arguments, + }, + )), + } + } + }) + .collect(), + }, + CompletionMessage::User { content } => { + proto::LanguageModelRequestMessage { + role: proto::LanguageModelRole::LanguageModelUser as i32, + content, + tool_call_id: None, + tool_calls: Vec::new(), + } + } + CompletionMessage::System { content } => { + proto::LanguageModelRequestMessage { + role: proto::LanguageModelRole::LanguageModelSystem as i32, + content, + tool_calls: Vec::new(), + tool_call_id: None, + } + } + CompletionMessage::Tool { + content, + tool_call_id, + } => proto::LanguageModelRequestMessage { + role: proto::LanguageModelRole::LanguageModelTool as i32, + content, + tool_call_id: Some(tool_call_id), + tool_calls: Vec::new(), + }, + }) + .collect(), + stop, + temperature, + tool_choice, + tools, + }) + .await?; + + Ok(stream + .filter_map(|response| async move { + match response { + Ok(mut response) => Some(Ok(response.choices.pop()?.delta?)), + Err(error) => Some(Err(error)), + } + }) + .boxed()) + } + .boxed() + } +} diff --git a/crates/assistant2/src/saved_conversation.rs b/crates/assistant2/src/saved_conversation.rs new file mode 100644 index 0000000..a46f8a5 --- /dev/null +++ b/crates/assistant2/src/saved_conversation.rs @@ -0,0 +1,90 @@ +use std::cmp::Reverse; +use std::ffi::OsStr; +use std::path::PathBuf; +use std::sync::Arc; + +use anyhow::Result; +use assistant_tooling::{SavedToolFunctionCall, SavedUserAttachment}; +use fs::Fs; +use futures::StreamExt; +use gpui::SharedString; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use util::paths::CONVERSATIONS_DIR; + +use crate::MessageId; + +#[derive(Serialize, Deserialize)] +pub struct SavedConversation { + /// The schema version of the conversation. + pub version: String, + /// The title of the conversation, generated by the Assistant. 
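+    //
+    // Illustrative only, not part of the original change: with serde's default
+    // externally tagged enum encoding, a saved conversation of this shape would
+    // serialize roughly as
+    //
+    //   {
+    //     "version": "0.3.0",
+    //     "title": "A conversation with the assistant.",
+    //     "messages": [
+    //       { "User": { "id": 0, "body": "Hello!", "attachments": [] } },
+    //       { "Assistant": { "id": 1, "messages": [], "error": null } }
+    //     ]
+    //   }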
+ pub title: String, + pub messages: Vec, +} + +#[derive(Serialize, Deserialize)] +pub enum SavedChatMessage { + User { + id: MessageId, + body: String, + attachments: Vec, + }, + Assistant { + id: MessageId, + messages: Vec, + error: Option, + }, +} + +#[derive(Serialize, Deserialize)] +pub struct SavedAssistantMessagePart { + pub body: SharedString, + pub tool_calls: Vec, +} + +pub struct SavedConversationMetadata { + pub title: String, + pub path: PathBuf, + pub mtime: chrono::DateTime, +} + +impl SavedConversationMetadata { + pub async fn list(fs: Arc) -> Result> { + fs.create_dir(&CONVERSATIONS_DIR).await?; + + let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?; + let mut conversations = Vec::new(); + while let Some(path) = paths.next().await { + let path = path?; + if path.extension() != Some(OsStr::new("json")) { + continue; + } + + let pattern = r" - \d+.zed.\d.\d.\d.json$"; + let re = Regex::new(pattern).unwrap(); + + let metadata = fs.metadata(&path).await?; + if let Some((file_name, metadata)) = path + .file_name() + .and_then(|name| name.to_str()) + .zip(metadata) + { + // This is used to filter out conversations saved by the old assistant. + if !re.is_match(file_name) { + continue; + } + + let title = re.replace(file_name, ""); + conversations.push(Self { + title: title.into_owned(), + path, + mtime: metadata.mtime.into(), + }); + } + } + conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime)); + + Ok(conversations) + } +} diff --git a/crates/assistant2/src/saved_conversations.rs b/crates/assistant2/src/saved_conversations.rs new file mode 100644 index 0000000..4ddb90d --- /dev/null +++ b/crates/assistant2/src/saved_conversations.rs @@ -0,0 +1,196 @@ +use std::sync::Arc; + +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, View, WeakView}; +use picker::{Picker, PickerDelegate}; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; +use util::ResultExt; + +use crate::saved_conversation::SavedConversationMetadata; + +pub struct SavedConversations { + focus_handle: FocusHandle, + picker: Option>>, +} + +impl EventEmitter for SavedConversations {} + +impl FocusableView for SavedConversations { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + if let Some(picker) = self.picker.as_ref() { + picker.focus_handle(cx) + } else { + self.focus_handle.clone() + } + } +} + +impl SavedConversations { + pub fn new(cx: &mut ViewContext) -> Self { + Self { + focus_handle: cx.focus_handle(), + picker: None, + } + } + + pub fn init( + &mut self, + saved_conversations: Vec, + cx: &mut ViewContext, + ) { + let delegate = + SavedConversationPickerDelegate::new(cx.view().downgrade(), saved_conversations); + self.picker = Some(cx.new_view(|cx| Picker::uniform_list(delegate, cx).modal(false))); + } +} + +impl Render for SavedConversations { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + v_flex() + .w_full() + .bg(cx.theme().colors().panel_background) + .children(self.picker.clone()) + } +} + +pub struct SavedConversationPickerDelegate { + view: WeakView, + saved_conversations: Vec, + selected_index: usize, + matches: Vec, +} + +impl SavedConversationPickerDelegate { + pub fn new( + weak_view: WeakView, + saved_conversations: Vec, + ) -> Self { + let matches = saved_conversations + .iter() + .map(|conversation| StringMatch { + candidate_id: 0, + score: 0.0, + positions: Default::default(), + string: conversation.title.clone(), + }) + 
.collect(); + + Self { + view: weak_view, + saved_conversations, + selected_index: 0, + matches, + } + } +} + +impl PickerDelegate for SavedConversationPickerDelegate { + type ListItem = ui::ListItem; + + fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Select saved conversation...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let background_executor = cx.background_executor().clone(); + let candidates = self + .saved_conversations + .iter() + .enumerate() + .map(|(id, conversation)| { + let text = conversation.title.clone(); + + StringMatchCandidate { + id, + char_bag: text.as_str().into(), + string: text, + } + }) + .collect::>(); + + cx.spawn(move |this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .into_iter() + .enumerate() + .map(|(index, candidate)| StringMatch { + candidate_id: index, + string: candidate.string, + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background_executor, + ) + .await + }; + + this.update(&mut cx, |this, _cx| { + this.delegate.matches = matches; + this.delegate.selected_index = this + .delegate + .selected_index + .min(this.delegate.matches.len().saturating_sub(1)); + }) + .log_err(); + }) + } + + fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext>) { + if self.matches.is_empty() { + self.dismissed(cx); + return; + } + + // TODO: Implement selecting a saved conversation. 
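+        //
+        // A possible sketch, assuming the actual loading gets wired up elsewhere:
+        // resolve the picked entry through the current match, hand its `path` to the
+        // loader, then close the picker.
+        //
+        //     let selected_ix = self.matches[self.selected_index].candidate_id;
+        //     let _path = self.saved_conversations[selected_ix].path.clone();
+        //     // ...load the conversation at `_path` into the chat view here...
+        //     self.dismissed(cx);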
+ } + + fn dismissed(&mut self, cx: &mut ui::prelude::ViewContext>) { + self.view + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _cx: &mut ViewContext>, + ) -> Option { + let conversation_match = &self.matches[ix]; + let _conversation = &self.saved_conversations[conversation_match.candidate_id]; + + Some( + ListItem::new(ix) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child(HighlightedLabel::new( + conversation_match.string.clone(), + conversation_match.positions.clone(), + )), + ) + } +} diff --git a/crates/assistant2/src/tools.rs b/crates/assistant2/src/tools.rs new file mode 100644 index 0000000..f60f41c --- /dev/null +++ b/crates/assistant2/src/tools.rs @@ -0,0 +1,7 @@ +mod annotate_code; +mod create_buffer; +mod project_index; + +pub use annotate_code::*; +pub use create_buffer::*; +pub use project_index::*; diff --git a/crates/assistant2/src/tools/annotate_code.rs b/crates/assistant2/src/tools/annotate_code.rs new file mode 100644 index 0000000..afee701 --- /dev/null +++ b/crates/assistant2/src/tools/annotate_code.rs @@ -0,0 +1,304 @@ +use anyhow::Result; +use assistant_tooling::{LanguageModelTool, ProjectContext, ToolView}; +use editor::{ + display_map::{BlockContext, BlockDisposition, BlockProperties, BlockStyle}, + Editor, MultiBuffer, +}; +use futures::{channel::mpsc::UnboundedSender, StreamExt as _}; +use gpui::{prelude::*, AnyElement, AsyncWindowContext, Model, Task, View, WeakView}; +use language::ToPoint; +use project::{search::SearchQuery, Project, ProjectPath}; +use schemars::JsonSchema; +use serde::Deserialize; +use std::path::Path; +use ui::prelude::*; +use util::ResultExt; +use workspace::Workspace; + +pub struct AnnotationTool { + workspace: WeakView, + project: Model, +} + +impl AnnotationTool { + pub fn new(workspace: WeakView, project: Model) -> Self { + Self { workspace, project } + } +} + +#[derive(Default, Debug, Deserialize, JsonSchema, Clone)] +pub struct AnnotationInput { + /// Name for this set of annotations + #[serde(default = "default_title")] + title: String, + /// Excerpts from the file to show to the user. + excerpts: Vec, +} + +fn default_title() -> String { + "Untitled".to_string() +} + +#[derive(Debug, Deserialize, JsonSchema, Clone)] +struct Excerpt { + /// Path to the file + path: String, + /// A short, distinctive string that appears in the file, used to define a location in the file. + text_passage: String, + /// Text to display above the code excerpt + annotation: String, +} + +impl LanguageModelTool for AnnotationTool { + type View = AnnotationResultView; + + fn name(&self) -> String { + "annotate_code".to_string() + } + + fn description(&self) -> String { + "Dynamically annotate symbols in the current codebase. Opens a buffer in a panel in their editor, to the side of the conversation. 
The annotations are shown in the editor as a block decoration.".to_string() + } + + fn view(&self, cx: &mut WindowContext) -> View { + cx.new_view(|cx| { + let (tx, mut rx) = futures::channel::mpsc::unbounded(); + cx.spawn(|view, mut cx| async move { + while let Some(excerpt) = rx.next().await { + AnnotationResultView::add_excerpt(view.clone(), excerpt, &mut cx).await?; + } + anyhow::Ok(()) + }) + .detach(); + + AnnotationResultView { + project: self.project.clone(), + workspace: self.workspace.clone(), + tx, + pending_excerpt: None, + added_editor_to_workspace: false, + editor: None, + error: None, + rendered_excerpt_count: 0, + } + }) + } +} + +pub struct AnnotationResultView { + workspace: WeakView, + project: Model, + pending_excerpt: Option, + added_editor_to_workspace: bool, + editor: Option>, + tx: UnboundedSender, + error: Option, + rendered_excerpt_count: usize, +} + +impl AnnotationResultView { + async fn add_excerpt( + this: WeakView, + excerpt: Excerpt, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let project = this.update(cx, |this, _cx| this.project.clone())?; + + let worktree_id = project.update(cx, |project, cx| { + let worktree = project.worktrees().next()?; + let worktree_id = worktree.read(cx).id(); + Some(worktree_id) + })?; + + let worktree_id = if let Some(worktree_id) = worktree_id { + worktree_id + } else { + return Err(anyhow::anyhow!("No worktree found")); + }; + + let buffer_task = project.update(cx, |project, cx| { + project.open_buffer( + ProjectPath { + worktree_id, + path: Path::new(&excerpt.path).into(), + }, + cx, + ) + })?; + + let buffer = match buffer_task.await { + Ok(buffer) => buffer, + Err(error) => { + return this.update(cx, |this, cx| { + this.error = Some(error); + cx.notify(); + }) + } + }; + + let snapshot = buffer.update(cx, |buffer, _cx| buffer.snapshot())?; + let query = SearchQuery::text(&excerpt.text_passage, false, false, false, vec![], vec![])?; + let matches = query.search(&snapshot, None).await; + let Some(first_match) = matches.first() else { + log::warn!( + "text {:?} does not appear in '{}'", + excerpt.text_passage, + excerpt.path + ); + return Ok(()); + }; + + this.update(cx, |this, cx| { + let mut start = first_match.start.to_point(&snapshot); + start.column = 0; + + if let Some(editor) = &this.editor { + editor.update(cx, |editor, cx| { + let ranges = editor.buffer().update(cx, |multibuffer, cx| { + multibuffer.push_excerpts_with_context_lines( + buffer.clone(), + vec![start..start], + 5, + cx, + ) + }); + + let annotation = SharedString::from(excerpt.annotation); + editor.insert_blocks( + [BlockProperties { + position: ranges[0].start, + height: annotation.split('\n').count() as u8 + 1, + style: BlockStyle::Fixed, + render: Box::new(move |cx| Self::render_note_block(&annotation, cx)), + disposition: BlockDisposition::Above, + }], + None, + cx, + ); + }); + + if !this.added_editor_to_workspace { + this.added_editor_to_workspace = true; + this.workspace + .update(cx, |workspace, cx| { + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx); + }) + .log_err(); + } + } + })?; + + Ok(()) + } + + fn render_note_block(explanation: &SharedString, cx: &mut BlockContext) -> AnyElement { + let anchor_x = cx.anchor_x; + let gutter_width = cx.gutter_dimensions.width; + + h_flex() + .w_full() + .py_2() + .border_y_1() + .border_color(cx.theme().colors().border) + .child( + h_flex() + .justify_center() + .w(gutter_width) + .child(Icon::new(IconName::Ai).color(Color::Hint)), + ) + .child( + h_flex() + .w_full() + 
.ml(anchor_x - gutter_width) + .child(explanation.clone()), + ) + .into_any_element() + } +} + +impl Render for AnnotationResultView { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + if let Some(error) = &self.error { + ui::Label::new(error.to_string()).into_any_element() + } else { + ui::Label::new(SharedString::from(format!( + "Opened a buffer with {} excerpts", + self.rendered_excerpt_count + ))) + .into_any_element() + } + } +} + +impl ToolView for AnnotationResultView { + type Input = AnnotationInput; + type SerializedState = Option; + + fn generate(&self, _: &mut ProjectContext, _: &mut ViewContext) -> String { + if let Some(error) = &self.error { + format!("Failed to create buffer: {error:?}") + } else { + format!( + "opened {} excerpts in a buffer", + self.rendered_excerpt_count + ) + } + } + + fn set_input(&mut self, mut input: Self::Input, cx: &mut ViewContext) { + let editor = if let Some(editor) = &self.editor { + editor.clone() + } else { + let multibuffer = cx.new_model(|_cx| { + MultiBuffer::new(0, language::Capability::ReadWrite).with_title(String::new()) + }); + let editor = cx.new_view(|cx| { + Editor::for_multibuffer(multibuffer.clone(), Some(self.project.clone()), cx) + }); + + self.editor = Some(editor.clone()); + editor + }; + + editor.update(cx, |editor, cx| { + editor.buffer().update(cx, |multibuffer, cx| { + if multibuffer.title(cx) != input.title { + multibuffer.set_title(input.title.clone(), cx); + } + }); + + self.pending_excerpt = input.excerpts.pop(); + for excerpt in input.excerpts.iter().skip(self.rendered_excerpt_count) { + self.tx.unbounded_send(excerpt.clone()).ok(); + } + self.rendered_excerpt_count = input.excerpts.len(); + }); + + cx.notify(); + } + + fn execute(&mut self, _cx: &mut ViewContext) -> Task> { + if let Some(excerpt) = self.pending_excerpt.take() { + self.rendered_excerpt_count += 1; + self.tx.unbounded_send(excerpt.clone()).ok(); + } + + self.tx.close_channel(); + Task::ready(Ok(())) + } + + fn serialize(&self, _cx: &mut ViewContext) -> Self::SerializedState { + self.error.as_ref().map(|error| error.to_string()) + } + + fn deserialize( + &mut self, + output: Self::SerializedState, + _cx: &mut ViewContext, + ) -> Result<()> { + if let Some(error_message) = output { + self.error = Some(anyhow::anyhow!("{}", error_message)); + } + Ok(()) + } +} diff --git a/crates/assistant2/src/tools/create_buffer.rs b/crates/assistant2/src/tools/create_buffer.rs new file mode 100644 index 0000000..894ee75 --- /dev/null +++ b/crates/assistant2/src/tools/create_buffer.rs @@ -0,0 +1,145 @@ +use anyhow::{anyhow, Result}; +use assistant_tooling::{LanguageModelTool, ProjectContext, ToolView}; +use editor::Editor; +use gpui::{prelude::*, Model, Task, View, WeakView}; +use project::Project; +use schemars::JsonSchema; +use serde::Deserialize; +use ui::prelude::*; +use util::ResultExt; +use workspace::Workspace; + +pub struct CreateBufferTool { + workspace: WeakView, + project: Model, +} + +impl CreateBufferTool { + pub fn new(workspace: WeakView, project: Model) -> Self { + Self { workspace, project } + } +} + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +pub struct CreateBufferInput { + /// The contents of the buffer. + text: String, + + /// The name of the language to use for the buffer. + /// + /// This should be a human-readable name, like "Rust", "JavaScript", or "Python". 
+ language: String, +} + +impl LanguageModelTool for CreateBufferTool { + type View = CreateBufferView; + + fn name(&self) -> String { + "create_file".to_string() + } + + fn description(&self) -> String { + "Create a new untitled file in the current codebase. Side effect: opens it in a new pane/tab for the user to edit.".to_string() + } + + fn view(&self, cx: &mut WindowContext) -> View { + cx.new_view(|_cx| CreateBufferView { + workspace: self.workspace.clone(), + project: self.project.clone(), + input: None, + error: None, + }) + } +} + +pub struct CreateBufferView { + workspace: WeakView, + project: Model, + input: Option, + error: Option, +} + +impl Render for CreateBufferView { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + ui::Label::new("Opening a buffer") + } +} + +impl ToolView for CreateBufferView { + type Input = CreateBufferInput; + + type SerializedState = (); + + fn generate(&self, _project: &mut ProjectContext, _cx: &mut ViewContext) -> String { + let Some(input) = self.input.as_ref() else { + return "No input".to_string(); + }; + + match &self.error { + None => format!("Created a new {} buffer", input.language), + Some(err) => format!("Failed to create buffer: {err:?}"), + } + } + + fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext) { + self.input = Some(input); + cx.notify(); + } + + fn execute(&mut self, cx: &mut ViewContext) -> Task> { + cx.spawn({ + let workspace = self.workspace.clone(); + let project = self.project.clone(); + let input = self.input.clone(); + |_this, mut cx| async move { + let input = input.ok_or_else(|| anyhow!("no input"))?; + + let text = input.text.clone(); + let language_name = input.language.clone(); + let language = cx + .update(|cx| { + project + .read(cx) + .languages() + .language_for_name(&language_name) + })? + .await?; + + let buffer = cx + .update(|cx| project.update(cx, |project, cx| project.create_buffer(cx)))? 
+ .await?; + + buffer.update(&mut cx, |buffer, cx| { + buffer.edit([(0..0, text)], None, cx); + buffer.set_language(Some(language), cx) + })?; + + workspace + .update(&mut cx, |workspace, cx| { + workspace.add_item_to_active_pane( + Box::new( + cx.new_view(|cx| Editor::for_buffer(buffer, Some(project), cx)), + ), + None, + cx, + ); + }) + .log_err(); + + Ok(()) + } + }) + } + + fn serialize(&self, _cx: &mut ViewContext) -> Self::SerializedState { + () + } + + fn deserialize( + &mut self, + _output: Self::SerializedState, + _cx: &mut ViewContext, + ) -> Result<()> { + Ok(()) + } +} diff --git a/crates/assistant2/src/tools/project_index.rs b/crates/assistant2/src/tools/project_index.rs new file mode 100644 index 0000000..5d28d47 --- /dev/null +++ b/crates/assistant2/src/tools/project_index.rs @@ -0,0 +1,428 @@ +use anyhow::Result; +use assistant_tooling::{LanguageModelTool, ToolView}; +use collections::BTreeMap; +use file_icons::FileIcons; +use gpui::{prelude::*, AnyElement, Model, Task}; +use project::ProjectPath; +use schemars::JsonSchema; +use semantic_index::{ProjectIndex, Status}; +use serde::{Deserialize, Serialize}; +use std::{ + fmt::Write as _, + ops::Range, + path::{Path, PathBuf}, + str::FromStr as _, + sync::Arc, +}; +use ui::{prelude::*, CollapsibleContainer, Color, Icon, IconName, Label, WindowContext}; + +const DEFAULT_SEARCH_LIMIT: usize = 20; + +pub struct ProjectIndexTool { + project_index: Model, +} + +#[derive(Default)] +enum ProjectIndexToolState { + #[default] + CollectingQuery, + Searching, + Error(anyhow::Error), + Finished { + excerpts: BTreeMap>>, + index_status: Status, + }, +} + +pub struct ProjectIndexView { + project_index: Model, + input: CodebaseQuery, + expanded_header: bool, + state: ProjectIndexToolState, +} + +#[derive(Default, Deserialize, JsonSchema)] +pub struct CodebaseQuery { + /// Semantic search query + query: String, + /// Criteria to include results + includes: Option, + /// Criteria to exclude results + excludes: Option, +} + +#[derive(Deserialize, JsonSchema, Clone, Default)] +pub struct SearchFilter { + /// Filter by file path prefix + prefix_path: Option, + /// Filter by file extension + extension: Option, + // Note: we possibly can't do content filtering very easily given the project context handling + // the final results, so we're leaving out direct string matches for now +} + +fn project_starts_with(prefix_path: Option, project_path: ProjectPath) -> bool { + if let Some(path) = &prefix_path { + if let Some(path) = PathBuf::from_str(path).ok() { + return project_path.path.starts_with(path); + } + } + + return false; +} + +impl SearchFilter { + fn matches(&self, project_path: &ProjectPath) -> bool { + let path_match = project_starts_with(self.prefix_path.clone(), project_path.clone()); + + path_match + && (if let Some(extension) = &self.extension { + project_path + .path + .extension() + .and_then(|ext| ext.to_str()) + .map(|ext| ext == extension) + .unwrap_or(false) + } else { + true + }) + } +} + +#[derive(Serialize, Deserialize)] +pub struct SerializedState { + index_status: Status, + error_message: Option, + worktrees: BTreeMap, WorktreeIndexOutput>, +} + +#[derive(Default, Serialize, Deserialize)] +struct WorktreeIndexOutput { + excerpts: BTreeMap, Vec>>, +} + +impl ProjectIndexView { + fn toggle_header(&mut self, cx: &mut ViewContext) { + self.expanded_header = !self.expanded_header; + cx.notify(); + } + + fn render_filter_section( + &mut self, + heading: &str, + filter: Option, + cx: &mut ViewContext, + ) -> Option { + let filter = 
match filter { + Some(filter) => filter, + None => return None, + }; + + // Any of the filter fields can be empty. We'll show nothing if they're all empty. + let path = filter.prefix_path.as_ref().map(|path| { + let icon_path = FileIcons::get_icon(Path::new(path), cx) + .map(SharedString::from) + .unwrap_or_else(|| SharedString::from("icons/file_icons/file.svg")); + + h_flex() + .gap_1() + .child("Paths: ") + .child(Icon::from_path(icon_path)) + .child(ui::Label::new(path.clone()).color(Color::Muted)) + }); + + let extension = filter.extension.as_ref().map(|extension| { + let icon_path = FileIcons::get_icon(Path::new(extension), cx) + .map(SharedString::from) + .unwrap_or_else(|| SharedString::from("icons/file_icons/file.svg")); + + h_flex() + .gap_1() + .child("Extensions: ") + .child(Icon::from_path(icon_path)) + .child(ui::Label::new(extension.clone()).color(Color::Muted)) + }); + + if path.is_none() && extension.is_none() { + return None; + } + + Some( + v_flex() + .child(ui::Label::new(heading.to_string())) + .gap_1() + .children(path) + .children(extension) + .into_any_element(), + ) + } +} + +impl Render for ProjectIndexView { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let query = self.input.query.clone(); + + let (header_text, content) = match &self.state { + ProjectIndexToolState::Error(error) => { + return format!("failed to search: {error:?}").into_any_element() + } + ProjectIndexToolState::CollectingQuery | ProjectIndexToolState::Searching => { + ("Searching...".to_string(), div()) + } + ProjectIndexToolState::Finished { excerpts, .. } => { + let file_count = excerpts.len(); + + if excerpts.is_empty() { + ("No results found".to_string(), div()) + } else { + let header_text = format!( + "Read {} {}", + file_count, + if file_count == 1 { "file" } else { "files" } + ); + + let el = v_flex().gap_2().children(excerpts.keys().map(|path| { + h_flex().gap_2().child(Icon::new(IconName::File)).child( + Label::new(path.path.to_string_lossy().to_string()).color(Color::Muted), + ) + })); + + (header_text, el) + } + } + }; + + let header = h_flex() + .gap_2() + .child(Icon::new(IconName::File)) + .child(header_text); + + v_flex() + .gap_3() + .child( + CollapsibleContainer::new("collapsible-container", self.expanded_header) + .start_slot(header) + .on_click(cx.listener(move |this, _, cx| { + this.toggle_header(cx); + })) + .child( + v_flex() + .gap_3() + .p_3() + .child( + h_flex() + .gap_2() + .child(Icon::new(IconName::MagnifyingGlass)) + .child(Label::new(format!("`{}`", query)).color(Color::Muted)), + ) + .children(self.render_filter_section( + "Includes", + self.input.includes.clone(), + cx, + )) + .children(self.render_filter_section( + "Excludes", + self.input.excludes.clone(), + cx, + )) + .child(content), + ), + ) + .into_any_element() + } +} + +impl ToolView for ProjectIndexView { + type Input = CodebaseQuery; + type SerializedState = SerializedState; + + fn generate( + &self, + context: &mut assistant_tooling::ProjectContext, + _: &mut ViewContext, + ) -> String { + match &self.state { + ProjectIndexToolState::CollectingQuery => String::new(), + ProjectIndexToolState::Searching => String::new(), + ProjectIndexToolState::Error(error) => format!("failed to search: {error:?}"), + ProjectIndexToolState::Finished { + excerpts, + index_status, + } => { + let mut body = "found results in the following paths:\n".to_string(); + + for (project_path, ranges) in excerpts { + context.add_excerpts(project_path.clone(), ranges); + writeln!(&mut body, "* {}", 
&project_path.path.display()).unwrap(); + } + + if *index_status != Status::Idle { + body.push_str("Still indexing. Results may be incomplete.\n"); + } + + body + } + } + } + + fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext) { + self.input = input; + cx.notify(); + } + + fn execute(&mut self, cx: &mut ViewContext) -> Task> { + self.state = ProjectIndexToolState::Searching; + cx.notify(); + + let project_index = self.project_index.read(cx); + let index_status = project_index.status(); + + // TODO: wire the filters into the search here instead of processing after. + // Otherwise we'll get zero results sometimes. + let search = project_index.search(self.input.query.clone(), DEFAULT_SEARCH_LIMIT, cx); + + let includes = self.input.includes.clone(); + let excludes = self.input.excludes.clone(); + + cx.spawn(|this, mut cx| async move { + let search_result = search.await; + this.update(&mut cx, |this, cx| { + match search_result { + Ok(search_results) => { + let mut excerpts = BTreeMap::>>::new(); + for search_result in search_results { + let project_path = ProjectPath { + worktree_id: search_result.worktree.read(cx).id(), + path: search_result.path, + }; + + if let Some(includes) = &includes { + if !includes.matches(&project_path) { + continue; + } + } else if let Some(excludes) = &excludes { + if excludes.matches(&project_path) { + continue; + } + } + + excerpts + .entry(project_path) + .or_default() + .push(search_result.range); + } + this.state = ProjectIndexToolState::Finished { + excerpts, + index_status, + }; + } + Err(error) => { + this.state = ProjectIndexToolState::Error(error); + } + } + cx.notify(); + }) + }) + } + + fn serialize(&self, cx: &mut ViewContext) -> Self::SerializedState { + let mut serialized = SerializedState { + error_message: None, + index_status: Status::Idle, + worktrees: Default::default(), + }; + match &self.state { + ProjectIndexToolState::Error(err) => serialized.error_message = Some(err.to_string()), + ProjectIndexToolState::Finished { + excerpts, + index_status, + } => { + serialized.index_status = *index_status; + if let Some(project) = self.project_index.read(cx).project().upgrade() { + let project = project.read(cx); + for (project_path, excerpts) in excerpts { + if let Some(worktree) = + project.worktree_for_id(project_path.worktree_id, cx) + { + let worktree_path = worktree.read(cx).abs_path(); + serialized + .worktrees + .entry(worktree_path) + .or_default() + .excerpts + .insert(project_path.path.clone(), excerpts.clone()); + } + } + } + } + _ => {} + } + serialized + } + + fn deserialize( + &mut self, + serialized: Self::SerializedState, + cx: &mut ViewContext, + ) -> Result<()> { + if !serialized.worktrees.is_empty() { + let mut excerpts = BTreeMap::>>::new(); + if let Some(project) = self.project_index.read(cx).project().upgrade() { + let project = project.read(cx); + for (worktree_path, worktree_state) in serialized.worktrees { + if let Some(worktree) = project + .worktrees() + .find(|worktree| worktree.read(cx).abs_path() == worktree_path) + { + let worktree_id = worktree.read(cx).id(); + for (path, serialized_excerpts) in worktree_state.excerpts { + excerpts.insert(ProjectPath { worktree_id, path }, serialized_excerpts); + } + } + } + } + self.state = ProjectIndexToolState::Finished { + excerpts, + index_status: serialized.index_status, + }; + } + cx.notify(); + Ok(()) + } +} + +impl ProjectIndexTool { + pub fn new(project_index: Model) -> Self { + Self { project_index } + } +} + +impl LanguageModelTool for ProjectIndexTool { + 
type View = ProjectIndexView; + + fn name(&self) -> String { + "semantic_search_codebase".to_string() + } + + fn description(&self) -> String { + unindent::unindent( + r#"This search tool uses a semantic index to perform search queries across your codebase, identifying and returning excerpts of text and code possibly related to the query. + + Ideal for: + - Discovering implementations of similar logic within the project + - Finding usage examples of functions, classes/structures, libraries, and other code elements + - Developing understanding of the codebase's architecture and design + + Note: The search's effectiveness is directly related to the current state of the codebase and the specificity of your query. It is recommended that you use snippets of code that are similar to the code you wish to find."#, + ) + } + + fn view(&self, cx: &mut WindowContext) -> gpui::View { + cx.new_view(|_| ProjectIndexView { + state: ProjectIndexToolState::CollectingQuery, + input: Default::default(), + expanded_header: false, + project_index: self.project_index.clone(), + }) + } +} diff --git a/crates/assistant2/src/ui.rs b/crates/assistant2/src/ui.rs new file mode 100644 index 0000000..3333620 --- /dev/null +++ b/crates/assistant2/src/ui.rs @@ -0,0 +1,17 @@ +mod active_file_button; +mod chat_message; +mod chat_notice; +mod composer; +mod project_index_button; + +#[cfg(feature = "stories")] +mod stories; + +pub use active_file_button::*; +pub use chat_message::*; +pub use chat_notice::*; +pub use composer::*; +pub use project_index_button::*; + +#[cfg(feature = "stories")] +pub use stories::*; diff --git a/crates/assistant2/src/ui/active_file_button.rs b/crates/assistant2/src/ui/active_file_button.rs new file mode 100644 index 0000000..1041578 --- /dev/null +++ b/crates/assistant2/src/ui/active_file_button.rs @@ -0,0 +1,134 @@ +use crate::attachments::ActiveEditorAttachmentTool; +use assistant_tooling::AttachmentRegistry; +use editor::Editor; +use gpui::{prelude::*, Subscription, View}; +use std::sync::Arc; +use ui::{prelude::*, ButtonLike, Color, Icon, IconName, Tooltip}; +use workspace::Workspace; + +#[derive(Clone)] +enum Status { + ActiveFile(String), + #[allow(dead_code)] + NoFile, +} + +pub struct ActiveFileButton { + attachment_registry: Arc, + status: Status, + #[allow(dead_code)] + workspace_subscription: Subscription, +} + +impl ActiveFileButton { + pub fn new( + attachment_registry: Arc, + workspace: View, + cx: &mut ViewContext, + ) -> Self { + let workspace_subscription = cx.subscribe(&workspace, Self::handle_workspace_event); + + cx.defer(move |this, cx| this.update_active_buffer(workspace.clone(), cx)); + + Self { + attachment_registry, + status: Status::NoFile, + workspace_subscription, + } + } + + pub fn set_enabled(&mut self, enabled: bool) { + self.attachment_registry + .set_attachment_tool_enabled::(enabled); + } + + pub fn update_active_buffer(&mut self, workspace: View, cx: &mut ViewContext) { + let active_buffer = workspace + .read(cx) + .active_item(cx) + .and_then(|item| Some(item.act_as::(cx)?.read(cx).buffer().clone())); + + if let Some(buffer) = active_buffer { + let buffer = buffer.read(cx); + + if let Some(singleton) = buffer.as_singleton() { + let singleton = singleton.read(cx); + + let filename: String = singleton + .file() + .map(|file| file.path().to_string_lossy()) + .unwrap_or("Untitled".into()) + .into(); + + self.status = Status::ActiveFile(filename); + } + } + } + + fn handle_workspace_event( + &mut self, + workspace: View, + event: &workspace::Event, + cx: &mut 
ViewContext, + ) { + if let workspace::Event::ActiveItemChanged = event { + self.update_active_buffer(workspace, cx); + } + } +} + +impl Render for ActiveFileButton { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let is_enabled = self + .attachment_registry + .is_attachment_tool_enabled::(); + + let icon = if is_enabled { + Icon::new(IconName::File) + .size(IconSize::XSmall) + .color(Color::Default) + } else { + Icon::new(IconName::File) + .size(IconSize::XSmall) + .color(Color::Disabled) + }; + + let indicator = None; + + let status = self.status.clone(); + + ButtonLike::new("active-file-button") + .child( + ui::IconWithIndicator::new(icon, indicator) + .indicator_border_color(Some(gpui::transparent_black())), + ) + .tooltip({ + move |cx| { + let status = status.clone(); + let (tooltip, meta) = match (is_enabled, status) { + (false, _) => ( + "Active file disabled".to_string(), + Some("Click to enable".to_string()), + ), + (true, Status::ActiveFile(filename)) => ( + format!("Active file {filename} enabled"), + Some("Click to disable".to_string()), + ), + (true, Status::NoFile) => { + ("No file active for conversation".to_string(), None) + } + }; + + if let Some(meta) = meta { + Tooltip::with_meta(tooltip, None, meta, cx) + } else { + Tooltip::text(tooltip, cx) + } + } + }) + .on_click(cx.listener(move |this, _, cx| { + this.set_enabled(!is_enabled); + cx.notify(); + })) + } +} diff --git a/crates/assistant2/src/ui/chat_message.rs b/crates/assistant2/src/ui/chat_message.rs new file mode 100644 index 0000000..fb07e55 --- /dev/null +++ b/crates/assistant2/src/ui/chat_message.rs @@ -0,0 +1,140 @@ +use std::sync::Arc; + +use client::User; +use gpui::{hsla, AnyElement, ClickEvent}; +use ui::{prelude::*, Avatar, Tooltip}; + +use crate::MessageId; + +pub enum UserOrAssistant { + User(Option>), + Assistant, +} + +#[derive(IntoElement)] +pub struct ChatMessage { + id: MessageId, + player: UserOrAssistant, + messages: Vec, + selected: bool, + collapsed: bool, + on_collapse_handle_click: Box, +} + +impl ChatMessage { + pub fn new( + id: MessageId, + player: UserOrAssistant, + messages: Vec, + collapsed: bool, + on_collapse_handle_click: Box, + ) -> Self { + Self { + id, + player, + messages, + selected: false, + collapsed, + on_collapse_handle_click, + } + } +} + +impl Selectable for ChatMessage { + fn selected(mut self, selected: bool) -> Self { + self.selected = selected; + self + } +} + +impl RenderOnce for ChatMessage { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let message_group = SharedString::from(format!("{}_group", self.id.0)); + + let collapse_handle_id = SharedString::from(format!("{}_collapse_handle", self.id.0)); + + let content_padding = Spacing::Small.rems(cx); + // Clamp the message height to exactly 1.5 lines when collapsed. 
+ let collapsed_height = content_padding.to_pixels(cx.rem_size()) + cx.line_height() * 1.5; + + let background_color = if let UserOrAssistant::User(_) = &self.player { + Some(cx.theme().colors().surface_background) + } else { + None + }; + + let (username, avatar_uri) = match self.player { + UserOrAssistant::Assistant => ( + "Assistant".into(), + Some("https://zed.dev/assistant_avatar.png".into()), + ), + UserOrAssistant::User(Some(user)) => { + (user.github_login.clone(), Some(user.avatar_uri.clone())) + } + UserOrAssistant::User(None) => ("You".into(), None), + }; + + v_flex() + .group(message_group.clone()) + .gap(Spacing::XSmall.rems(cx)) + .p(Spacing::XSmall.rems(cx)) + .when(self.selected, |element| { + element.bg(hsla(0.6, 0.67, 0.46, 0.12)) + }) + .rounded_lg() + .child( + h_flex() + .justify_between() + .px(content_padding) + .child( + h_flex() + .gap_2() + .map(|this| { + let avatar_size = rems_from_px(20.); + if let Some(avatar_uri) = avatar_uri { + this.child(Avatar::new(avatar_uri).size(avatar_size)) + } else { + this.child(div().size(avatar_size)) + } + }) + .child(Label::new(username).color(Color::Muted)), + ) + .child( + h_flex().visible_on_hover(message_group).child( + // temp icons + IconButton::new( + collapse_handle_id.clone(), + if self.collapsed { + IconName::ArrowUp + } else { + IconName::ArrowDown + }, + ) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(self.on_collapse_handle_click) + .tooltip(|cx| Tooltip::text("Collapse Message", cx)), + ), + ), + ) + .when(self.messages.len() > 0, |el| { + el.child( + h_flex().w_full().child( + v_flex() + .relative() + .overflow_hidden() + .w_full() + .p(content_padding) + .gap_3() + .text_ui(cx) + .rounded_lg() + .when_some(background_color, |this, background_color| { + this.bg(background_color) + }) + .when(self.collapsed, |this| this.h(collapsed_height)) + .children(self.messages), + ), + ) + }) + } +} diff --git a/crates/assistant2/src/ui/chat_notice.rs b/crates/assistant2/src/ui/chat_notice.rs new file mode 100644 index 0000000..5001d2d --- /dev/null +++ b/crates/assistant2/src/ui/chat_notice.rs @@ -0,0 +1,71 @@ +use ui::{prelude::*, Avatar, IconButtonShape}; + +#[derive(IntoElement)] +pub struct ChatNotice { + message: SharedString, + meta: Option, +} + +impl ChatNotice { + pub fn new(message: impl Into) -> Self { + Self { + message: message.into(), + meta: None, + } + } + + pub fn meta(mut self, meta: impl Into) -> Self { + self.meta = Some(meta.into()); + self + } +} + +impl RenderOnce for ChatNotice { + fn render(self, _cx: &mut WindowContext) -> impl IntoElement { + h_flex() + .w_full() + .items_start() + .mt_4() + .gap_3() + .child( + // TODO: Replace with question mark. 
+ Avatar::new("https://zed.dev/assistant_avatar.png").size(rems_from_px(20.)), + ) + .child( + v_flex() + .size_full() + .gap_1() + .pr_4() + .overflow_hidden() + .child( + h_flex() + .justify_between() + .overflow_hidden() + .child( + h_flex() + .flex_none() + .overflow_hidden() + .child(Label::new(self.message)), + ) + .child( + h_flex() + .flex_shrink_0() + .gap_1() + .child(Button::new("allow", "Allow")) + .child( + IconButton::new("deny", IconName::Close) + .shape(IconButtonShape::Square) + .icon_color(Color::Muted) + .size(ButtonSize::None) + .icon_size(IconSize::XSmall), + ), + ), + ) + .children( + self.meta.map(|meta| { + Label::new(meta).size(LabelSize::Small).color(Color::Muted) + }), + ), + ) + } +} diff --git a/crates/assistant2/src/ui/composer.rs b/crates/assistant2/src/ui/composer.rs new file mode 100644 index 0000000..742ef15 --- /dev/null +++ b/crates/assistant2/src/ui/composer.rs @@ -0,0 +1,193 @@ +use crate::{ + ui::{ActiveFileButton, ProjectIndexButton}, + AssistantChat, CompletionProvider, +}; +use editor::{Editor, EditorElement, EditorStyle}; +use gpui::{AnyElement, FontStyle, FontWeight, ReadGlobal, TextStyle, View, WeakView, WhiteSpace}; +use settings::Settings; +use theme::ThemeSettings; +use ui::{popover_menu, prelude::*, ButtonLike, ContextMenu, Divider, TextSize, Tooltip}; + +#[derive(IntoElement)] +pub struct Composer { + editor: View, + project_index_button: View, + active_file_button: Option>, + model_selector: AnyElement, +} + +impl Composer { + pub fn new( + editor: View, + project_index_button: View, + active_file_button: Option>, + model_selector: AnyElement, + ) -> Self { + Self { + editor, + project_index_button, + active_file_button, + model_selector, + } + } + + fn render_tools(&mut self, _cx: &mut WindowContext) -> impl IntoElement { + h_flex().child(self.project_index_button.clone()) + } + + fn render_attachment_tools(&mut self, _cx: &mut WindowContext) -> impl IntoElement { + h_flex().children( + self.active_file_button + .clone() + .map(|view| view.into_any_element()), + ) + } +} + +impl RenderOnce for Composer { + fn render(mut self, cx: &mut WindowContext) -> impl IntoElement { + let font_size = TextSize::Default.rems(cx); + let line_height = font_size.to_pixels(cx.rem_size()) * 1.3; + let mut editor_border = cx.theme().colors().text; + editor_border.fade_out(0.90); + + // Remove the extra 1px added by the border + let padding = Spacing::XLarge.rems(cx) - rems_from_px(1.); + + h_flex() + .p(Spacing::Small.rems(cx)) + .w_full() + .items_start() + .child( + v_flex() + .w_full() + .rounded_lg() + .p(padding) + .border_1() + .border_color(editor_border) + .bg(cx.theme().colors().editor_background) + .child( + v_flex() + .justify_between() + .w_full() + .gap_2() + .child({ + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.buffer_font.family.clone(), + font_features: settings.buffer_font.features.clone(), + font_size: font_size.into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: line_height.into(), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }; + + EditorElement::new( + &self.editor, + EditorStyle { + background: cx.theme().colors().editor_background, + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + }) + .child( + h_flex() + .flex_none() + .gap_2() + .justify_between() + .w_full() + .child( + 
h_flex().gap_1().child( + h_flex() + .gap_2() + .child(self.render_tools(cx)) + .child(Divider::vertical()) + .child(self.render_attachment_tools(cx)), + ), + ) + .child(h_flex().gap_1().child(self.model_selector)), + ), + ), + ) + } +} + +#[derive(IntoElement)] +pub struct ModelSelector { + assistant_chat: WeakView, + model: String, +} + +impl ModelSelector { + pub fn new(assistant_chat: WeakView, model: String) -> Self { + Self { + assistant_chat, + model, + } + } +} + +impl RenderOnce for ModelSelector { + fn render(self, _cx: &mut WindowContext) -> impl IntoElement { + popover_menu("model-switcher") + .menu(move |cx| { + ContextMenu::build(cx, |mut menu, cx| { + for model in CompletionProvider::global(cx).available_models() { + menu = menu.custom_entry( + { + let model = model.clone(); + move |_| Label::new(model.clone()).into_any_element() + }, + { + let assistant_chat = self.assistant_chat.clone(); + move |cx| { + _ = assistant_chat.update(cx, |assistant_chat, cx| { + assistant_chat.model.clone_from(&model); + cx.notify(); + }); + } + }, + ); + } + menu + }) + .into() + }) + .trigger( + ButtonLike::new("active-model") + .child( + h_flex() + .w_full() + .gap_0p5() + .child( + div() + .overflow_x_hidden() + .flex_grow() + .whitespace_nowrap() + .child( + Label::new(self.model) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + .child( + div().child( + Icon::new(IconName::ChevronDown) + .color(Color::Muted) + .size(IconSize::XSmall), + ), + ), + ) + .style(ButtonStyle::Subtle) + .tooltip(move |cx| Tooltip::text("Change Model", cx)), + ) + .anchor(gpui::AnchorCorner::BottomRight) + } +} diff --git a/crates/assistant2/src/ui/project_index_button.rs b/crates/assistant2/src/ui/project_index_button.rs new file mode 100644 index 0000000..6d7cb08 --- /dev/null +++ b/crates/assistant2/src/ui/project_index_button.rs @@ -0,0 +1,112 @@ +use assistant_tooling::ToolRegistry; +use gpui::{percentage, prelude::*, Animation, AnimationExt, Model, Transformation}; +use semantic_index::{ProjectIndex, Status}; +use std::{sync::Arc, time::Duration}; +use ui::{prelude::*, ButtonLike, Color, Icon, IconName, Indicator, Tooltip}; + +use crate::tools::ProjectIndexTool; + +pub struct ProjectIndexButton { + project_index: Model, + tool_registry: Arc, +} + +impl ProjectIndexButton { + pub fn new( + project_index: Model, + tool_registry: Arc, + cx: &mut ViewContext, + ) -> Self { + cx.subscribe(&project_index, |_this, _, _status: &Status, cx| { + cx.notify(); + }) + .detach(); + Self { + project_index, + tool_registry, + } + } + + pub fn set_enabled(&mut self, enabled: bool) { + self.tool_registry + .set_tool_enabled::(enabled); + } +} + +impl Render for ProjectIndexButton { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let status = self.project_index.read(cx).status(); + let is_enabled = self.tool_registry.is_tool_enabled::(); + + let icon = if is_enabled { + match status { + Status::Idle => Icon::new(IconName::Code) + .size(IconSize::XSmall) + .color(Color::Default), + Status::Loading => Icon::new(IconName::Code) + .size(IconSize::XSmall) + .color(Color::Muted), + Status::Scanning { .. } => Icon::new(IconName::Code) + .size(IconSize::XSmall) + .color(Color::Muted), + } + } else { + Icon::new(IconName::Code) + .size(IconSize::XSmall) + .color(Color::Disabled) + }; + + let indicator = if is_enabled { + match status { + Status::Idle => Some(Indicator::dot().color(Color::Success)), + Status::Scanning { .. 
} => Some(Indicator::dot().color(Color::Warning)), + Status::Loading => Some(Indicator::icon( + Icon::new(IconName::Spinner) + .color(Color::Accent) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), + ), + )), + } + } else { + None + }; + + ButtonLike::new("project-index") + .child( + ui::IconWithIndicator::new(icon, indicator) + .indicator_border_color(Some(gpui::transparent_black())), + ) + .tooltip({ + move |cx| { + let (tooltip, meta) = match (is_enabled, status) { + (false, _) => ( + "Project index disabled".to_string(), + Some("Click to enable".to_string()), + ), + (_, Status::Idle) => ( + "Project index ready".to_string(), + Some("Click to disable".to_string()), + ), + (_, Status::Loading) => ("Project index loading...".to_string(), None), + (_, Status::Scanning { remaining_count }) => ( + "Project index scanning...".to_string(), + Some(format!("{} remaining...", remaining_count)), + ), + }; + + if let Some(meta) = meta { + Tooltip::with_meta(tooltip, None, meta, cx) + } else { + Tooltip::text(tooltip, cx) + } + } + }) + .on_click(cx.listener(move |this, _, cx| { + this.set_enabled(!is_enabled); + cx.notify(); + })) + } +} diff --git a/crates/assistant2/src/ui/stories.rs b/crates/assistant2/src/ui/stories.rs new file mode 100644 index 0000000..8bc2b30 --- /dev/null +++ b/crates/assistant2/src/ui/stories.rs @@ -0,0 +1,5 @@ +mod chat_message; +mod chat_notice; + +pub use chat_message::*; +pub use chat_notice::*; diff --git a/crates/assistant2/src/ui/stories/chat_message.rs b/crates/assistant2/src/ui/stories/chat_message.rs new file mode 100644 index 0000000..1d63ae7 --- /dev/null +++ b/crates/assistant2/src/ui/stories/chat_message.rs @@ -0,0 +1,101 @@ +use std::sync::Arc; + +use client::User; +use story::{StoryContainer, StoryItem, StorySection}; +use ui::prelude::*; + +use crate::ui::{ChatMessage, UserOrAssistant}; +use crate::MessageId; + +pub struct ChatMessageStory; + +impl Render for ChatMessageStory { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + let user_1 = Arc::new(User { + id: 12345, + github_login: "iamnbutler".into(), + avatar_uri: "https://avatars.githubusercontent.com/u/1714999?v=4".into(), + }); + + StoryContainer::new( + "ChatMessage Story", + "crates/assistant2/src/ui/stories/chat_message.rs", + ) + .child( + StorySection::new() + .child(StoryItem::new( + "User chat message", + ChatMessage::new( + MessageId(0), + UserOrAssistant::User(Some(user_1.clone())), + vec![div().child("What can I do here?").into_any_element()], + false, + Box::new(|_, _| {}), + ), + )) + .child(StoryItem::new( + "User chat message (collapsed)", + ChatMessage::new( + MessageId(0), + UserOrAssistant::User(Some(user_1.clone())), + vec![div().child("What can I do here?").into_any_element()], + true, + Box::new(|_, _| {}), + ), + )), + ) + .child( + StorySection::new() + .child(StoryItem::new( + "Assistant chat message", + ChatMessage::new( + MessageId(0), + UserOrAssistant::Assistant, + vec![div().child("You can talk to me!").into_any_element()], + false, + Box::new(|_, _| {}), + ), + )) + .child(StoryItem::new( + "Assistant chat message (collapsed)", + ChatMessage::new( + MessageId(0), + UserOrAssistant::Assistant, + vec![div().child(MULTI_LINE_MESSAGE).into_any_element()], + true, + Box::new(|_, _| {}), + ), + )), + ) + .child( + StorySection::new().child(StoryItem::new( + "Conversation between user and assistant", + v_flex() + .gap_2() + 
.child(ChatMessage::new( + MessageId(0), + UserOrAssistant::User(Some(user_1.clone())), + vec![div().child("What is Rust??").into_any_element()], + false, + Box::new(|_, _| {}), + )) + .child(ChatMessage::new( + MessageId(0), + UserOrAssistant::Assistant, + vec![div().child("Rust is a multi-paradigm programming language focused on performance and safety").into_any_element()], + false, + Box::new(|_, _| {}), + )) + .child(ChatMessage::new( + MessageId(0), + UserOrAssistant::User(Some(user_1)), + vec![div().child("Sounds pretty cool!").into_any_element()], + false, + Box::new(|_, _| {}), + )), + )), + ) + } +} + +const MULTI_LINE_MESSAGE: &str = "In 2010, the movies nominated for the 82nd Academy Awards, for films released in 2009, were as follows. Note that 2010 nominees were announced for the ceremony happening in that year, but they honor movies from the previous year"; diff --git a/crates/assistant2/src/ui/stories/chat_notice.rs b/crates/assistant2/src/ui/stories/chat_notice.rs new file mode 100644 index 0000000..ad8eef9 --- /dev/null +++ b/crates/assistant2/src/ui/stories/chat_notice.rs @@ -0,0 +1,22 @@ +use story::{StoryContainer, StoryItem, StorySection}; +use ui::prelude::*; + +use crate::ui::ChatNotice; + +pub struct ChatNoticeStory; + +impl Render for ChatNoticeStory { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + StoryContainer::new( + "ChatNotice Story", + "crates/assistant2/src/ui/stories/chat_notice.rs", + ) + .child( + StorySection::new().child(StoryItem::new( + "Project index request", + ChatNotice::new("Allow assistant to index your project?") + .meta("Enabling will allow responses more relevant to this project."), + )), + ) + } +} diff --git a/crates/assistant_tooling/Cargo.toml b/crates/assistant_tooling/Cargo.toml new file mode 100644 index 0000000..79f41fa --- /dev/null +++ b/crates/assistant_tooling/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "assistant_tooling" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/assistant_tooling.rs" + +[dependencies] +anyhow.workspace = true +collections.workspace = true +futures.workspace = true +gpui.workspace = true +log.workspace = true +project.workspace = true +repair_json.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +sum_tree.workspace = true +ui.workspace = true +util.workspace = true + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +unindent.workspace = true diff --git a/crates/assistant_tooling/LICENSE-GPL b/crates/assistant_tooling/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/assistant_tooling/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/assistant_tooling/README.md b/crates/assistant_tooling/README.md new file mode 100644 index 0000000..bc27283 --- /dev/null +++ b/crates/assistant_tooling/README.md @@ -0,0 +1,85 @@ +# Assistant Tooling + +Bringing Language Model tool calling to GPUI. + +This unlocks: + +- **Structured Extraction** of model responses +- **Validation** of model inputs +- **Execution** of chosen tools + +## Overview + +Language Models can produce structured outputs that are perfect for calling functions. The most famous of these is OpenAI's tool calling. 
When making a chat completion, you can pass a list of tools available to the model. The model will choose `0..n` tools to help it complete a user's task. It's up to _you_ to create the tools that the model can call. + +> **User**: "Hey I need help with implementing a collapsible panel in GPUI" + +> **Assistant**: "Sure, I can help with that. Let me see what I can find." + +> `tool_calls: ["name": "query_codebase", arguments: "{ 'query': 'GPUI collapsible panel' }"]` + +> `result: "['crates/gpui/src/panel.rs:12: impl Panel { ... }', 'crates/gpui/src/panel.rs:20: impl Panel { ... }']"` + +> **Assistant**: "Here are some excerpts from the GPUI codebase that might help you." + +This library is designed to facilitate this interaction mode by allowing you to go from `struct` to `tool` with two simple traits, `LanguageModelTool` and `ToolView`. + +## Using the Tool Registry + +```rust +let mut tool_registry = ToolRegistry::new(); +tool_registry + .register(WeatherTool { api_client }) + .unwrap(); // You can only register one tool per name + +let completion = cx.update(|cx| { + CompletionProvider::get(cx).complete( + model_name, + messages, + Vec::new(), + 1.0, + // The definitions get passed directly to OpenAI when you want + // the model to be able to call your tool + tool_registry.definitions(), + ) +}); + +let mut stream = completion?.await?; + +let mut message = AssistantMessage::new(); + +while let Some(delta) = stream.next().await { + // As messages stream in, you'll get both assistant content + if let Some(content) = &delta.content { + message + .body + .update(cx, |message, cx| message.append(&content, cx)); + } + + // And tool calls! + for tool_call_delta in delta.tool_calls { + let index = tool_call_delta.index as usize; + if index >= message.tool_calls.len() { + message.tool_calls.resize_with(index + 1, Default::default); + } + let tool_call = &mut message.tool_calls[index]; + + // Build up an ID + if let Some(id) = &tool_call_delta.id { + tool_call.id.push_str(id); + } + + tool_registry.update_tool_call( + tool_call, + tool_call_delta.name.as_deref(), + tool_call_delta.arguments.as_deref(), + cx, + ); + } +} +``` + +Once the stream of tokens is complete, you can execute the tool call by calling `tool_registry.execute_tool_call(tool_call, cx)`, which returns a `Task<Result<()>>`. + +As the tokens stream in and tool calls are executed, your `ToolView` will get updates. Render each tool call by passing that `tool_call` into `tool_registry.render_tool_call(tool_call, cx)`. The final message for the model can be pulled by calling `self.tool_registry.content_for_tool_call(tool_call, &mut project_context, cx)`.
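To make the two traits concrete, here is a minimal sketch of a custom tool, modeled on the `CreateBufferTool` and `ProjectIndexTool` implementations elsewhere in this PR. The `EchoTool`, `EchoInput`, and `EchoView` names are illustrative only and are not part of this crate; only the trait surface (`name`, `description`, `view`, `set_input`, `execute`, `generate`, `serialize`/`deserialize`) comes from `tool_registry.rs`.

```rust
use anyhow::Result;
use assistant_tooling::{LanguageModelTool, ProjectContext, ToolView};
use gpui::{prelude::*, Task, View, WindowContext};
use schemars::JsonSchema;
use serde::Deserialize;
use ui::prelude::*;

/// Hypothetical tool that simply echoes its input back to the model.
pub struct EchoTool;

#[derive(Debug, Clone, Deserialize, JsonSchema)]
pub struct EchoInput {
    /// The text to echo back.
    text: String,
}

pub struct EchoView {
    input: Option<EchoInput>,
}

impl LanguageModelTool for EchoTool {
    type View = EchoView;

    fn name(&self) -> String {
        "echo".to_string()
    }

    fn description(&self) -> String {
        "Echoes the provided text back into the conversation.".to_string()
    }

    fn view(&self, cx: &mut WindowContext) -> View<Self::View> {
        cx.new_view(|_cx| EchoView { input: None })
    }
}

impl Render for EchoView {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        ui::Label::new(
            self.input
                .as_ref()
                .map(|input| input.text.clone())
                .unwrap_or_else(|| "Waiting for input...".to_string()),
        )
    }
}

impl ToolView for EchoView {
    type Input = EchoInput;
    type SerializedState = ();

    // The string returned here is what the language model sees as the tool result.
    fn generate(&self, _project: &mut ProjectContext, _cx: &mut ViewContext<Self>) -> String {
        match &self.input {
            Some(input) => format!("echo: {}", input.text),
            None => "no input".to_string(),
        }
    }

    // Called repeatedly as the streamed (and repaired) JSON arguments become parseable.
    fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>) {
        self.input = Some(input);
        cx.notify();
    }

    // Nothing asynchronous to do for an echo; real tools kick off their work here.
    fn execute(&mut self, _cx: &mut ViewContext<Self>) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn serialize(&self, _cx: &mut ViewContext<Self>) -> Self::SerializedState {}

    fn deserialize(
        &mut self,
        _output: Self::SerializedState,
        _cx: &mut ViewContext<Self>,
    ) -> Result<()> {
        Ok(())
    }
}
```

Registering such a tool then follows the `WeatherTool` example above, after which `tool_registry.definitions()` will include its JSON schema (derived from the `JsonSchema` doc comments on the input type).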
diff --git a/crates/assistant_tooling/src/assistant_tooling.rs b/crates/assistant_tooling/src/assistant_tooling.rs new file mode 100644 index 0000000..9dcf290 --- /dev/null +++ b/crates/assistant_tooling/src/assistant_tooling.rs @@ -0,0 +1,13 @@ +mod attachment_registry; +mod project_context; +mod tool_registry; + +pub use attachment_registry::{ + AttachmentOutput, AttachmentRegistry, LanguageModelAttachment, SavedUserAttachment, + UserAttachment, +}; +pub use project_context::ProjectContext; +pub use tool_registry::{ + LanguageModelTool, SavedToolFunctionCall, ToolFunctionCall, ToolFunctionDefinition, + ToolRegistry, ToolView, +}; diff --git a/crates/assistant_tooling/src/attachment_registry.rs b/crates/assistant_tooling/src/attachment_registry.rs new file mode 100644 index 0000000..e8b52d2 --- /dev/null +++ b/crates/assistant_tooling/src/attachment_registry.rs @@ -0,0 +1,234 @@ +use crate::ProjectContext; +use anyhow::{anyhow, Result}; +use collections::HashMap; +use futures::future::join_all; +use gpui::{AnyView, Render, Task, View, WindowContext}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde_json::value::RawValue; +use std::{ + any::TypeId, + sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, + }, +}; +use util::ResultExt as _; + +pub struct AttachmentRegistry { + registered_attachments: HashMap, +} + +pub trait AttachmentOutput { + fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String; +} + +pub trait LanguageModelAttachment { + type Output: DeserializeOwned + Serialize + 'static; + type View: Render + AttachmentOutput; + + fn name(&self) -> Arc; + fn run(&self, cx: &mut WindowContext) -> Task>; + fn view(&self, output: Result, cx: &mut WindowContext) -> View; +} + +/// A collected attachment from running an attachment tool +pub struct UserAttachment { + pub view: AnyView, + name: Arc, + serialized_output: Result, String>, + generate_fn: fn(AnyView, &mut ProjectContext, cx: &mut WindowContext) -> String, +} + +#[derive(Serialize, Deserialize)] +pub struct SavedUserAttachment { + name: Arc, + serialized_output: Result, String>, +} + +/// Internal representation of an attachment tool to allow us to treat them dynamically +struct RegisteredAttachment { + name: Arc, + enabled: AtomicBool, + call: Box Task>>, + deserialize: Box Result>, +} + +impl AttachmentRegistry { + pub fn new() -> Self { + Self { + registered_attachments: HashMap::default(), + } + } + + pub fn register(&mut self, attachment: A) { + let attachment = Arc::new(attachment); + + let call = Box::new({ + let attachment = attachment.clone(); + move |cx: &mut WindowContext| { + let result = attachment.run(cx); + let attachment = attachment.clone(); + cx.spawn(move |mut cx| async move { + let result: Result = result.await; + let serialized_output = + result + .as_ref() + .map_err(ToString::to_string) + .and_then(|output| { + Ok(RawValue::from_string( + serde_json::to_string(output).map_err(|e| e.to_string())?, + ) + .unwrap()) + }); + + let view = cx.update(|cx| attachment.view(result, cx))?; + + Ok(UserAttachment { + name: attachment.name(), + view: view.into(), + generate_fn: generate::, + serialized_output, + }) + }) + } + }); + + let deserialize = Box::new({ + let attachment = attachment.clone(); + move |saved_attachment: &SavedUserAttachment, cx: &mut WindowContext| { + let serialized_output = saved_attachment.serialized_output.clone(); + let output = match &serialized_output { + Ok(serialized_output) => { + Ok(serde_json::from_str::(serialized_output.get())?) 
+ } + Err(error) => Err(anyhow!("{error}")), + }; + let view = attachment.view(output, cx).into(); + + Ok(UserAttachment { + name: saved_attachment.name.clone(), + view, + serialized_output, + generate_fn: generate::, + }) + } + }); + + self.registered_attachments.insert( + TypeId::of::(), + RegisteredAttachment { + name: attachment.name(), + call, + deserialize, + enabled: AtomicBool::new(true), + }, + ); + return; + + fn generate( + view: AnyView, + project: &mut ProjectContext, + cx: &mut WindowContext, + ) -> String { + view.downcast::() + .unwrap() + .update(cx, |view, cx| T::View::generate(view, project, cx)) + } + } + + pub fn set_attachment_tool_enabled( + &self, + is_enabled: bool, + ) { + if let Some(attachment) = self.registered_attachments.get(&TypeId::of::()) { + attachment.enabled.store(is_enabled, SeqCst); + } + } + + pub fn is_attachment_tool_enabled(&self) -> bool { + if let Some(attachment) = self.registered_attachments.get(&TypeId::of::()) { + attachment.enabled.load(SeqCst) + } else { + false + } + } + + pub fn call( + &self, + cx: &mut WindowContext, + ) -> Task> { + let Some(attachment) = self.registered_attachments.get(&TypeId::of::()) else { + return Task::ready(Err(anyhow!("no attachment tool"))); + }; + + (attachment.call)(cx) + } + + pub fn call_all_attachment_tools( + self: Arc, + cx: &mut WindowContext<'_>, + ) -> Task>> { + let this = self.clone(); + cx.spawn(|mut cx| async move { + let attachment_tasks = cx.update(|cx| { + let mut tasks = Vec::new(); + for attachment in this + .registered_attachments + .values() + .filter(|attachment| attachment.enabled.load(SeqCst)) + { + tasks.push((attachment.call)(cx)) + } + + tasks + })?; + + let attachments = join_all(attachment_tasks.into_iter()).await; + + Ok(attachments + .into_iter() + .filter_map(|attachment| attachment.log_err()) + .collect()) + }) + } + + pub fn serialize_user_attachment( + &self, + user_attachment: &UserAttachment, + ) -> SavedUserAttachment { + SavedUserAttachment { + name: user_attachment.name.clone(), + serialized_output: user_attachment.serialized_output.clone(), + } + } + + pub fn deserialize_user_attachment( + &self, + saved_user_attachment: SavedUserAttachment, + cx: &mut WindowContext, + ) -> Result { + if let Some(registered_attachment) = self + .registered_attachments + .values() + .find(|attachment| attachment.name == saved_user_attachment.name) + { + (registered_attachment.deserialize)(&saved_user_attachment, cx) + } else { + Err(anyhow!( + "no attachment tool for name {}", + saved_user_attachment.name + )) + } + } +} + +impl UserAttachment { + pub fn generate(&self, output: &mut ProjectContext, cx: &mut WindowContext) -> Option { + let result = (self.generate_fn)(self.view.clone(), output, cx); + if result.is_empty() { + None + } else { + Some(result) + } + } +} diff --git a/crates/assistant_tooling/src/project_context.rs b/crates/assistant_tooling/src/project_context.rs new file mode 100644 index 0000000..aafe272 --- /dev/null +++ b/crates/assistant_tooling/src/project_context.rs @@ -0,0 +1,296 @@ +use anyhow::{anyhow, Result}; +use gpui::{AppContext, Model, Task, WeakModel}; +use project::{Fs, Project, ProjectPath, Worktree}; +use std::{cmp::Ordering, fmt::Write as _, ops::Range, sync::Arc}; +use sum_tree::TreeMap; + +pub struct ProjectContext { + files: TreeMap, + project: WeakModel, + fs: Arc, +} + +#[derive(Debug, Clone)] +enum PathState { + PathOnly, + EntireFile, + Excerpts { ranges: Vec> }, +} + +impl ProjectContext { + pub fn new(project: WeakModel, fs: Arc) -> Self { + 
Self { + files: TreeMap::default(), + fs, + project, + } + } + + pub fn add_path(&mut self, project_path: ProjectPath) { + if self.files.get(&project_path).is_none() { + self.files.insert(project_path, PathState::PathOnly); + } + } + + pub fn add_excerpts(&mut self, project_path: ProjectPath, new_ranges: &[Range]) { + let previous_state = self + .files + .get(&project_path) + .unwrap_or(&PathState::PathOnly); + + let mut ranges = match previous_state { + PathState::EntireFile => return, + PathState::PathOnly => Vec::new(), + PathState::Excerpts { ranges } => ranges.to_vec(), + }; + + for new_range in new_ranges { + let ix = ranges.binary_search_by(|probe| { + if probe.end < new_range.start { + Ordering::Less + } else if probe.start > new_range.end { + Ordering::Greater + } else { + Ordering::Equal + } + }); + + match ix { + Ok(mut ix) => { + let existing = &mut ranges[ix]; + existing.start = existing.start.min(new_range.start); + existing.end = existing.end.max(new_range.end); + while ix + 1 < ranges.len() && ranges[ix + 1].start <= ranges[ix].end { + ranges[ix].end = ranges[ix].end.max(ranges[ix + 1].end); + ranges.remove(ix + 1); + } + while ix > 0 && ranges[ix - 1].end >= ranges[ix].start { + ranges[ix].start = ranges[ix].start.min(ranges[ix - 1].start); + ranges.remove(ix - 1); + ix -= 1; + } + } + Err(ix) => { + ranges.insert(ix, new_range.clone()); + } + } + } + + self.files + .insert(project_path, PathState::Excerpts { ranges }); + } + + pub fn add_file(&mut self, project_path: ProjectPath) { + self.files.insert(project_path, PathState::EntireFile); + } + + pub fn generate_system_message(&self, cx: &mut AppContext) -> Task> { + let project = self + .project + .upgrade() + .ok_or_else(|| anyhow!("project dropped")); + let files = self.files.clone(); + let fs = self.fs.clone(); + cx.spawn(|cx| async move { + let project = project?; + let mut result = "project structure:\n".to_string(); + + let mut last_worktree: Option> = None; + for (project_path, path_state) in files.iter() { + if let Some(worktree) = &last_worktree { + if worktree.read_with(&cx, |tree, _| tree.id())? != project_path.worktree_id { + last_worktree = None; + } + } + + let worktree; + if let Some(last_worktree) = &last_worktree { + worktree = last_worktree.clone(); + } else if let Some(tree) = project.read_with(&cx, |project, cx| { + project.worktree_for_id(project_path.worktree_id, cx) + })? 
{ + worktree = tree; + last_worktree = Some(worktree.clone()); + let worktree_name = + worktree.read_with(&cx, |tree, _cx| tree.root_name().to_string())?; + writeln!(&mut result, "# {}", worktree_name).unwrap(); + } else { + continue; + } + + let worktree_abs_path = worktree.read_with(&cx, |tree, _cx| tree.abs_path())?; + let path = &project_path.path; + writeln!(&mut result, "## {}", path.display()).unwrap(); + + match path_state { + PathState::PathOnly => {} + PathState::EntireFile => { + let text = fs.load(&worktree_abs_path.join(&path)).await?; + writeln!(&mut result, "~~~\n{text}\n~~~").unwrap(); + } + PathState::Excerpts { ranges } => { + let text = fs.load(&worktree_abs_path.join(&path)).await?; + + writeln!(&mut result, "~~~").unwrap(); + + // Assumption: ranges are in order, not overlapping + let mut prev_range_end = 0; + for range in ranges { + if range.start > prev_range_end { + writeln!(&mut result, "...").unwrap(); + prev_range_end = range.end; + } + + let mut start = range.start; + let mut end = range.end.min(text.len()); + while !text.is_char_boundary(start) { + start += 1; + } + while !text.is_char_boundary(end) { + end -= 1; + } + result.push_str(&text[start..end]); + if !result.ends_with('\n') { + result.push('\n'); + } + } + + if prev_range_end < text.len() { + writeln!(&mut result, "...").unwrap(); + } + + writeln!(&mut result, "~~~").unwrap(); + } + } + } + Ok(result) + }) + } +} + +#[cfg(test)] +mod tests { + use std::path::Path; + + use super::*; + use gpui::TestAppContext; + use project::FakeFs; + use serde_json::json; + use settings::SettingsStore; + + use unindent::Unindent as _; + + #[gpui::test] + async fn test_system_message_generation(cx: &mut TestAppContext) { + init_test(cx); + + let file_3_contents = r#" + fn test1() {} + fn test2() {} + fn test3() {} + "# + .unindent(); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/code", + json!({ + "root1": { + "lib": { + "file1.rs": "mod example;", + "file2.rs": "", + }, + "test": { + "file3.rs": file_3_contents, + } + }, + "root2": { + "src": { + "main.rs": "" + } + } + }), + ) + .await; + + let project = Project::test( + fs.clone(), + ["/code/root1".as_ref(), "/code/root2".as_ref()], + cx, + ) + .await; + + let worktree_ids = project.read_with(cx, |project, cx| { + project + .worktrees() + .map(|worktree| worktree.read(cx).id()) + .collect::>() + }); + + let mut ax = ProjectContext::new(project.downgrade(), fs); + + ax.add_file(ProjectPath { + worktree_id: worktree_ids[0], + path: Path::new("lib/file1.rs").into(), + }); + + let message = cx + .update(|cx| ax.generate_system_message(cx)) + .await + .unwrap(); + assert_eq!( + r#" + project structure: + # root1 + ## lib/file1.rs + ~~~ + mod example; + ~~~ + "# + .unindent(), + message + ); + + ax.add_excerpts( + ProjectPath { + worktree_id: worktree_ids[0], + path: Path::new("test/file3.rs").into(), + }, + &[ + file_3_contents.find("fn test2").unwrap() + ..file_3_contents.find("fn test3").unwrap(), + ], + ); + + let message = cx + .update(|cx| ax.generate_system_message(cx)) + .await + .unwrap(); + assert_eq!( + r#" + project structure: + # root1 + ## lib/file1.rs + ~~~ + mod example; + ~~~ + ## test/file3.rs + ~~~ + ... + fn test2() {} + ... 
+ ~~~ + "# + .unindent(), + message + ); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + Project::init_settings(cx); + }); + } +} diff --git a/crates/assistant_tooling/src/tool_registry.rs b/crates/assistant_tooling/src/tool_registry.rs new file mode 100644 index 0000000..e5f8914 --- /dev/null +++ b/crates/assistant_tooling/src/tool_registry.rs @@ -0,0 +1,526 @@ +use crate::ProjectContext; +use anyhow::{anyhow, Result}; +use gpui::{AnyElement, AnyView, IntoElement, Render, Task, View, WindowContext}; +use repair_json::repair; +use schemars::{schema::RootSchema, schema_for, JsonSchema}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde_json::value::RawValue; +use std::{ + any::TypeId, + collections::HashMap, + fmt::Display, + mem, + sync::atomic::{AtomicBool, Ordering::SeqCst}, +}; +use ui::ViewContext; + +pub struct ToolRegistry { + registered_tools: HashMap, +} + +#[derive(Default)] +pub struct ToolFunctionCall { + pub id: String, + pub name: String, + pub arguments: String, + state: ToolFunctionCallState, +} + +#[derive(Default)] +enum ToolFunctionCallState { + #[default] + Initializing, + NoSuchTool, + KnownTool(Box), + ExecutedTool(Box), +} + +trait InternalToolView { + fn view(&self) -> AnyView; + fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String; + fn try_set_input(&self, input: &str, cx: &mut WindowContext); + fn execute(&self, cx: &mut WindowContext) -> Task>; + fn serialize_output(&self, cx: &mut WindowContext) -> Result>; + fn deserialize_output(&self, raw_value: &RawValue, cx: &mut WindowContext) -> Result<()>; +} + +#[derive(Default, Serialize, Deserialize)] +pub struct SavedToolFunctionCall { + id: String, + name: String, + arguments: String, + state: SavedToolFunctionCallState, +} + +#[derive(Default, Serialize, Deserialize)] +enum SavedToolFunctionCallState { + #[default] + Initializing, + NoSuchTool, + KnownTool, + ExecutedTool(Box), +} + +#[derive(Clone, Debug, PartialEq)] +pub struct ToolFunctionDefinition { + pub name: String, + pub description: String, + pub parameters: RootSchema, +} + +pub trait LanguageModelTool { + type View: ToolView; + + /// Returns the name of the tool. + /// + /// This name is exposed to the language model to allow the model to pick + /// which tools to use. As this name is used to identify the tool within a + /// tool registry, it should be unique. + fn name(&self) -> String; + + /// Returns the description of the tool. + /// + /// This can be used to _prompt_ the model as to what the tool does. + fn description(&self) -> String; + + /// Returns the OpenAI Function definition for the tool, for direct use with OpenAI's API. + fn definition(&self) -> ToolFunctionDefinition { + let root_schema = schema_for!(::Input); + + ToolFunctionDefinition { + name: self.name(), + description: self.description(), + parameters: root_schema, + } + } + + /// A view of the output of running the tool, for displaying to the user. + fn view(&self, cx: &mut WindowContext) -> View; +} + +pub trait ToolView: Render { + /// The input type that will be passed in to `execute` when the tool is called + /// by the language model. + type Input: DeserializeOwned + JsonSchema; + + /// The output returned by executing the tool. 
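+    ///
+    /// This is what a `SavedToolFunctionCall` persists for an executed tool, and it is
+    /// fed back through `deserialize` below when a saved call is restored.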
+ type SerializedState: DeserializeOwned + Serialize; + + fn generate(&self, project: &mut ProjectContext, cx: &mut ViewContext) -> String; + fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext); + fn execute(&mut self, cx: &mut ViewContext) -> Task>; + + fn serialize(&self, cx: &mut ViewContext) -> Self::SerializedState; + fn deserialize( + &mut self, + output: Self::SerializedState, + cx: &mut ViewContext, + ) -> Result<()>; +} + +struct RegisteredTool { + enabled: AtomicBool, + type_id: TypeId, + build_view: Box Box>, + definition: ToolFunctionDefinition, +} + +impl ToolRegistry { + pub fn new() -> Self { + Self { + registered_tools: HashMap::new(), + } + } + + pub fn set_tool_enabled(&self, is_enabled: bool) { + for tool in self.registered_tools.values() { + if tool.type_id == TypeId::of::() { + tool.enabled.store(is_enabled, SeqCst); + return; + } + } + } + + pub fn is_tool_enabled(&self) -> bool { + for tool in self.registered_tools.values() { + if tool.type_id == TypeId::of::() { + return tool.enabled.load(SeqCst); + } + } + false + } + + pub fn definitions(&self) -> Vec { + self.registered_tools + .values() + .filter(|tool| tool.enabled.load(SeqCst)) + .map(|tool| tool.definition.clone()) + .collect() + } + + pub fn update_tool_call( + &self, + call: &mut ToolFunctionCall, + name: Option<&str>, + arguments: Option<&str>, + cx: &mut WindowContext, + ) { + if let Some(name) = name { + call.name.push_str(name); + } + if let Some(arguments) = arguments { + if call.arguments.is_empty() { + if let Some(tool) = self.registered_tools.get(&call.name) { + let view = (tool.build_view)(cx); + call.state = ToolFunctionCallState::KnownTool(view); + } else { + call.state = ToolFunctionCallState::NoSuchTool; + } + } + call.arguments.push_str(arguments); + + if let ToolFunctionCallState::KnownTool(view) = &call.state { + if let Ok(repaired_arguments) = repair(call.arguments.clone()) { + view.try_set_input(&repaired_arguments, cx) + } + } + } + } + + pub fn execute_tool_call( + &self, + tool_call: &mut ToolFunctionCall, + cx: &mut WindowContext, + ) -> Option>> { + if let ToolFunctionCallState::KnownTool(view) = mem::take(&mut tool_call.state) { + let task = view.execute(cx); + tool_call.state = ToolFunctionCallState::ExecutedTool(view); + Some(task) + } else { + None + } + } + + pub fn render_tool_call( + &self, + tool_call: &ToolFunctionCall, + _cx: &mut WindowContext, + ) -> Option { + match &tool_call.state { + ToolFunctionCallState::NoSuchTool => { + Some(ui::Label::new("No such tool").into_any_element()) + } + ToolFunctionCallState::Initializing => None, + ToolFunctionCallState::KnownTool(view) | ToolFunctionCallState::ExecutedTool(view) => { + Some(view.view().into_any_element()) + } + } + } + + pub fn content_for_tool_call( + &self, + tool_call: &ToolFunctionCall, + project_context: &mut ProjectContext, + cx: &mut WindowContext, + ) -> String { + match &tool_call.state { + ToolFunctionCallState::Initializing => String::new(), + ToolFunctionCallState::NoSuchTool => { + format!("No such tool: {}", tool_call.name) + } + ToolFunctionCallState::KnownTool(view) | ToolFunctionCallState::ExecutedTool(view) => { + view.generate(project_context, cx) + } + } + } + + pub fn serialize_tool_call( + &self, + call: &ToolFunctionCall, + cx: &mut WindowContext, + ) -> Result { + Ok(SavedToolFunctionCall { + id: call.id.clone(), + name: call.name.clone(), + arguments: call.arguments.clone(), + state: match &call.state { + ToolFunctionCallState::Initializing => 
SavedToolFunctionCallState::Initializing, + ToolFunctionCallState::NoSuchTool => SavedToolFunctionCallState::NoSuchTool, + ToolFunctionCallState::KnownTool(_) => SavedToolFunctionCallState::KnownTool, + ToolFunctionCallState::ExecutedTool(view) => { + SavedToolFunctionCallState::ExecutedTool(view.serialize_output(cx)?) + } + }, + }) + } + + pub fn deserialize_tool_call( + &self, + call: &SavedToolFunctionCall, + cx: &mut WindowContext, + ) -> Result { + let Some(tool) = self.registered_tools.get(&call.name) else { + return Err(anyhow!("no such tool {}", call.name)); + }; + + Ok(ToolFunctionCall { + id: call.id.clone(), + name: call.name.clone(), + arguments: call.arguments.clone(), + state: match &call.state { + SavedToolFunctionCallState::Initializing => ToolFunctionCallState::Initializing, + SavedToolFunctionCallState::NoSuchTool => ToolFunctionCallState::NoSuchTool, + SavedToolFunctionCallState::KnownTool => { + log::error!("Deserialized tool that had not executed"); + let view = (tool.build_view)(cx); + view.try_set_input(&call.arguments, cx); + ToolFunctionCallState::KnownTool(view) + } + SavedToolFunctionCallState::ExecutedTool(output) => { + let view = (tool.build_view)(cx); + view.try_set_input(&call.arguments, cx); + view.deserialize_output(output, cx)?; + ToolFunctionCallState::ExecutedTool(view) + } + }, + }) + } + + pub fn register(&mut self, tool: T) -> Result<()> { + let name = tool.name(); + let registered_tool = RegisteredTool { + type_id: TypeId::of::(), + definition: tool.definition(), + enabled: AtomicBool::new(true), + build_view: Box::new(move |cx: &mut WindowContext| Box::new(tool.view(cx))), + }; + + let previous = self.registered_tools.insert(name.clone(), registered_tool); + if previous.is_some() { + return Err(anyhow!("already registered a tool with name {}", name)); + } + + return Ok(()); + } +} + +impl InternalToolView for View { + fn view(&self) -> AnyView { + self.clone().into() + } + + fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String { + self.update(cx, |view, cx| view.generate(project, cx)) + } + + fn try_set_input(&self, input: &str, cx: &mut WindowContext) { + if let Ok(input) = serde_json::from_str::(input) { + self.update(cx, |view, cx| { + view.set_input(input, cx); + cx.notify(); + }); + } + } + + fn execute(&self, cx: &mut WindowContext) -> Task> { + self.update(cx, |view, cx| view.execute(cx)) + } + + fn serialize_output(&self, cx: &mut WindowContext) -> Result> { + let output = self.update(cx, |view, cx| view.serialize(cx)); + Ok(RawValue::from_string(serde_json::to_string(&output)?)?) 
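+        // (Stored as a type-erased `RawValue` so the registry can persist any tool's
+        // `SerializedState` without knowing its concrete type.)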
+ } + + fn deserialize_output(&self, output: &RawValue, cx: &mut WindowContext) -> Result<()> { + let state = serde_json::from_str::(output.get())?; + self.update(cx, |view, cx| view.deserialize(state, cx))?; + Ok(()) + } +} + +impl Display for ToolFunctionDefinition { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let schema = serde_json::to_string(&self.parameters).ok(); + let schema = schema.unwrap_or("None".to_string()); + write!(f, "Name: {}:\n", self.name)?; + write!(f, "Description: {}\n", self.description)?; + write!(f, "Parameters: {}", schema) + } +} + +#[cfg(test)] +mod test { + use super::*; + use gpui::{div, prelude::*, Render, TestAppContext}; + use gpui::{EmptyView, View}; + use schemars::JsonSchema; + use serde::{Deserialize, Serialize}; + use serde_json::json; + + #[derive(Deserialize, Serialize, JsonSchema)] + struct WeatherQuery { + location: String, + unit: String, + } + + #[derive(Clone, Serialize, Deserialize, PartialEq, Debug)] + struct WeatherResult { + location: String, + temperature: f64, + unit: String, + } + + struct WeatherView { + input: Option, + result: Option, + + // Fake API call + current_weather: WeatherResult, + } + + #[derive(Clone, Serialize)] + struct WeatherTool { + current_weather: WeatherResult, + } + + impl WeatherView { + fn new(current_weather: WeatherResult) -> Self { + Self { + input: None, + result: None, + current_weather, + } + } + } + + impl Render for WeatherView { + fn render(&mut self, _cx: &mut gpui::ViewContext) -> impl IntoElement { + match self.result { + Some(ref result) => div() + .child(format!("temperature: {}", result.temperature)) + .into_any_element(), + None => div().child("Calculating weather...").into_any_element(), + } + } + } + + impl ToolView for WeatherView { + type Input = WeatherQuery; + + type SerializedState = WeatherResult; + + fn generate(&self, _output: &mut ProjectContext, _cx: &mut ViewContext) -> String { + serde_json::to_string(&self.result).unwrap() + } + + fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext) { + self.input = Some(input); + cx.notify(); + } + + fn execute(&mut self, _cx: &mut ViewContext) -> Task> { + let input = self.input.as_ref().unwrap(); + + let _location = input.location.clone(); + let _unit = input.unit.clone(); + + let weather = self.current_weather.clone(); + + self.result = Some(weather); + + Task::ready(Ok(())) + } + + fn serialize(&self, _cx: &mut ViewContext) -> Self::SerializedState { + self.current_weather.clone() + } + + fn deserialize( + &mut self, + output: Self::SerializedState, + _cx: &mut ViewContext, + ) -> Result<()> { + self.current_weather = output; + Ok(()) + } + } + + impl LanguageModelTool for WeatherTool { + type View = WeatherView; + + fn name(&self) -> String { + "get_current_weather".to_string() + } + + fn description(&self) -> String { + "Fetches the current weather for a given location.".to_string() + } + + fn view(&self, cx: &mut WindowContext) -> View { + cx.new_view(|_cx| WeatherView::new(self.current_weather.clone())) + } + } + + #[gpui::test] + async fn test_openai_weather_example(cx: &mut TestAppContext) { + let (_, cx) = cx.add_window_view(|_cx| EmptyView); + + let mut registry = ToolRegistry::new(); + registry + .register(WeatherTool { + current_weather: WeatherResult { + location: "San Francisco".to_string(), + temperature: 21.0, + unit: "Celsius".to_string(), + }, + }) + .unwrap(); + + let definitions = registry.definitions(); + assert_eq!( + definitions, + [ToolFunctionDefinition { + name: 
"get_current_weather".to_string(), + description: "Fetches the current weather for a given location.".to_string(), + parameters: serde_json::from_value(json!({ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "WeatherQuery", + "type": "object", + "properties": { + "location": { + "type": "string" + }, + "unit": { + "type": "string" + } + }, + "required": ["location", "unit"] + })) + .unwrap(), + }] + ); + + let mut call = ToolFunctionCall { + id: "the-id".to_string(), + name: "get_cur".to_string(), + ..Default::default() + }; + + let task = cx.update(|cx| { + registry.update_tool_call( + &mut call, + Some("rent_weather"), + Some(r#"{"location": "San Francisco","#), + cx, + ); + registry.update_tool_call(&mut call, None, Some(r#" "unit": "Celsius"}"#), cx); + registry.execute_tool_call(&mut call, cx).unwrap() + }); + task.await.unwrap(); + + match &call.state { + ToolFunctionCallState::ExecutedTool(_view) => {} + _ => panic!(), + } + } +} diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml new file mode 100644 index 0000000..bfe22de --- /dev/null +++ b/crates/audio/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "audio" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/audio.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +collections.workspace = true +derive_more.workspace = true +gpui.workspace = true +parking_lot.workspace = true +rodio = { version = "0.17.1", default-features = false, features = ["wav"] } +util.workspace = true diff --git a/crates/audio/LICENSE-GPL b/crates/audio/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/audio/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/audio/src/assets.rs b/crates/audio/src/assets.rs new file mode 100644 index 0000000..387990c --- /dev/null +++ b/crates/audio/src/assets.rs @@ -0,0 +1,52 @@ +use std::{io::Cursor, sync::Arc}; + +use anyhow::Result; +use collections::HashMap; +use gpui::{AppContext, AssetSource, Global}; +use rodio::{ + source::{Buffered, SamplesConverter}, + Decoder, Source, +}; + +type Sound = Buffered>>, f32>>; + +pub struct SoundRegistry { + cache: Arc>>, + assets: Box, +} + +struct GlobalSoundRegistry(Arc); + +impl Global for GlobalSoundRegistry {} + +impl SoundRegistry { + pub fn new(source: impl AssetSource) -> Arc { + Arc::new(Self { + cache: Default::default(), + assets: Box::new(source), + }) + } + + pub fn global(cx: &AppContext) -> Arc { + cx.global::().0.clone() + } + + pub(crate) fn set_global(source: impl AssetSource, cx: &mut AppContext) { + cx.set_global(GlobalSoundRegistry(SoundRegistry::new(source))); + } + + pub fn get(&self, name: &str) -> Result> { + if let Some(wav) = self.cache.lock().get(name) { + return Ok(wav.clone()); + } + + let path = format!("sounds/{}.wav", name); + let bytes = self.assets.load(&path)?.into_owned(); + let cursor = Cursor::new(bytes); + let source = Decoder::new(cursor)?.convert_samples::().buffered(); + + self.cache.lock().insert(name.to_string(), source.clone()); + + Ok(source) + } +} diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs new file mode 100644 index 0000000..0389bd6 --- /dev/null +++ b/crates/audio/src/audio.rs @@ -0,0 +1,87 @@ +use assets::SoundRegistry; +use derive_more::{Deref, DerefMut}; +use gpui::{AppContext, AssetSource, BorrowAppContext, Global}; +use rodio::{OutputStream, OutputStreamHandle}; +use util::ResultExt; + +mod assets; + +pub fn 
init(source: impl AssetSource, cx: &mut AppContext) { + SoundRegistry::set_global(source, cx); + cx.set_global(GlobalAudio(Audio::new())); +} + +pub enum Sound { + Joined, + Leave, + Mute, + Unmute, + StartScreenshare, + StopScreenshare, +} + +impl Sound { + fn file(&self) -> &'static str { + match self { + Self::Joined => "joined_call", + Self::Leave => "leave_call", + Self::Mute => "mute", + Self::Unmute => "unmute", + Self::StartScreenshare => "start_screenshare", + Self::StopScreenshare => "stop_screenshare", + } + } +} + +pub struct Audio { + _output_stream: Option, + output_handle: Option, +} + +#[derive(Deref, DerefMut)] +struct GlobalAudio(Audio); + +impl Global for GlobalAudio {} + +impl Audio { + pub fn new() -> Self { + Self { + _output_stream: None, + output_handle: None, + } + } + + fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> { + if self.output_handle.is_none() { + let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip(); + self.output_handle = output_handle; + self._output_stream = _output_stream; + } + + self.output_handle.as_ref() + } + + pub fn play_sound(sound: Sound, cx: &mut AppContext) { + if !cx.has_global::() { + return; + } + + cx.update_global::(|this, cx| { + let output_handle = this.ensure_output_exists()?; + let source = SoundRegistry::global(cx).get(sound.file()).log_err()?; + output_handle.play_raw(source).log_err()?; + Some(()) + }); + } + + pub fn end_call(cx: &mut AppContext) { + if !cx.has_global::() { + return; + } + + cx.update_global::(|this, _| { + this._output_stream.take(); + this.output_handle.take(); + }); + } +} diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml new file mode 100644 index 0000000..4e5c64b --- /dev/null +++ b/crates/auto_update/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "auto_update" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/auto_update.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +client.workspace = true +db.workspace = true +editor.workspace = true +gpui.workspace = true +http.workspace = true +isahc.workspace = true +log.workspace = true +markdown_preview.workspace = true +menu.workspace = true +release_channel.workspace = true +schemars.workspace = true +serde.workspace = true +serde_derive.workspace = true +serde_json.workspace = true +settings.workspace = true +smol.workspace = true +tempfile.workspace = true +util.workspace = true +workspace.workspace = true diff --git a/crates/auto_update/LICENSE-GPL b/crates/auto_update/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/auto_update/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs new file mode 100644 index 0000000..e5b314d --- /dev/null +++ b/crates/auto_update/src/auto_update.rs @@ -0,0 +1,608 @@ +mod update_notification; + +use anyhow::{anyhow, Context, Result}; +use client::{Client, TelemetrySettings, ZED_APP_PATH}; +use db::kvp::KEY_VALUE_STORE; +use db::RELEASE_CHANNEL; +use editor::{Editor, MultiBuffer}; +use gpui::{ + actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext, + SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext, +}; +use isahc::AsyncBody; + +use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; +use schemars::JsonSchema; +use 
serde::Deserialize; +use serde_derive::Serialize; +use smol::{fs, io::AsyncReadExt}; + +use settings::{Settings, SettingsSources, SettingsStore}; +use smol::{fs::File, process::Command}; + +use http::{HttpClient, HttpClientWithUrl}; +use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; +use std::{ + env::consts::{ARCH, OS}, + ffi::OsString, + path::PathBuf, + sync::Arc, + time::Duration, +}; +use update_notification::UpdateNotification; +use util::ResultExt; +use workspace::notifications::NotificationId; +use workspace::Workspace; + +const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification"; +const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60); + +actions!( + auto_update, + [ + Check, + DismissErrorMessage, + ViewReleaseNotes, + ViewReleaseNotesLocally + ] +); + +#[derive(Serialize)] +struct UpdateRequestBody { + installation_id: Option>, + release_channel: Option<&'static str>, + telemetry: bool, +} + +#[derive(Clone, PartialEq, Eq)] +pub enum AutoUpdateStatus { + Idle, + Checking, + Downloading, + Installing, + Updated { binary_path: PathBuf }, + Errored, +} + +impl AutoUpdateStatus { + pub fn is_updated(&self) -> bool { + matches!(self, Self::Updated { .. }) + } +} + +pub struct AutoUpdater { + status: AutoUpdateStatus, + current_version: SemanticVersion, + http_client: Arc, + pending_poll: Option>>, +} + +#[derive(Deserialize)] +struct JsonRelease { + version: String, + url: String, +} + +struct AutoUpdateSetting(bool); + +/// Whether or not to automatically check for updates. +/// +/// Default: true +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] +#[serde(transparent)] +struct AutoUpdateSettingContent(bool); + +impl Settings for AutoUpdateSetting { + const KEY: Option<&'static str> = Some("auto_update"); + + type FileContent = Option; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + let auto_update = [sources.release_channel, sources.user] + .into_iter() + .find_map(|value| value.copied().flatten()) + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); + + Ok(Self(auto_update.0)) + } +} + +#[derive(Default)] +struct GlobalAutoUpdate(Option>); + +impl Global for GlobalAutoUpdate {} + +#[derive(Deserialize)] +struct ReleaseNotesBody { + title: String, + release_notes: String, +} + +pub fn init(http_client: Arc, cx: &mut AppContext) { + AutoUpdateSetting::register(cx); + + cx.observe_new_views(|workspace: &mut Workspace, _cx| { + workspace.register_action(|_, action: &Check, cx| check(action, cx)); + + workspace.register_action(|_, action, cx| { + view_release_notes(action, cx); + }); + + workspace.register_action(|workspace, _: &ViewReleaseNotesLocally, cx| { + view_release_notes_locally(workspace, cx); + }); + }) + .detach(); + + let version = release_channel::AppVersion::global(cx); + let auto_updater = cx.new_model(|cx| { + let updater = AutoUpdater::new(version, http_client); + + let mut update_subscription = AutoUpdateSetting::get_global(cx) + .0 + .then(|| updater.start_polling(cx)); + + cx.observe_global::(move |updater, cx| { + if AutoUpdateSetting::get_global(cx).0 { + if update_subscription.is_none() { + update_subscription = Some(updater.start_polling(cx)) + } + } else { + update_subscription.take(); + } + }) + .detach(); + + updater + }); + cx.set_global(GlobalAutoUpdate(Some(auto_updater))); +} + +pub fn check(_: &Check, cx: &mut WindowContext) { + if let Some(updater) = AutoUpdater::get(cx) { + updater.update(cx, |updater, cx| updater.poll(cx)); + } else { + 
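+        // No auto-updater global is registered (auto-updates are disabled for
+        // non-bundled builds), so explain why checking is unavailable.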
drop(cx.prompt( + gpui::PromptLevel::Info, + "Could not check for updates", + Some("Auto-updates disabled for non-bundled app."), + &["Ok"], + )); + } +} + +pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<()> { + let auto_updater = AutoUpdater::get(cx)?; + let release_channel = ReleaseChannel::try_global(cx)?; + + if matches!( + release_channel, + ReleaseChannel::Stable | ReleaseChannel::Preview + ) { + let auto_updater = auto_updater.read(cx); + let release_channel = release_channel.dev_name(); + let current_version = auto_updater.current_version; + let url = &auto_updater + .http_client + .build_url(&format!("/releases/{release_channel}/{current_version}")); + cx.open_url(&url); + } + + None +} + +fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext) { + let release_channel = ReleaseChannel::global(cx); + let version = AppVersion::global(cx).to_string(); + + let client = client::Client::global(cx).http_client(); + let url = client.build_url(&format!( + "/api/release_notes/{}/{}", + release_channel.dev_name(), + version + )); + + let markdown = workspace + .app_state() + .languages + .language_for_name("Markdown"); + + workspace + .with_local_workspace(cx, move |_, cx| { + cx.spawn(|workspace, mut cx| async move { + let markdown = markdown.await.log_err(); + let response = client.get(&url, Default::default(), true).await; + let Some(mut response) = response.log_err() else { + return; + }; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await.ok(); + + let body: serde_json::Result = + serde_json::from_slice(body.as_slice()); + + if let Ok(body) = body { + workspace + .update(&mut cx, |workspace, cx| { + let project = workspace.project().clone(); + let buffer = project.update(cx, |project, cx| { + project.create_local_buffer("", markdown, cx) + }); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, body.release_notes)], None, cx) + }); + let language_registry = project.read(cx).languages().clone(); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + + let tab_description = SharedString::from(body.title.to_string()); + let editor = cx + .new_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx)); + let workspace_handle = workspace.weak_handle(); + let view: View = MarkdownPreviewView::new( + MarkdownPreviewMode::Default, + editor, + workspace_handle, + language_registry, + Some(tab_description), + cx, + ); + workspace.add_item_to_active_pane(Box::new(view.clone()), None, cx); + cx.notify(); + }) + .log_err(); + } + }) + .detach(); + }) + .detach(); +} + +pub fn notify_of_any_new_update(cx: &mut ViewContext) -> Option<()> { + let updater = AutoUpdater::get(cx)?; + let version = updater.read(cx).current_version; + let should_show_notification = updater.read(cx).should_show_update_notification(cx); + + cx.spawn(|workspace, mut cx| async move { + let should_show_notification = should_show_notification.await?; + if should_show_notification { + workspace.update(&mut cx, |workspace, cx| { + workspace.show_notification( + NotificationId::unique::(), + cx, + |cx| cx.new_view(|_| UpdateNotification::new(version)), + ); + updater + .read(cx) + .set_should_show_update_notification(false, cx) + .detach_and_log_err(cx); + })?; + } + anyhow::Ok(()) + }) + .detach(); + + None +} + +impl AutoUpdater { + pub fn get(cx: &mut AppContext) -> Option> { + cx.default_global::().0.clone() + } + + fn new(current_version: SemanticVersion, http_client: Arc) -> Self { + Self { + status: 
AutoUpdateStatus::Idle, + current_version, + http_client, + pending_poll: None, + } + } + + pub fn start_polling(&self, cx: &mut ModelContext) -> Task> { + cx.spawn(|this, mut cx| async move { + loop { + this.update(&mut cx, |this, cx| this.poll(cx))?; + cx.background_executor().timer(POLL_INTERVAL).await; + } + }) + } + + pub fn poll(&mut self, cx: &mut ModelContext) { + if self.pending_poll.is_some() || self.status.is_updated() { + return; + } + + self.status = AutoUpdateStatus::Checking; + cx.notify(); + + self.pending_poll = Some(cx.spawn(|this, mut cx| async move { + let result = Self::update(this.upgrade()?, cx.clone()).await; + this.update(&mut cx, |this, cx| { + this.pending_poll = None; + if let Err(error) = result { + log::error!("auto-update failed: error:{:?}", error); + this.status = AutoUpdateStatus::Errored; + cx.notify(); + } + }) + .ok() + })); + } + + pub fn status(&self) -> AutoUpdateStatus { + self.status.clone() + } + + pub fn dismiss_error(&mut self, cx: &mut ModelContext) { + self.status = AutoUpdateStatus::Idle; + cx.notify(); + } + + async fn update(this: Model, mut cx: AsyncAppContext) -> Result<()> { + let (client, current_version) = this.read_with(&cx, |this, _| { + (this.http_client.clone(), this.current_version) + })?; + + let asset = match OS { + "linux" => format!("zed-linux-{}.tar.gz", ARCH), + "macos" => "Zed.dmg".into(), + _ => return Err(anyhow!("auto-update not supported for OS {:?}", OS)), + }; + + let mut url_string = client.build_url(&format!( + "/api/releases/latest?asset={}&os={}&arch={}", + asset, OS, ARCH + )); + cx.update(|cx| { + if let Some(param) = ReleaseChannel::try_global(cx) + .and_then(|release_channel| release_channel.release_query_param()) + { + url_string += "&"; + url_string += param; + } + })?; + + let mut response = client.get(&url_string, Default::default(), true).await?; + + let mut body = Vec::new(); + response + .body_mut() + .read_to_end(&mut body) + .await + .context("error reading release")?; + + let release: JsonRelease = + serde_json::from_slice(body.as_slice()).context("error deserializing release")?; + + let should_download = match *RELEASE_CHANNEL { + ReleaseChannel::Nightly => cx + .update(|cx| AppCommitSha::try_global(cx).map(|sha| release.version != sha.0)) + .ok() + .flatten() + .unwrap_or(true), + _ => release.version.parse::()? > current_version, + }; + + if !should_download { + this.update(&mut cx, |this, cx| { + this.status = AutoUpdateStatus::Idle; + cx.notify(); + })?; + return Ok(()); + } + + this.update(&mut cx, |this, cx| { + this.status = AutoUpdateStatus::Downloading; + cx.notify(); + })?; + + let temp_dir = tempfile::Builder::new() + .prefix("zed-auto-update") + .tempdir()?; + let downloaded_asset = download_release(&temp_dir, release, &asset, client, &cx).await?; + + this.update(&mut cx, |this, cx| { + this.status = AutoUpdateStatus::Installing; + cx.notify(); + })?; + + // We store the path of our current binary, before we install, since installation might + // delete it. Once deleted, it's hard to get the path to our binary on Linux. + // So we cache it here, which allows us to then restart later on. 
+ let binary_path = cx.update(|cx| cx.app_path())??; + + match OS { + "macos" => install_release_macos(&temp_dir, downloaded_asset, &cx).await, + "linux" => install_release_linux(&temp_dir, downloaded_asset, &cx).await, + _ => Err(anyhow!("not supported: {:?}", OS)), + }?; + + this.update(&mut cx, |this, cx| { + this.set_should_show_update_notification(true, cx) + .detach_and_log_err(cx); + this.status = AutoUpdateStatus::Updated { binary_path }; + cx.notify(); + })?; + + Ok(()) + } + + fn set_should_show_update_notification( + &self, + should_show: bool, + cx: &AppContext, + ) -> Task> { + cx.background_executor().spawn(async move { + if should_show { + KEY_VALUE_STORE + .write_kvp( + SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(), + "".to_string(), + ) + .await?; + } else { + KEY_VALUE_STORE + .delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string()) + .await?; + } + Ok(()) + }) + } + + fn should_show_update_notification(&self, cx: &AppContext) -> Task> { + cx.background_executor().spawn(async move { + Ok(KEY_VALUE_STORE + .read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)? + .is_some()) + }) + } +} + +async fn download_release( + temp_dir: &tempfile::TempDir, + release: JsonRelease, + target_filename: &str, + client: Arc, + cx: &AsyncAppContext, +) -> Result { + let target_path = temp_dir.path().join(target_filename); + let mut target_file = File::create(&target_path).await?; + + let (installation_id, release_channel, telemetry) = cx.update(|cx| { + let installation_id = Client::global(cx).telemetry().installation_id(); + let release_channel = + ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name()); + let telemetry = TelemetrySettings::get_global(cx).metrics; + + (installation_id, release_channel, telemetry) + })?; + + let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody { + installation_id, + release_channel, + telemetry, + })?); + + let mut response = client.get(&release.url, request_body, true).await?; + smol::io::copy(response.body_mut(), &mut target_file).await?; + log::info!("downloaded update. 
path:{:?}", target_path); + + Ok(target_path) +} + +async fn install_release_linux( + temp_dir: &tempfile::TempDir, + downloaded_tar_gz: PathBuf, + cx: &AsyncAppContext, +) -> Result<()> { + let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?; + let home_dir = PathBuf::from(std::env::var("HOME").context("no HOME env var set")?); + + let extracted = temp_dir.path().join("zed"); + fs::create_dir_all(&extracted) + .await + .context("failed to create directory into which to extract update")?; + + let output = Command::new("tar") + .arg("-xzf") + .arg(&downloaded_tar_gz) + .arg("-C") + .arg(&extracted) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "failed to extract {:?} to {:?}: {:?}", + downloaded_tar_gz, + extracted, + String::from_utf8_lossy(&output.stderr) + ); + + let suffix = if channel != "stable" { + format!("-{}", channel) + } else { + String::default() + }; + let app_folder_name = format!("zed{}.app", suffix); + + let from = extracted.join(&app_folder_name); + let to = home_dir.join(".local"); + + let output = Command::new("rsync") + .args(&["-av", "--delete"]) + .arg(&from) + .arg(&to) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "failed to copy Zed update from {:?} to {:?}: {:?}", + from, + to, + String::from_utf8_lossy(&output.stderr) + ); + + Ok(()) +} + +async fn install_release_macos( + temp_dir: &tempfile::TempDir, + downloaded_dmg: PathBuf, + cx: &AsyncAppContext, +) -> Result<()> { + let running_app_path = ZED_APP_PATH + .clone() + .map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?; + let running_app_filename = running_app_path + .file_name() + .ok_or_else(|| anyhow!("invalid running app path"))?; + + let mount_path = temp_dir.path().join("Zed"); + let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into(); + + mounted_app_path.push("/"); + let output = Command::new("hdiutil") + .args(&["attach", "-nobrowse"]) + .arg(&downloaded_dmg) + .arg("-mountroot") + .arg(&temp_dir.path()) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "failed to mount: {:?}", + String::from_utf8_lossy(&output.stderr) + ); + + let output = Command::new("rsync") + .args(&["-av", "--delete"]) + .arg(&mounted_app_path) + .arg(&running_app_path) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "failed to copy app: {:?}", + String::from_utf8_lossy(&output.stderr) + ); + + let output = Command::new("hdiutil") + .args(&["detach"]) + .arg(&mount_path) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "failed to unount: {:?}", + String::from_utf8_lossy(&output.stderr) + ); + + Ok(()) +} diff --git a/crates/auto_update/src/update_notification.rs b/crates/auto_update/src/update_notification.rs new file mode 100644 index 0000000..66028c2 --- /dev/null +++ b/crates/auto_update/src/update_notification.rs @@ -0,0 +1,59 @@ +use gpui::{ + div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render, + SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, +}; +use menu::Cancel; +use release_channel::ReleaseChannel; +use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}; + +pub struct UpdateNotification { + version: SemanticVersion, +} + +impl EventEmitter for UpdateNotification {} + +impl Render for UpdateNotification { + fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { + let app_name = ReleaseChannel::global(cx).display_name(); + + v_flex() + 
.on_action(cx.listener(UpdateNotification::dismiss)) + .elevation_3(cx) + .p_4() + .child( + h_flex() + .justify_between() + .child(Label::new(format!( + "Updated to {app_name} {}", + self.version + ))) + .child( + div() + .id("cancel") + .child(Icon::new(IconName::Close)) + .cursor_pointer() + .on_click(cx.listener(|this, _, cx| this.dismiss(&menu::Cancel, cx))), + ), + ) + .child( + div() + .id("notes") + .child(Label::new("View the release notes")) + .cursor_pointer() + .on_click(cx.listener(|this, _, cx| { + crate::view_release_notes(&Default::default(), cx); + this.dismiss(&menu::Cancel, cx) + })), + ) + } +} + +impl UpdateNotification { + pub fn new(version: SemanticVersion) -> Self { + Self { version } + } + + pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext) { + cx.emit(DismissEvent); + } +} diff --git a/crates/breadcrumbs/Cargo.toml b/crates/breadcrumbs/Cargo.toml new file mode 100644 index 0000000..45d0f09 --- /dev/null +++ b/crates/breadcrumbs/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "breadcrumbs" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/breadcrumbs.rs" +doctest = false + +[dependencies] +editor.workspace = true +gpui.workspace = true +itertools.workspace = true +outline.workspace = true +theme.workspace = true +ui.workspace = true +workspace.workspace = true + +[dev-dependencies] +editor = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/breadcrumbs/LICENSE-GPL b/crates/breadcrumbs/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/breadcrumbs/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs new file mode 100644 index 0000000..d70b1cb --- /dev/null +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -0,0 +1,138 @@ +use editor::Editor; +use gpui::{ + Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription, + ViewContext, +}; +use itertools::Itertools; +use std::cmp; +use theme::ActiveTheme; +use ui::{prelude::*, ButtonLike, ButtonStyle, Label, Tooltip}; +use workspace::{ + item::{BreadcrumbText, ItemEvent, ItemHandle}, + ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, +}; + +pub struct Breadcrumbs { + pane_focused: bool, + active_item: Option>, + subscription: Option, +} + +impl Breadcrumbs { + pub fn new() -> Self { + Self { + pane_focused: false, + active_item: Default::default(), + subscription: Default::default(), + } + } +} + +impl EventEmitter for Breadcrumbs {} + +impl Render for Breadcrumbs { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + const MAX_SEGMENTS: usize = 12; + let element = h_flex().text_ui(cx); + let Some(active_item) = self.active_item.as_ref() else { + return element; + }; + let Some(mut segments) = active_item.breadcrumbs(cx.theme(), cx) else { + return element; + }; + + let prefix_end_ix = cmp::min(segments.len(), MAX_SEGMENTS / 2); + let suffix_start_ix = cmp::max( + prefix_end_ix, + segments.len().saturating_sub(MAX_SEGMENTS / 2), + ); + if suffix_start_ix > prefix_end_ix { + segments.splice( + prefix_end_ix..suffix_start_ix, + Some(BreadcrumbText { + text: "⋯".into(), + highlights: None, + font: None, + }), + ); + } + + let highlighted_segments = segments.into_iter().map(|segment| { + let mut text_style = 
cx.text_style(); + if let Some(font) = segment.font { + text_style.font_family = font.family; + text_style.font_features = font.features; + text_style.font_style = font.style; + text_style.font_weight = font.weight; + } + text_style.color = Color::Muted.color(cx); + + StyledText::new(segment.text.replace('\n', "␤")) + .with_highlights(&text_style, segment.highlights.unwrap_or_default()) + .into_any() + }); + let breadcrumbs = Itertools::intersperse_with(highlighted_segments, || { + Label::new("›").color(Color::Muted).into_any_element() + }); + + let breadcrumbs_stack = h_flex().gap_1().children(breadcrumbs); + match active_item + .downcast::() + .map(|editor| editor.downgrade()) + { + Some(editor) => element.child( + ButtonLike::new("toggle outline view") + .child(breadcrumbs_stack) + .style(ButtonStyle::Subtle) + .on_click(move |_, cx| { + if let Some(editor) = editor.upgrade() { + outline::toggle(editor, &outline::Toggle, cx) + } + }) + .tooltip(|cx| Tooltip::for_action("Show symbol outline", &outline::Toggle, cx)), + ), + None => element + // Match the height of the `ButtonLike` in the other arm. + .h(rems_from_px(22.)) + .child(breadcrumbs_stack), + } + } +} + +impl ToolbarItemView for Breadcrumbs { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + cx: &mut ViewContext, + ) -> ToolbarItemLocation { + cx.notify(); + self.active_item = None; + if let Some(item) = active_pane_item { + let this = cx.view().downgrade(); + self.subscription = Some(item.subscribe_to_item_events( + cx, + Box::new(move |event, cx| { + if let ItemEvent::UpdateBreadcrumbs = event { + this.update(cx, |this, cx| { + cx.notify(); + if let Some(active_item) = this.active_item.as_ref() { + cx.emit(ToolbarItemEvent::ChangeLocation( + active_item.breadcrumb_location(cx), + )) + } + }) + .ok(); + } + }), + )); + self.active_item = Some(item.boxed_clone()); + item.breadcrumb_location(cx) + } else { + ToolbarItemLocation::Hidden + } + } + + fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext) { + self.pane_focused = pane_focused; + } +} diff --git a/crates/call/Cargo.toml b/crates/call/Cargo.toml new file mode 100644 index 0000000..0377c4f --- /dev/null +++ b/crates/call/Cargo.toml @@ -0,0 +1,53 @@ +[package] +name = "call" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/call.rs" +doctest = false + +[features] +test-support = [ + "client/test-support", + "collections/test-support", + "gpui/test-support", + "live_kit_client/test-support", + "project/test-support", + "util/test-support" +] + +[dependencies] +anyhow.workspace = true +audio.workspace = true +client.workspace = true +collections.workspace = true +fs.workspace = true +futures.workspace = true +gpui.workspace = true +language.workspace = true +live_kit_client.workspace = true +log.workspace = true +postage.workspace = true +project.workspace = true +schemars.workspace = true +serde.workspace = true +serde_derive.workspace = true +settings.workspace = true +util.workspace = true + +[dev-dependencies] +client = { workspace = true, features = ["test-support"] } +collections = { workspace = true, features = ["test-support"] } +fs = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +live_kit_client = { workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } 
+util = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } diff --git a/crates/call/LICENSE-GPL b/crates/call/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/call/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/call/src/call.rs b/crates/call/src/call.rs new file mode 100644 index 0000000..66187e0 --- /dev/null +++ b/crates/call/src/call.rs @@ -0,0 +1,549 @@ +pub mod call_settings; +pub mod participant; +pub mod room; + +use anyhow::{anyhow, Result}; +use audio::Audio; +use call_settings::CallSettings; +use client::{proto, ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE}; +use collections::HashSet; +use futures::{channel::oneshot, future::Shared, Future, FutureExt}; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Subscription, + Task, WeakModel, +}; +use postage::watch; +use project::Project; +use room::Event; +use settings::Settings; +use std::sync::Arc; + +pub use participant::ParticipantLocation; +pub use room::Room; + +struct GlobalActiveCall(Model); + +impl Global for GlobalActiveCall {} + +pub fn init(client: Arc, user_store: Model, cx: &mut AppContext) { + CallSettings::register(cx); + + let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx)); + cx.set_global(GlobalActiveCall(active_call)); +} + +pub struct OneAtATime { + cancel: Option>, +} + +impl OneAtATime { + /// spawn a task in the given context. + /// if another task is spawned before that resolves, or if the OneAtATime itself is dropped, the first task will be cancelled and return Ok(None) + /// otherwise you'll see the result of the task. + fn spawn(&mut self, cx: &mut AppContext, f: F) -> Task>> + where + F: 'static + FnOnce(AsyncAppContext) -> Fut, + Fut: Future>, + R: 'static, + { + let (tx, rx) = oneshot::channel(); + self.cancel.replace(tx); + cx.spawn(|cx| async move { + futures::select_biased! { + _ = rx.fuse() => Ok(None), + result = f(cx).fuse() => result.map(Some), + } + }) + } + + fn running(&self) -> bool { + self.cancel + .as_ref() + .is_some_and(|cancel| !cancel.is_canceled()) + } +} + +#[derive(Clone)] +pub struct IncomingCall { + pub room_id: u64, + pub calling_user: Arc, + pub participants: Vec>, + pub initial_project: Option, +} + +/// Singleton global maintaining the user's participation in a room across workspaces. 
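+///
+/// A minimal usage sketch (illustrative only; assumes a `called_user_id` and a
+/// context that can update models):
+///
+/// ```ignore
+/// let active_call = ActiveCall::global(cx);
+/// active_call
+///     .update(cx, |call, cx| call.invite(called_user_id, None, cx))
+///     .detach_and_log_err(cx);
+/// ```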
+pub struct ActiveCall { + room: Option<(Model, Vec)>, + pending_room_creation: Option, Arc>>>>, + location: Option>, + _join_debouncer: OneAtATime, + pending_invites: HashSet, + incoming_call: ( + watch::Sender>, + watch::Receiver>, + ), + client: Arc, + user_store: Model, + _subscriptions: Vec, +} + +impl EventEmitter for ActiveCall {} + +impl ActiveCall { + fn new(client: Arc, user_store: Model, cx: &mut ModelContext) -> Self { + Self { + room: None, + pending_room_creation: None, + location: None, + pending_invites: Default::default(), + incoming_call: watch::channel(), + _join_debouncer: OneAtATime { cancel: None }, + _subscriptions: vec![ + client.add_request_handler(cx.weak_model(), Self::handle_incoming_call), + client.add_message_handler(cx.weak_model(), Self::handle_call_canceled), + ], + client, + user_store, + } + } + + pub fn channel_id(&self, cx: &AppContext) -> Option { + self.room()?.read(cx).channel_id() + } + + async fn handle_incoming_call( + this: Model, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { + let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?; + let call = IncomingCall { + room_id: envelope.payload.room_id, + participants: user_store + .update(&mut cx, |user_store, cx| { + user_store.get_users(envelope.payload.participant_user_ids, cx) + })? + .await?, + calling_user: user_store + .update(&mut cx, |user_store, cx| { + user_store.get_user(envelope.payload.calling_user_id, cx) + })? + .await?, + initial_project: envelope.payload.initial_project, + }; + this.update(&mut cx, |this, _| { + *this.incoming_call.0.borrow_mut() = Some(call); + })?; + + Ok(proto::Ack {}) + } + + async fn handle_call_canceled( + this: Model, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, _| { + let mut incoming_call = this.incoming_call.0.borrow_mut(); + if incoming_call + .as_ref() + .map_or(false, |call| call.room_id == envelope.payload.room_id) + { + incoming_call.take(); + } + })?; + Ok(()) + } + + pub fn global(cx: &AppContext) -> Model { + cx.global::().0.clone() + } + + pub fn try_global(cx: &AppContext) -> Option> { + cx.try_global::() + .map(|call| call.0.clone()) + } + + pub fn invite( + &mut self, + called_user_id: u64, + initial_project: Option>, + cx: &mut ModelContext, + ) -> Task> { + if !self.pending_invites.insert(called_user_id) { + return Task::ready(Err(anyhow!("user was already invited"))); + } + cx.notify(); + + if self._join_debouncer.running() { + return Task::ready(Ok(())); + } + + let room = if let Some(room) = self.room().cloned() { + Some(Task::ready(Ok(room)).shared()) + } else { + self.pending_room_creation.clone() + }; + + let invite = if let Some(room) = room { + cx.spawn(move |_, mut cx| async move { + let room = room.await.map_err(|err| anyhow!("{:?}", err))?; + + let initial_project_id = if let Some(initial_project) = initial_project { + Some( + room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))? + .await?, + ) + } else { + None + }; + + room.update(&mut cx, move |room, cx| { + room.call(called_user_id, initial_project_id, cx) + })? + .await?; + + anyhow::Ok(()) + }) + } else { + let client = self.client.clone(); + let user_store = self.user_store.clone(); + let room = cx + .spawn(move |this, mut cx| async move { + let create_room = async { + let room = cx + .update(|cx| { + Room::create( + called_user_id, + initial_project, + client, + user_store, + cx, + ) + })? 
+ .await?; + + this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))? + .await?; + + anyhow::Ok(room) + }; + + let room = create_room.await; + this.update(&mut cx, |this, _| this.pending_room_creation = None)?; + room.map_err(Arc::new) + }) + .shared(); + self.pending_room_creation = Some(room.clone()); + cx.background_executor().spawn(async move { + room.await.map_err(|err| anyhow!("{:?}", err))?; + anyhow::Ok(()) + }) + }; + + cx.spawn(move |this, mut cx| async move { + let result = invite.await; + if result.is_ok() { + this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?; + } else { + //TODO: report collaboration error + log::error!("invite failed: {:?}", result); + } + + this.update(&mut cx, |this, cx| { + this.pending_invites.remove(&called_user_id); + cx.notify(); + })?; + result + }) + } + + pub fn cancel_invite( + &mut self, + called_user_id: u64, + cx: &mut ModelContext, + ) -> Task> { + let room_id = if let Some(room) = self.room() { + room.read(cx).id() + } else { + return Task::ready(Err(anyhow!("no active call"))); + }; + + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::CancelCall { + room_id, + called_user_id, + }) + .await?; + anyhow::Ok(()) + }) + } + + pub fn incoming(&self) -> watch::Receiver> { + self.incoming_call.1.clone() + } + + pub fn accept_incoming(&mut self, cx: &mut ModelContext) -> Task> { + if self.room.is_some() { + return Task::ready(Err(anyhow!("cannot join while on another call"))); + } + + let call = if let Some(call) = self.incoming_call.0.borrow_mut().take() { + call + } else { + return Task::ready(Err(anyhow!("no incoming call"))); + }; + + if self.pending_room_creation.is_some() { + return Task::ready(Ok(())); + } + + let room_id = call.room_id; + let client = self.client.clone(); + let user_store = self.user_store.clone(); + let join = self + ._join_debouncer + .spawn(cx, move |cx| Room::join(room_id, client, user_store, cx)); + + cx.spawn(|this, mut cx| async move { + let room = join.await?; + this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))? + .await?; + this.update(&mut cx, |this, cx| { + this.report_call_event("accept incoming", cx) + })?; + Ok(()) + }) + } + + pub fn decline_incoming(&mut self, _: &mut ModelContext) -> Result<()> { + let call = self + .incoming_call + .0 + .borrow_mut() + .take() + .ok_or_else(|| anyhow!("no incoming call"))?; + report_call_event_for_room("decline incoming", call.room_id, None, &self.client); + self.client.send(proto::DeclineCall { + room_id: call.room_id, + })?; + Ok(()) + } + + pub fn join_channel( + &mut self, + channel_id: ChannelId, + cx: &mut ModelContext, + ) -> Task>>> { + if let Some(room) = self.room().cloned() { + if room.read(cx).channel_id() == Some(channel_id) { + return Task::ready(Ok(Some(room))); + } else { + room.update(cx, |room, cx| room.clear_state(cx)); + } + } + + if self.pending_room_creation.is_some() { + return Task::ready(Ok(None)); + } + + let client = self.client.clone(); + let user_store = self.user_store.clone(); + let join = self._join_debouncer.spawn(cx, move |cx| async move { + Room::join_channel(channel_id, client, user_store, cx).await + }); + + cx.spawn(|this, mut cx| async move { + let room = join.await?; + this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))? 
+ .await?; + this.update(&mut cx, |this, cx| { + this.report_call_event("join channel", cx) + })?; + Ok(room) + }) + } + + pub fn hang_up(&mut self, cx: &mut ModelContext) -> Task> { + cx.notify(); + self.report_call_event("hang up", cx); + + Audio::end_call(cx); + + let channel_id = self.channel_id(cx); + if let Some((room, _)) = self.room.take() { + cx.emit(Event::RoomLeft { channel_id }); + room.update(cx, |room, cx| room.leave(cx)) + } else { + Task::ready(Ok(())) + } + } + + pub fn share_project( + &mut self, + project: Model, + cx: &mut ModelContext, + ) -> Task> { + if let Some((room, _)) = self.room.as_ref() { + self.report_call_event("share project", cx); + room.update(cx, |room, cx| room.share_project(project, cx)) + } else { + Task::ready(Err(anyhow!("no active call"))) + } + } + + pub fn unshare_project( + &mut self, + project: Model, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some((room, _)) = self.room.as_ref() { + self.report_call_event("unshare project", cx); + room.update(cx, |room, cx| room.unshare_project(project, cx)) + } else { + Err(anyhow!("no active call")) + } + } + + pub fn location(&self) -> Option<&WeakModel> { + self.location.as_ref() + } + + pub fn set_location( + &mut self, + project: Option<&Model>, + cx: &mut ModelContext, + ) -> Task> { + if project.is_some() || !*ZED_ALWAYS_ACTIVE { + self.location = project.map(|project| project.downgrade()); + if let Some((room, _)) = self.room.as_ref() { + return room.update(cx, |room, cx| room.set_location(project, cx)); + } + } + Task::ready(Ok(())) + } + + fn set_room( + &mut self, + room: Option>, + cx: &mut ModelContext, + ) -> Task> { + if room.as_ref() == self.room.as_ref().map(|room| &room.0) { + Task::ready(Ok(())) + } else { + cx.notify(); + if let Some(room) = room { + if room.read(cx).status().is_offline() { + self.room = None; + Task::ready(Ok(())) + } else { + let subscriptions = vec![ + cx.observe(&room, |this, room, cx| { + if room.read(cx).status().is_offline() { + this.set_room(None, cx).detach_and_log_err(cx); + } + + cx.notify(); + }), + cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())), + ]; + self.room = Some((room.clone(), subscriptions)); + let location = self + .location + .as_ref() + .and_then(|location| location.upgrade()); + let channel_id = room.read(cx).channel_id(); + cx.emit(Event::RoomJoined { channel_id }); + room.update(cx, |room, cx| room.set_location(location.as_ref(), cx)) + } + } else { + self.room = None; + Task::ready(Ok(())) + } + } + } + + pub fn room(&self) -> Option<&Model> { + self.room.as_ref().map(|(room, _)| room) + } + + pub fn client(&self) -> Arc { + self.client.clone() + } + + pub fn pending_invites(&self) -> &HashSet { + &self.pending_invites + } + + pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) { + if let Some(room) = self.room() { + let room = room.read(cx); + report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client); + } + } +} + +pub fn report_call_event_for_room( + operation: &'static str, + room_id: u64, + channel_id: Option, + client: &Arc, +) { + let telemetry = client.telemetry(); + + telemetry.report_call_event(operation, Some(room_id), channel_id) +} + +pub fn report_call_event_for_channel( + operation: &'static str, + channel_id: ChannelId, + client: &Arc, + cx: &AppContext, +) { + let room = ActiveCall::global(cx).read(cx).room(); + + let telemetry = client.telemetry(); + + telemetry.report_call_event(operation, room.map(|r| r.read(cx).id()), Some(channel_id)) +} + 
+#[cfg(test)] +mod test { + use gpui::TestAppContext; + + use crate::OneAtATime; + + #[gpui::test] + async fn test_one_at_a_time(cx: &mut TestAppContext) { + let mut one_at_a_time = OneAtATime { cancel: None }; + + assert_eq!( + cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(1) })) + .await + .unwrap(), + Some(1) + ); + + let (a, b) = cx.update(|cx| { + ( + one_at_a_time.spawn(cx, |_| async { + assert!(false); + Ok(2) + }), + one_at_a_time.spawn(cx, |_| async { Ok(3) }), + ) + }); + + assert_eq!(a.await.unwrap(), None); + assert_eq!(b.await.unwrap(), Some(3)); + + let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) })); + drop(one_at_a_time); + + assert_eq!(promise.await.unwrap(), None); + } +} diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs new file mode 100644 index 0000000..446178f --- /dev/null +++ b/crates/call/src/call_settings.rs @@ -0,0 +1,35 @@ +use anyhow::Result; +use gpui::AppContext; +use schemars::JsonSchema; +use serde_derive::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug)] +pub struct CallSettings { + pub mute_on_join: bool, + pub share_on_join: bool, +} + +/// Configuration of voice calls in Zed. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct CallSettingsContent { + /// Whether the microphone should be muted when joining a channel or a call. + /// + /// Default: false + pub mute_on_join: Option, + + /// Whether your current project should be shared when joining an empty channel. + /// + /// Default: true + pub share_on_join: Option, +} + +impl Settings for CallSettings { + const KEY: Option<&'static str> = Some("calls"); + + type FileContent = CallSettingsContent; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + sources.json_merge() + } +} diff --git a/crates/call/src/participant.rs b/crates/call/src/participant.rs new file mode 100644 index 0000000..9faefc6 --- /dev/null +++ b/crates/call/src/participant.rs @@ -0,0 +1,54 @@ +use anyhow::{anyhow, Result}; +use client::ParticipantIndex; +use client::{proto, User}; +use collections::HashMap; +use gpui::WeakModel; +pub use live_kit_client::Frame; +pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack}; +use project::Project; +use std::sync::Arc; + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum ParticipantLocation { + SharedProject { project_id: u64 }, + UnsharedProject, + External, +} + +impl ParticipantLocation { + pub fn from_proto(location: Option) -> Result { + match location.and_then(|l| l.variant) { + Some(proto::participant_location::Variant::SharedProject(project)) => { + Ok(Self::SharedProject { + project_id: project.id, + }) + } + Some(proto::participant_location::Variant::UnsharedProject(_)) => { + Ok(Self::UnsharedProject) + } + Some(proto::participant_location::Variant::External(_)) => Ok(Self::External), + None => Err(anyhow!("participant location was not provided")), + } + } +} + +#[derive(Clone, Default)] +pub struct LocalParticipant { + pub projects: Vec, + pub active_project: Option>, + pub role: proto::ChannelRole, +} + +#[derive(Clone, Debug)] +pub struct RemoteParticipant { + pub user: Arc, + pub peer_id: proto::PeerId, + pub role: proto::ChannelRole, + pub projects: Vec, + pub location: ParticipantLocation, + pub participant_index: ParticipantIndex, + pub muted: bool, + pub speaking: bool, + pub video_tracks: HashMap>, + pub audio_tracks: HashMap>, +} diff --git a/crates/call/src/room.rs b/crates/call/src/room.rs new 
file mode 100644 index 0000000..61fb694 --- /dev/null +++ b/crates/call/src/room.rs @@ -0,0 +1,1725 @@ +use crate::{ + call_settings::CallSettings, + participant::{LocalParticipant, ParticipantLocation, RemoteParticipant}, +}; +use anyhow::{anyhow, Result}; +use audio::{Audio, Sound}; +use client::{ + proto::{self, PeerId}, + ChannelId, Client, ParticipantIndex, TypedEnvelope, User, UserStore, +}; +use collections::{BTreeMap, HashMap, HashSet}; +use fs::Fs; +use futures::{FutureExt, StreamExt}; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel, +}; +use language::LanguageRegistry; +use live_kit_client::{LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RoomUpdate}; +use postage::{sink::Sink, stream::Stream, watch}; +use project::Project; +use settings::Settings as _; +use std::{future::Future, mem, sync::Arc, time::Duration}; +use util::{post_inc, ResultExt, TryFutureExt}; + +pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Event { + RoomJoined { + channel_id: Option, + }, + ParticipantLocationChanged { + participant_id: proto::PeerId, + }, + RemoteVideoTracksChanged { + participant_id: proto::PeerId, + }, + RemoteAudioTracksChanged { + participant_id: proto::PeerId, + }, + RemoteProjectShared { + owner: Arc, + project_id: u64, + worktree_root_names: Vec, + }, + RemoteProjectUnshared { + project_id: u64, + }, + RemoteProjectJoined { + project_id: u64, + }, + RemoteProjectInvitationDiscarded { + project_id: u64, + }, + RoomLeft { + channel_id: Option, + }, +} + +pub struct Room { + id: u64, + channel_id: Option, + live_kit: Option, + status: RoomStatus, + shared_projects: HashSet>, + joined_projects: HashSet>, + local_participant: LocalParticipant, + remote_participants: BTreeMap, + pending_participants: Vec>, + participant_user_ids: HashSet, + pending_call_count: usize, + leave_when_empty: bool, + client: Arc, + user_store: Model, + follows_by_leader_id_project_id: HashMap<(PeerId, u64), Vec>, + client_subscriptions: Vec, + _subscriptions: Vec, + room_update_completed_tx: watch::Sender>, + room_update_completed_rx: watch::Receiver>, + pending_room_update: Option>, + maintain_connection: Option>>, +} + +impl EventEmitter for Room {} + +impl Room { + pub fn channel_id(&self) -> Option { + self.channel_id + } + + pub fn is_sharing_project(&self) -> bool { + !self.shared_projects.is_empty() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn is_connected(&self) -> bool { + if let Some(live_kit) = self.live_kit.as_ref() { + matches!( + *live_kit.room.status().borrow(), + live_kit_client::ConnectionState::Connected { .. } + ) + } else { + false + } + } + + fn new( + id: u64, + channel_id: Option, + live_kit_connection_info: Option, + client: Arc, + user_store: Model, + cx: &mut ModelContext, + ) -> Self { + let live_kit_room = if let Some(connection_info) = live_kit_connection_info { + let room = live_kit_client::Room::new(); + let mut status = room.status(); + // Consume the initial status of the room. 
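// (The watch receiver reports the room's current connection state immediately; draining it here means the loop below only reacts to later status changes.)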
+ let _ = status.try_recv(); + let _maintain_room = cx.spawn(|this, mut cx| async move { + while let Some(status) = status.next().await { + let this = if let Some(this) = this.upgrade() { + this + } else { + break; + }; + + if status == live_kit_client::ConnectionState::Disconnected { + this.update(&mut cx, |this, cx| this.leave(cx).log_err()) + .ok(); + break; + } + } + }); + + let _handle_updates = cx.spawn({ + let room = room.clone(); + move |this, mut cx| async move { + let mut updates = room.updates(); + while let Some(update) = updates.next().await { + let this = if let Some(this) = this.upgrade() { + this + } else { + break; + }; + + this.update(&mut cx, |this, cx| { + this.live_kit_room_updated(update, cx).log_err() + }) + .ok(); + } + } + }); + + let connect = room.connect(&connection_info.server_url, &connection_info.token); + cx.spawn(|this, mut cx| async move { + connect.await?; + this.update(&mut cx, |this, cx| { + if this.can_use_microphone() { + if let Some(live_kit) = &this.live_kit { + if !live_kit.muted_by_user && !live_kit.deafened { + return this.share_microphone(cx); + } + } + } + Task::ready(Ok(())) + })? + .await + }) + .detach_and_log_err(cx); + + Some(LiveKitRoom { + room, + screen_track: LocalTrack::None, + microphone_track: LocalTrack::None, + next_publish_id: 0, + muted_by_user: Self::mute_on_join(cx), + deafened: false, + speaking: false, + _maintain_room, + _handle_updates, + }) + } else { + None + }; + + let maintain_connection = cx.spawn({ + let client = client.clone(); + move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err() + }); + + Audio::play_sound(Sound::Joined, cx); + + let (room_update_completed_tx, room_update_completed_rx) = watch::channel(); + + Self { + id, + channel_id, + live_kit: live_kit_room, + status: RoomStatus::Online, + shared_projects: Default::default(), + joined_projects: Default::default(), + participant_user_ids: Default::default(), + local_participant: Default::default(), + remote_participants: Default::default(), + pending_participants: Default::default(), + pending_call_count: 0, + client_subscriptions: vec![ + client.add_message_handler(cx.weak_model(), Self::handle_room_updated) + ], + _subscriptions: vec![ + cx.on_release(Self::released), + cx.on_app_quit(Self::app_will_quit), + ], + leave_when_empty: false, + pending_room_update: None, + client, + user_store, + follows_by_leader_id_project_id: Default::default(), + maintain_connection: Some(maintain_connection), + room_update_completed_tx, + room_update_completed_rx, + } + } + + pub(crate) fn create( + called_user_id: u64, + initial_project: Option>, + client: Arc, + user_store: Model, + cx: &mut AppContext, + ) -> Task>> { + cx.spawn(move |mut cx| async move { + let response = client.request(proto::CreateRoom {}).await?; + let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?; + let room = cx.new_model(|cx| { + let mut room = Self::new( + room_proto.id, + None, + response.live_kit_connection_info, + client, + user_store, + cx, + ); + if let Some(participant) = room_proto.participants.first() { + room.local_participant.role = participant.role() + } + room + })?; + + let initial_project_id = if let Some(initial_project) = initial_project { + let initial_project_id = room + .update(&mut cx, |room, cx| { + room.share_project(initial_project.clone(), cx) + })? 
+ .await?; + Some(initial_project_id) + } else { + None + }; + + match room + .update(&mut cx, |room, cx| { + room.leave_when_empty = true; + room.call(called_user_id, initial_project_id, cx) + })? + .await + { + Ok(()) => Ok(room), + Err(error) => Err(anyhow!("room creation failed: {:?}", error)), + } + }) + } + + pub(crate) async fn join_channel( + channel_id: ChannelId, + client: Arc, + user_store: Model, + cx: AsyncAppContext, + ) -> Result> { + Self::from_join_response( + client + .request(proto::JoinChannel { + channel_id: channel_id.0, + }) + .await?, + client, + user_store, + cx, + ) + } + + pub(crate) async fn join( + room_id: u64, + client: Arc, + user_store: Model, + cx: AsyncAppContext, + ) -> Result> { + Self::from_join_response( + client.request(proto::JoinRoom { id: room_id }).await?, + client, + user_store, + cx, + ) + } + + fn released(&mut self, cx: &mut AppContext) { + if self.status.is_online() { + self.leave_internal(cx).detach_and_log_err(cx); + } + } + + fn app_will_quit(&mut self, cx: &mut ModelContext) -> impl Future { + let task = if self.status.is_online() { + let leave = self.leave_internal(cx); + Some(cx.background_executor().spawn(async move { + leave.await.log_err(); + })) + } else { + None + }; + + async move { + if let Some(task) = task { + task.await; + } + } + } + + pub fn mute_on_join(cx: &AppContext) -> bool { + CallSettings::get_global(cx).mute_on_join || client::IMPERSONATE_LOGIN.is_some() + } + + fn from_join_response( + response: proto::JoinRoomResponse, + client: Arc, + user_store: Model, + mut cx: AsyncAppContext, + ) -> Result> { + let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?; + let room = cx.new_model(|cx| { + Self::new( + room_proto.id, + response.channel_id.map(ChannelId), + response.live_kit_connection_info, + client, + user_store, + cx, + ) + })?; + room.update(&mut cx, |room, cx| { + room.leave_when_empty = room.channel_id.is_none(); + room.apply_room_update(room_proto, cx)?; + anyhow::Ok(()) + })??; + Ok(room) + } + + fn should_leave(&self) -> bool { + self.leave_when_empty + && self.pending_room_update.is_none() + && self.pending_participants.is_empty() + && self.remote_participants.is_empty() + && self.pending_call_count == 0 + } + + pub(crate) fn leave(&mut self, cx: &mut ModelContext) -> Task> { + cx.notify(); + self.leave_internal(cx) + } + + fn leave_internal(&mut self, cx: &mut AppContext) -> Task> { + if self.status.is_offline() { + return Task::ready(Err(anyhow!("room is offline"))); + } + + log::info!("leaving room"); + Audio::play_sound(Sound::Leave, cx); + + self.clear_state(cx); + + let leave_room = self.client.request(proto::LeaveRoom {}); + cx.background_executor().spawn(async move { + leave_room.await?; + anyhow::Ok(()) + }) + } + + pub(crate) fn clear_state(&mut self, cx: &mut AppContext) { + for project in self.shared_projects.drain() { + if let Some(project) = project.upgrade() { + project.update(cx, |project, cx| { + project.unshare(cx).log_err(); + }); + } + } + for project in self.joined_projects.drain() { + if let Some(project) = project.upgrade() { + project.update(cx, |project, cx| { + project.disconnected_from_host(cx); + project.close(cx); + }); + } + } + + self.status = RoomStatus::Offline; + self.remote_participants.clear(); + self.pending_participants.clear(); + self.participant_user_ids.clear(); + self.client_subscriptions.clear(); + self.live_kit.take(); + self.pending_room_update.take(); + self.maintain_connection.take(); + } + + async fn maintain_connection( + this: WeakModel, + 
client: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + let mut client_status = client.status(); + loop { + let _ = client_status.try_recv(); + let is_connected = client_status.borrow().is_connected(); + // Even if we're initially connected, any future change of the status means we momentarily disconnected. + if !is_connected || client_status.next().await.is_some() { + log::info!("detected client disconnection"); + + this.upgrade() + .ok_or_else(|| anyhow!("room was dropped"))? + .update(&mut cx, |this, cx| { + this.status = RoomStatus::Rejoining; + cx.notify(); + })?; + + // Wait for client to re-establish a connection to the server. + { + let mut reconnection_timeout = + cx.background_executor().timer(RECONNECT_TIMEOUT).fuse(); + let client_reconnection = async { + let mut remaining_attempts = 3; + while remaining_attempts > 0 { + if client_status.borrow().is_connected() { + log::info!("client reconnected, attempting to rejoin room"); + + let Some(this) = this.upgrade() else { break }; + match this.update(&mut cx, |this, cx| this.rejoin(cx)) { + Ok(task) => { + if task.await.log_err().is_some() { + return true; + } else { + remaining_attempts -= 1; + } + } + Err(_app_dropped) => return false, + } + } else if client_status.borrow().is_signed_out() { + return false; + } + + log::info!( + "waiting for client status change, remaining attempts {}", + remaining_attempts + ); + client_status.next().await; + } + false + } + .fuse(); + futures::pin_mut!(client_reconnection); + + futures::select_biased! { + reconnected = client_reconnection => { + if reconnected { + log::info!("successfully reconnected to room"); + // If we successfully joined the room, go back around the loop + // waiting for future connection status changes. + continue; + } + } + _ = reconnection_timeout => { + log::info!("room reconnection timeout expired"); + } + } + } + + break; + } + } + + // The client failed to re-establish a connection to the server + // or an error occurred while trying to re-join the room. Either way + // we leave the room and return an error. 
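// Reaching this point means the rejoin attempts (3) were exhausted, the client signed out, or RECONNECT_TIMEOUT elapsed.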
+ if let Some(this) = this.upgrade() { + log::info!("reconnection failed, leaving room"); + let _ = this.update(&mut cx, |this, cx| this.leave(cx))?; + } + Err(anyhow!( + "can't reconnect to room: client failed to re-establish connection" + )) + } + + fn rejoin(&mut self, cx: &mut ModelContext) -> Task> { + let mut projects = HashMap::default(); + let mut reshared_projects = Vec::new(); + let mut rejoined_projects = Vec::new(); + self.shared_projects.retain(|project| { + if let Some(handle) = project.upgrade() { + let project = handle.read(cx); + if let Some(project_id) = project.remote_id() { + projects.insert(project_id, handle.clone()); + reshared_projects.push(proto::UpdateProject { + project_id, + worktrees: project.worktree_metadata_protos(cx), + }); + return true; + } + } + false + }); + self.joined_projects.retain(|project| { + if let Some(handle) = project.upgrade() { + let project = handle.read(cx); + if let Some(project_id) = project.remote_id() { + projects.insert(project_id, handle.clone()); + rejoined_projects.push(proto::RejoinProject { + id: project_id, + worktrees: project + .worktrees() + .map(|worktree| { + let worktree = worktree.read(cx); + proto::RejoinWorktree { + id: worktree.id().to_proto(), + scan_id: worktree.completed_scan_id() as u64, + } + }) + .collect(), + }); + } + return true; + } + false + }); + + let response = self.client.request_envelope(proto::RejoinRoom { + id: self.id, + reshared_projects, + rejoined_projects, + }); + + cx.spawn(|this, mut cx| async move { + let response = response.await?; + let message_id = response.message_id; + let response = response.payload; + let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?; + this.update(&mut cx, |this, cx| { + this.status = RoomStatus::Online; + this.apply_room_update(room_proto, cx)?; + + for reshared_project in response.reshared_projects { + if let Some(project) = projects.get(&reshared_project.id) { + project.update(cx, |project, cx| { + project.reshared(reshared_project, cx).log_err(); + }); + } + } + + for rejoined_project in response.rejoined_projects { + if let Some(project) = projects.get(&rejoined_project.id) { + project.update(cx, |project, cx| { + project.rejoined(rejoined_project, message_id, cx).log_err(); + }); + } + } + + anyhow::Ok(()) + })? 
+ }) + } + + pub fn id(&self) -> u64 { + self.id + } + + pub fn status(&self) -> RoomStatus { + self.status + } + + pub fn local_participant(&self) -> &LocalParticipant { + &self.local_participant + } + + pub fn remote_participants(&self) -> &BTreeMap { + &self.remote_participants + } + + pub fn remote_participant_for_peer_id(&self, peer_id: PeerId) -> Option<&RemoteParticipant> { + self.remote_participants + .values() + .find(|p| p.peer_id == peer_id) + } + + pub fn role_for_user(&self, user_id: u64) -> Option { + self.remote_participants + .get(&user_id) + .map(|participant| participant.role) + } + + pub fn contains_guests(&self) -> bool { + self.local_participant.role == proto::ChannelRole::Guest + || self + .remote_participants + .values() + .any(|p| p.role == proto::ChannelRole::Guest) + } + + pub fn local_participant_is_admin(&self) -> bool { + self.local_participant.role == proto::ChannelRole::Admin + } + + pub fn local_participant_is_guest(&self) -> bool { + self.local_participant.role == proto::ChannelRole::Guest + } + + pub fn set_participant_role( + &mut self, + user_id: u64, + role: proto::ChannelRole, + cx: &ModelContext, + ) -> Task> { + let client = self.client.clone(); + let room_id = self.id; + let role = role.into(); + cx.spawn(|_, _| async move { + client + .request(proto::SetRoomParticipantRole { + room_id, + user_id, + role, + }) + .await + .map(|_| ()) + }) + } + + pub fn pending_participants(&self) -> &[Arc] { + &self.pending_participants + } + + pub fn contains_participant(&self, user_id: u64) -> bool { + self.participant_user_ids.contains(&user_id) + } + + pub fn followers_for(&self, leader_id: PeerId, project_id: u64) -> &[PeerId] { + self.follows_by_leader_id_project_id + .get(&(leader_id, project_id)) + .map_or(&[], |v| v.as_slice()) + } + + /// Returns the most 'active' projects, defined as most people in the project + pub fn most_active_project(&self, cx: &AppContext) -> Option<(u64, u64)> { + let mut project_hosts_and_guest_counts = HashMap::, u32)>::default(); + for participant in self.remote_participants.values() { + match participant.location { + ParticipantLocation::SharedProject { project_id } => { + project_hosts_and_guest_counts + .entry(project_id) + .or_default() + .1 += 1; + } + ParticipantLocation::External | ParticipantLocation::UnsharedProject => {} + } + for project in &participant.projects { + project_hosts_and_guest_counts + .entry(project.id) + .or_default() + .0 = Some(participant.user.id); + } + } + + if let Some(user) = self.user_store.read(cx).current_user() { + for project in &self.local_participant.projects { + project_hosts_and_guest_counts + .entry(project.id) + .or_default() + .0 = Some(user.id); + } + } + + project_hosts_and_guest_counts + .into_iter() + .filter_map(|(id, (host, guest_count))| Some((id, host?, guest_count))) + .max_by_key(|(_, _, guest_count)| *guest_count) + .map(|(id, host, _)| (id, host)) + } + + async fn handle_room_updated( + this: Model, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + let room = envelope + .payload + .room + .ok_or_else(|| anyhow!("invalid room"))?; + this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))? + } + + fn apply_room_update( + &mut self, + mut room: proto::Room, + cx: &mut ModelContext, + ) -> Result<()> { + // Filter ourselves out from the room's participants. 
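// The local user's entry is removed from `room.participants` and applied separately below (role and shared projects); the remaining entries become remote participants.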
+ let local_participant_ix = room + .participants + .iter() + .position(|participant| Some(participant.user_id) == self.client.user_id()); + let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix)); + + let pending_participant_user_ids = room + .pending_participants + .iter() + .map(|p| p.user_id) + .collect::>(); + + let remote_participant_user_ids = room + .participants + .iter() + .map(|p| p.user_id) + .collect::>(); + + let (remote_participants, pending_participants) = + self.user_store.update(cx, move |user_store, cx| { + ( + user_store.get_users(remote_participant_user_ids, cx), + user_store.get_users(pending_participant_user_ids, cx), + ) + }); + + self.pending_room_update = Some(cx.spawn(|this, mut cx| async move { + let (remote_participants, pending_participants) = + futures::join!(remote_participants, pending_participants); + + this.update(&mut cx, |this, cx| { + this.participant_user_ids.clear(); + + if let Some(participant) = local_participant { + let role = participant.role(); + this.local_participant.projects = participant.projects; + if this.local_participant.role != role { + this.local_participant.role = role; + + if role == proto::ChannelRole::Guest { + for project in mem::take(&mut this.shared_projects) { + if let Some(project) = project.upgrade() { + this.unshare_project(project, cx).log_err(); + } + } + this.local_participant.projects.clear(); + if let Some(live_kit_room) = &mut this.live_kit { + live_kit_room.stop_publishing(cx); + } + } + + this.joined_projects.retain(|project| { + if let Some(project) = project.upgrade() { + project.update(cx, |project, cx| project.set_role(role, cx)); + true + } else { + false + } + }); + } + } else { + this.local_participant.projects.clear(); + } + + if let Some(participants) = remote_participants.log_err() { + for (participant, user) in room.participants.into_iter().zip(participants) { + let Some(peer_id) = participant.peer_id else { + continue; + }; + let participant_index = ParticipantIndex(participant.participant_index); + this.participant_user_ids.insert(participant.user_id); + + let old_projects = this + .remote_participants + .get(&participant.user_id) + .into_iter() + .flat_map(|existing| &existing.projects) + .map(|project| project.id) + .collect::>(); + let new_projects = participant + .projects + .iter() + .map(|project| project.id) + .collect::>(); + + for project in &participant.projects { + if !old_projects.contains(&project.id) { + cx.emit(Event::RemoteProjectShared { + owner: user.clone(), + project_id: project.id, + worktree_root_names: project.worktree_root_names.clone(), + }); + } + } + + for unshared_project_id in old_projects.difference(&new_projects) { + this.joined_projects.retain(|project| { + if let Some(project) = project.upgrade() { + project.update(cx, |project, cx| { + if project.remote_id() == Some(*unshared_project_id) { + project.disconnected_from_host(cx); + false + } else { + true + } + }) + } else { + false + } + }); + cx.emit(Event::RemoteProjectUnshared { + project_id: *unshared_project_id, + }); + } + + let role = participant.role(); + let location = ParticipantLocation::from_proto(participant.location) + .unwrap_or(ParticipantLocation::External); + if let Some(remote_participant) = + this.remote_participants.get_mut(&participant.user_id) + { + remote_participant.peer_id = peer_id; + remote_participant.projects = participant.projects; + remote_participant.participant_index = participant_index; + if location != remote_participant.location + || role != 
remote_participant.role + { + remote_participant.location = location; + remote_participant.role = role; + cx.emit(Event::ParticipantLocationChanged { + participant_id: peer_id, + }); + } + } else { + this.remote_participants.insert( + participant.user_id, + RemoteParticipant { + user: user.clone(), + participant_index, + peer_id, + projects: participant.projects, + location, + role, + muted: true, + speaking: false, + video_tracks: Default::default(), + audio_tracks: Default::default(), + }, + ); + + Audio::play_sound(Sound::Joined, cx); + + if let Some(live_kit) = this.live_kit.as_ref() { + let video_tracks = + live_kit.room.remote_video_tracks(&user.id.to_string()); + let audio_tracks = + live_kit.room.remote_audio_tracks(&user.id.to_string()); + let publications = live_kit + .room + .remote_audio_track_publications(&user.id.to_string()); + + for track in video_tracks { + this.live_kit_room_updated( + RoomUpdate::SubscribedToRemoteVideoTrack(track), + cx, + ) + .log_err(); + } + + for (track, publication) in + audio_tracks.iter().zip(publications.iter()) + { + this.live_kit_room_updated( + RoomUpdate::SubscribedToRemoteAudioTrack( + track.clone(), + publication.clone(), + ), + cx, + ) + .log_err(); + } + } + } + } + + this.remote_participants.retain(|user_id, participant| { + if this.participant_user_ids.contains(user_id) { + true + } else { + for project in &participant.projects { + cx.emit(Event::RemoteProjectUnshared { + project_id: project.id, + }); + } + false + } + }); + } + + if let Some(pending_participants) = pending_participants.log_err() { + this.pending_participants = pending_participants; + for participant in &this.pending_participants { + this.participant_user_ids.insert(participant.id); + } + } + + this.follows_by_leader_id_project_id.clear(); + for follower in room.followers { + let project_id = follower.project_id; + let (leader, follower) = match (follower.leader_id, follower.follower_id) { + (Some(leader), Some(follower)) => (leader, follower), + + _ => { + log::error!("Follower message {follower:?} missing some state"); + continue; + } + }; + + let list = this + .follows_by_leader_id_project_id + .entry((leader, project_id)) + .or_insert(Vec::new()); + if !list.contains(&follower) { + list.push(follower); + } + } + + this.pending_room_update.take(); + if this.should_leave() { + log::info!("room is empty, leaving"); + let _ = this.leave(cx); + } + + this.user_store.update(cx, |user_store, cx| { + let participant_indices_by_user_id = this + .remote_participants + .iter() + .map(|(user_id, participant)| (*user_id, participant.participant_index)) + .collect(); + user_store.set_participant_indices(participant_indices_by_user_id, cx); + }); + + this.check_invariants(); + this.room_update_completed_tx.try_send(Some(())).ok(); + cx.notify(); + }) + .ok(); + })); + + cx.notify(); + Ok(()) + } + + pub fn room_update_completed(&mut self) -> impl Future { + let mut done_rx = self.room_update_completed_rx.clone(); + async move { + while let Some(result) = done_rx.next().await { + if result.is_some() { + break; + } + } + } + } + + fn live_kit_room_updated( + &mut self, + update: RoomUpdate, + cx: &mut ModelContext, + ) -> Result<()> { + match update { + RoomUpdate::SubscribedToRemoteVideoTrack(track) => { + let user_id = track.publisher_id().parse()?; + let track_id = track.sid().to_string(); + let participant = self + .remote_participants + .get_mut(&user_id) + .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?; + 
participant.video_tracks.insert(track_id.clone(), track); + cx.emit(Event::RemoteVideoTracksChanged { + participant_id: participant.peer_id, + }); + } + + RoomUpdate::UnsubscribedFromRemoteVideoTrack { + publisher_id, + track_id, + } => { + let user_id = publisher_id.parse()?; + let participant = self + .remote_participants + .get_mut(&user_id) + .ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?; + participant.video_tracks.remove(&track_id); + cx.emit(Event::RemoteVideoTracksChanged { + participant_id: participant.peer_id, + }); + } + + RoomUpdate::ActiveSpeakersChanged { speakers } => { + let mut speaker_ids = speakers + .into_iter() + .filter_map(|speaker_sid| speaker_sid.parse().ok()) + .collect::>(); + speaker_ids.sort_unstable(); + for (sid, participant) in &mut self.remote_participants { + if let Ok(_) = speaker_ids.binary_search(sid) { + participant.speaking = true; + } else { + participant.speaking = false; + } + } + if let Some(id) = self.client.user_id() { + if let Some(room) = &mut self.live_kit { + if let Ok(_) = speaker_ids.binary_search(&id) { + room.speaking = true; + } else { + room.speaking = false; + } + } + } + } + + RoomUpdate::RemoteAudioTrackMuteChanged { track_id, muted } => { + let mut found = false; + for participant in &mut self.remote_participants.values_mut() { + for track in participant.audio_tracks.values() { + if track.sid() == track_id { + found = true; + break; + } + } + if found { + participant.muted = muted; + break; + } + } + } + + RoomUpdate::SubscribedToRemoteAudioTrack(track, publication) => { + if let Some(live_kit) = &self.live_kit { + if live_kit.deafened { + track.stop(); + cx.foreground_executor() + .spawn(publication.set_enabled(false)) + .detach(); + } + } + + let user_id = track.publisher_id().parse()?; + let track_id = track.sid().to_string(); + let participant = self + .remote_participants + .get_mut(&user_id) + .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?; + participant.audio_tracks.insert(track_id.clone(), track); + participant.muted = publication.is_muted(); + + cx.emit(Event::RemoteAudioTracksChanged { + participant_id: participant.peer_id, + }); + } + + RoomUpdate::UnsubscribedFromRemoteAudioTrack { + publisher_id, + track_id, + } => { + let user_id = publisher_id.parse()?; + let participant = self + .remote_participants + .get_mut(&user_id) + .ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?; + participant.audio_tracks.remove(&track_id); + cx.emit(Event::RemoteAudioTracksChanged { + participant_id: participant.peer_id, + }); + } + + RoomUpdate::LocalAudioTrackUnpublished { publication } => { + log::info!("unpublished audio track {}", publication.sid()); + if let Some(room) = &mut self.live_kit { + room.microphone_track = LocalTrack::None; + } + } + + RoomUpdate::LocalVideoTrackUnpublished { publication } => { + log::info!("unpublished video track {}", publication.sid()); + if let Some(room) = &mut self.live_kit { + room.screen_track = LocalTrack::None; + } + } + + RoomUpdate::LocalAudioTrackPublished { publication } => { + log::info!("published audio track {}", publication.sid()); + } + + RoomUpdate::LocalVideoTrackPublished { publication } => { + log::info!("published video track {}", publication.sid()); + } + } + + cx.notify(); + Ok(()) + } + + fn check_invariants(&self) { + #[cfg(any(test, feature = "test-support"))] + { + for participant in self.remote_participants.values() { + assert!(self.participant_user_ids.contains(&participant.user.id)); + 
assert_ne!(participant.user.id, self.client.user_id().unwrap()); + } + + for participant in &self.pending_participants { + assert!(self.participant_user_ids.contains(&participant.id)); + assert_ne!(participant.id, self.client.user_id().unwrap()); + } + + assert_eq!( + self.participant_user_ids.len(), + self.remote_participants.len() + self.pending_participants.len() + ); + } + } + + pub(crate) fn call( + &mut self, + called_user_id: u64, + initial_project_id: Option, + cx: &mut ModelContext, + ) -> Task> { + if self.status.is_offline() { + return Task::ready(Err(anyhow!("room is offline"))); + } + + cx.notify(); + let client = self.client.clone(); + let room_id = self.id; + self.pending_call_count += 1; + cx.spawn(move |this, mut cx| async move { + let result = client + .request(proto::Call { + room_id, + called_user_id, + initial_project_id, + }) + .await; + this.update(&mut cx, |this, cx| { + this.pending_call_count -= 1; + if this.should_leave() { + this.leave(cx).detach_and_log_err(cx); + } + })?; + result?; + Ok(()) + }) + } + + pub fn join_project( + &mut self, + id: u64, + language_registry: Arc, + fs: Arc, + cx: &mut ModelContext, + ) -> Task>> { + let client = self.client.clone(); + let user_store = self.user_store.clone(); + cx.emit(Event::RemoteProjectJoined { project_id: id }); + cx.spawn(move |this, mut cx| async move { + let project = + Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?; + + this.update(&mut cx, |this, cx| { + this.joined_projects.retain(|project| { + if let Some(project) = project.upgrade() { + !project.read(cx).is_disconnected() + } else { + false + } + }); + this.joined_projects.insert(project.downgrade()); + })?; + Ok(project) + }) + } + + pub fn share_project( + &mut self, + project: Model, + cx: &mut ModelContext, + ) -> Task> { + let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id() + { + self.client.request(proto::ShareProject { + room_id: self.id(), + worktrees: vec![], + dev_server_project_id: Some(dev_server_project_id.0), + }) + } else { + if let Some(project_id) = project.read(cx).remote_id() { + return Task::ready(Ok(project_id)); + } + + self.client.request(proto::ShareProject { + room_id: self.id(), + worktrees: project.read(cx).worktree_metadata_protos(cx), + dev_server_project_id: None, + }) + }; + + cx.spawn(|this, mut cx| async move { + let response = request.await?; + + project.update(&mut cx, |project, cx| { + project.shared(response.project_id, cx) + })??; + + // If the user's location is in this project, it changes from UnsharedProject to SharedProject. + this.update(&mut cx, |this, cx| { + this.shared_projects.insert(project.downgrade()); + let active_project = this.local_participant.active_project.as_ref(); + if active_project.map_or(false, |location| *location == project) { + this.set_location(Some(&project), cx) + } else { + Task::ready(Ok(())) + } + })? 
+ .await?; + + Ok(response.project_id) + }) + } + + pub(crate) fn unshare_project( + &mut self, + project: Model, + cx: &mut ModelContext, + ) -> Result<()> { + let project_id = match project.read(cx).remote_id() { + Some(project_id) => project_id, + None => return Ok(()), + }; + + self.client.send(proto::UnshareProject { project_id })?; + project.update(cx, |this, cx| this.unshare(cx))?; + + if self.local_participant.active_project == Some(project.downgrade()) { + self.set_location(Some(&project), cx).detach_and_log_err(cx); + } + Ok(()) + } + + pub(crate) fn set_location( + &mut self, + project: Option<&Model>, + cx: &mut ModelContext, + ) -> Task> { + if self.status.is_offline() { + return Task::ready(Err(anyhow!("room is offline"))); + } + + let client = self.client.clone(); + let room_id = self.id; + let location = if let Some(project) = project { + self.local_participant.active_project = Some(project.downgrade()); + if let Some(project_id) = project.read(cx).remote_id() { + proto::participant_location::Variant::SharedProject( + proto::participant_location::SharedProject { id: project_id }, + ) + } else { + proto::participant_location::Variant::UnsharedProject( + proto::participant_location::UnsharedProject {}, + ) + } + } else { + self.local_participant.active_project = None; + proto::participant_location::Variant::External(proto::participant_location::External {}) + }; + + cx.notify(); + cx.background_executor().spawn(async move { + client + .request(proto::UpdateParticipantLocation { + room_id, + location: Some(proto::ParticipantLocation { + variant: Some(location), + }), + }) + .await?; + Ok(()) + }) + } + + pub fn is_screen_sharing(&self) -> bool { + self.live_kit.as_ref().map_or(false, |live_kit| { + !matches!(live_kit.screen_track, LocalTrack::None) + }) + } + + pub fn is_sharing_mic(&self) -> bool { + self.live_kit.as_ref().map_or(false, |live_kit| { + !matches!(live_kit.microphone_track, LocalTrack::None) + }) + } + + pub fn is_muted(&self) -> bool { + self.live_kit.as_ref().map_or(false, |live_kit| { + matches!(live_kit.microphone_track, LocalTrack::None) + || live_kit.muted_by_user + || live_kit.deafened + }) + } + + pub fn is_speaking(&self) -> bool { + self.live_kit + .as_ref() + .map_or(false, |live_kit| live_kit.speaking) + } + + pub fn is_deafened(&self) -> Option { + self.live_kit.as_ref().map(|live_kit| live_kit.deafened) + } + + pub fn can_use_microphone(&self) -> bool { + use proto::ChannelRole::*; + match self.local_participant.role { + Admin | Member | Talker => true, + Guest | Banned => false, + } + } + + pub fn can_share_projects(&self) -> bool { + use proto::ChannelRole::*; + match self.local_participant.role { + Admin | Member => true, + Guest | Banned | Talker => false, + } + } + + #[track_caller] + pub fn share_microphone(&mut self, cx: &mut ModelContext) -> Task> { + if self.status.is_offline() { + return Task::ready(Err(anyhow!("room is offline"))); + } + + let publish_id = if let Some(live_kit) = self.live_kit.as_mut() { + let publish_id = post_inc(&mut live_kit.next_publish_id); + live_kit.microphone_track = LocalTrack::Pending { publish_id }; + cx.notify(); + publish_id + } else { + return Task::ready(Err(anyhow!("live-kit was not initialized"))); + }; + + cx.spawn(move |this, mut cx| async move { + let publish_track = async { + let track = LocalAudioTrack::create(); + this.upgrade() + .ok_or_else(|| anyhow!("room was dropped"))? + .update(&mut cx, |this, _| { + this.live_kit + .as_ref() + .map(|live_kit| live_kit.room.publish_audio_track(track)) + })? 
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))? + .await + }; + let publication = publish_track.await; + this.upgrade() + .ok_or_else(|| anyhow!("room was dropped"))? + .update(&mut cx, |this, cx| { + let live_kit = this + .live_kit + .as_mut() + .ok_or_else(|| anyhow!("live-kit was not initialized"))?; + + let canceled = if let LocalTrack::Pending { + publish_id: cur_publish_id, + } = &live_kit.microphone_track + { + *cur_publish_id != publish_id + } else { + true + }; + + match publication { + Ok(publication) => { + if canceled { + live_kit.room.unpublish_track(publication); + } else { + if live_kit.muted_by_user || live_kit.deafened { + cx.background_executor() + .spawn(publication.set_mute(true)) + .detach(); + } + live_kit.microphone_track = LocalTrack::Published { + track_publication: publication, + }; + cx.notify(); + } + Ok(()) + } + Err(error) => { + if canceled { + Ok(()) + } else { + live_kit.microphone_track = LocalTrack::None; + cx.notify(); + Err(error) + } + } + } + })? + }) + } + + pub fn share_screen(&mut self, cx: &mut ModelContext) -> Task> { + if self.status.is_offline() { + return Task::ready(Err(anyhow!("room is offline"))); + } else if self.is_screen_sharing() { + return Task::ready(Err(anyhow!("screen was already shared"))); + } + + let (displays, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() { + let publish_id = post_inc(&mut live_kit.next_publish_id); + live_kit.screen_track = LocalTrack::Pending { publish_id }; + cx.notify(); + (live_kit.room.display_sources(), publish_id) + } else { + return Task::ready(Err(anyhow!("live-kit was not initialized"))); + }; + + cx.spawn(move |this, mut cx| async move { + let publish_track = async { + let displays = displays.await?; + let display = displays + .first() + .ok_or_else(|| anyhow!("no display found"))?; + let track = LocalVideoTrack::screen_share_for_display(display); + this.upgrade() + .ok_or_else(|| anyhow!("room was dropped"))? + .update(&mut cx, |this, _| { + this.live_kit + .as_ref() + .map(|live_kit| live_kit.room.publish_video_track(track)) + })? + .ok_or_else(|| anyhow!("live-kit was not initialized"))? + .await + }; + + let publication = publish_track.await; + this.upgrade() + .ok_or_else(|| anyhow!("room was dropped"))? + .update(&mut cx, |this, cx| { + let live_kit = this + .live_kit + .as_mut() + .ok_or_else(|| anyhow!("live-kit was not initialized"))?; + + let canceled = if let LocalTrack::Pending { + publish_id: cur_publish_id, + } = &live_kit.screen_track + { + *cur_publish_id != publish_id + } else { + true + }; + + match publication { + Ok(publication) => { + if canceled { + live_kit.room.unpublish_track(publication); + } else { + live_kit.screen_track = LocalTrack::Published { + track_publication: publication, + }; + cx.notify(); + } + + Audio::play_sound(Sound::StartScreenshare, cx); + + Ok(()) + } + Err(error) => { + if canceled { + Ok(()) + } else { + live_kit.screen_track = LocalTrack::None; + cx.notify(); + Err(error) + } + } + } + })? + }) + } + + pub fn toggle_mute(&mut self, cx: &mut ModelContext) { + if let Some(live_kit) = self.live_kit.as_mut() { + // When unmuting, undeafen if the user was deafened before. 
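// (set_deafened(false) below also re-enables the remote audio track publications that were disabled while deafened.)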
+ let was_deafened = live_kit.deafened; + if live_kit.muted_by_user + || live_kit.deafened + || matches!(live_kit.microphone_track, LocalTrack::None) + { + live_kit.muted_by_user = false; + live_kit.deafened = false; + } else { + live_kit.muted_by_user = true; + } + let muted = live_kit.muted_by_user; + let should_undeafen = was_deafened && !live_kit.deafened; + + if let Some(task) = self.set_mute(muted, cx) { + task.detach_and_log_err(cx); + } + + if should_undeafen { + if let Some(task) = self.set_deafened(false, cx) { + task.detach_and_log_err(cx); + } + } + } + } + + pub fn toggle_deafen(&mut self, cx: &mut ModelContext) { + if let Some(live_kit) = self.live_kit.as_mut() { + // When deafening, mute the microphone if it was not already muted. + // When un-deafening, unmute the microphone, unless it was explicitly muted. + let deafened = !live_kit.deafened; + live_kit.deafened = deafened; + let should_change_mute = !live_kit.muted_by_user; + + if let Some(task) = self.set_deafened(deafened, cx) { + task.detach_and_log_err(cx); + } + + if should_change_mute { + if let Some(task) = self.set_mute(deafened, cx) { + task.detach_and_log_err(cx); + } + } + } + } + + pub fn unshare_screen(&mut self, cx: &mut ModelContext) -> Result<()> { + if self.status.is_offline() { + return Err(anyhow!("room is offline")); + } + + let live_kit = self + .live_kit + .as_mut() + .ok_or_else(|| anyhow!("live-kit was not initialized"))?; + match mem::take(&mut live_kit.screen_track) { + LocalTrack::None => Err(anyhow!("screen was not shared")), + LocalTrack::Pending { .. } => { + cx.notify(); + Ok(()) + } + LocalTrack::Published { + track_publication, .. + } => { + live_kit.room.unpublish_track(track_publication); + cx.notify(); + + Audio::play_sound(Sound::StopScreenshare, cx); + Ok(()) + } + } + } + + fn set_deafened( + &mut self, + deafened: bool, + cx: &mut ModelContext, + ) -> Option>> { + let live_kit = self.live_kit.as_mut()?; + cx.notify(); + + let mut track_updates = Vec::new(); + for participant in self.remote_participants.values() { + for publication in live_kit + .room + .remote_audio_track_publications(&participant.user.id.to_string()) + { + track_updates.push(publication.set_enabled(!deafened)); + } + + for track in participant.audio_tracks.values() { + if deafened { + track.stop(); + } else { + track.start(); + } + } + } + + Some(cx.foreground_executor().spawn(async move { + for result in futures::future::join_all(track_updates).await { + result?; + } + Ok(()) + })) + } + + fn set_mute( + &mut self, + should_mute: bool, + cx: &mut ModelContext, + ) -> Option>> { + let live_kit = self.live_kit.as_mut()?; + cx.notify(); + + if should_mute { + Audio::play_sound(Sound::Mute, cx); + } else { + Audio::play_sound(Sound::Unmute, cx); + } + + match &mut live_kit.microphone_track { + LocalTrack::None => { + if should_mute { + None + } else { + Some(self.share_microphone(cx)) + } + } + LocalTrack::Pending { .. } => None, + LocalTrack::Published { track_publication } => Some( + cx.foreground_executor() + .spawn(track_publication.set_mute(should_mute)), + ), + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn set_display_sources(&self, sources: Vec) { + self.live_kit + .as_ref() + .unwrap() + .room + .set_display_sources(sources); + } +} + +struct LiveKitRoom { + room: Arc, + screen_track: LocalTrack, + microphone_track: LocalTrack, + /// Tracks whether we're currently in a muted state due to auto-mute from deafening or manual mute performed by user. 
+ muted_by_user: bool, + deafened: bool, + speaking: bool, + next_publish_id: usize, + _maintain_room: Task<()>, + _handle_updates: Task<()>, +} + +impl LiveKitRoom { + fn stop_publishing(&mut self, cx: &mut ModelContext) { + if let LocalTrack::Published { + track_publication, .. + } = mem::replace(&mut self.microphone_track, LocalTrack::None) + { + self.room.unpublish_track(track_publication); + cx.notify(); + } + + if let LocalTrack::Published { + track_publication, .. + } = mem::replace(&mut self.screen_track, LocalTrack::None) + { + self.room.unpublish_track(track_publication); + cx.notify(); + } + } +} + +enum LocalTrack { + None, + Pending { + publish_id: usize, + }, + Published { + track_publication: LocalTrackPublication, + }, +} + +impl Default for LocalTrack { + fn default() -> Self { + Self::None + } +} + +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum RoomStatus { + Online, + Rejoining, + Offline, +} + +impl RoomStatus { + pub fn is_offline(&self) -> bool { + matches!(self, RoomStatus::Offline) + } + + pub fn is_online(&self) -> bool { + matches!(self, RoomStatus::Online) + } +} diff --git a/crates/channel/Cargo.toml b/crates/channel/Cargo.toml new file mode 100644 index 0000000..5aed4a6 --- /dev/null +++ b/crates/channel/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "channel" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/channel.rs" +doctest = false + +[features] +test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"] + +[dependencies] +anyhow.workspace = true +client.workspace = true +clock.workspace = true +collections.workspace = true +futures.workspace = true +gpui.workspace = true +language.workspace = true +log.workspace = true +rand.workspace = true +release_channel.workspace = true +rpc.workspace = true +settings.workspace = true +sum_tree.workspace = true +text.workspace = true +time.workspace = true +util.workspace = true + +[dev-dependencies] +collections = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +rpc = { workspace = true, features = ["test-support"] } +client = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } diff --git a/crates/channel/LICENSE-GPL b/crates/channel/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/channel/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/channel/src/channel.rs b/crates/channel/src/channel.rs new file mode 100644 index 0000000..aee92d0 --- /dev/null +++ b/crates/channel/src/channel.rs @@ -0,0 +1,23 @@ +mod channel_buffer; +mod channel_chat; +mod channel_store; + +use client::{Client, UserStore}; +use gpui::{AppContext, Model}; +use std::sync::Arc; + +pub use channel_buffer::{ChannelBuffer, ChannelBufferEvent, ACKNOWLEDGE_DEBOUNCE_INTERVAL}; +pub use channel_chat::{ + mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId, + MessageParams, +}; +pub use channel_store::{Channel, ChannelEvent, ChannelMembership, ChannelStore}; + +#[cfg(test)] +mod channel_store_tests; + +pub fn init(client: &Arc, user_store: Model, cx: &mut AppContext) { + channel_store::init(client, user_store, cx); + channel_buffer::init(client); + channel_chat::init(client); +} diff --git 
a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs new file mode 100644 index 0000000..c2115a7 --- /dev/null +++ b/crates/channel/src/channel_buffer.rs @@ -0,0 +1,269 @@ +use crate::{Channel, ChannelStore}; +use anyhow::Result; +use client::{ChannelId, Client, Collaborator, UserStore, ZED_ALWAYS_ACTIVE}; +use collections::HashMap; +use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task}; +use language::proto::serialize_version; +use rpc::{ + proto::{self, PeerId}, + TypedEnvelope, +}; +use std::{sync::Arc, time::Duration}; +use text::BufferId; +use util::ResultExt; + +pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250); + +pub(crate) fn init(client: &Arc) { + client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer); + client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborators); +} + +pub struct ChannelBuffer { + pub channel_id: ChannelId, + connected: bool, + collaborators: HashMap, + user_store: Model, + channel_store: Model, + buffer: Model, + buffer_epoch: u64, + client: Arc, + subscription: Option, + acknowledge_task: Option>>, +} + +pub enum ChannelBufferEvent { + CollaboratorsChanged, + Disconnected, + BufferEdited, + ChannelChanged, +} + +impl EventEmitter for ChannelBuffer {} + +impl ChannelBuffer { + pub(crate) async fn new( + channel: Arc, + client: Arc, + user_store: Model, + channel_store: Model, + mut cx: AsyncAppContext, + ) -> Result> { + let response = client + .request(proto::JoinChannelBuffer { + channel_id: channel.id.0, + }) + .await?; + let buffer_id = BufferId::new(response.buffer_id)?; + let base_text = response.base_text; + let operations = response + .operations + .into_iter() + .map(language::proto::deserialize_operation) + .collect::, _>>()?; + + let buffer = cx.new_model(|cx| { + let capability = channel_store.read(cx).channel_capability(channel.id); + language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text) + })?; + buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??; + + let subscription = client.subscribe_to_entity(channel.id.0)?; + + anyhow::Ok(cx.new_model(|cx| { + cx.subscribe(&buffer, Self::on_buffer_update).detach(); + cx.on_release(Self::release).detach(); + let mut this = Self { + buffer, + buffer_epoch: response.epoch, + client, + connected: true, + collaborators: Default::default(), + acknowledge_task: None, + channel_id: channel.id, + subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())), + user_store, + channel_store, + }; + this.replace_collaborators(response.collaborators, cx); + this + })?) 
+ } + + fn release(&mut self, _: &mut AppContext) { + if self.connected { + if let Some(task) = self.acknowledge_task.take() { + task.detach(); + } + self.client + .send(proto::LeaveChannelBuffer { + channel_id: self.channel_id.0, + }) + .log_err(); + } + } + + pub fn remote_id(&self, cx: &AppContext) -> BufferId { + self.buffer.read(cx).remote_id() + } + + pub fn user_store(&self) -> &Model { + &self.user_store + } + + pub(crate) fn replace_collaborators( + &mut self, + collaborators: Vec, + cx: &mut ModelContext, + ) { + let mut new_collaborators = HashMap::default(); + for collaborator in collaborators { + if let Ok(collaborator) = Collaborator::from_proto(collaborator) { + new_collaborators.insert(collaborator.peer_id, collaborator); + } + } + + for (_, old_collaborator) in &self.collaborators { + if !new_collaborators.contains_key(&old_collaborator.peer_id) { + self.buffer.update(cx, |buffer, cx| { + buffer.remove_peer(old_collaborator.replica_id, cx) + }); + } + } + self.collaborators = new_collaborators; + cx.emit(ChannelBufferEvent::CollaboratorsChanged); + cx.notify(); + } + + async fn handle_update_channel_buffer( + this: Model, + update_channel_buffer: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + let ops = update_channel_buffer + .payload + .operations + .into_iter() + .map(language::proto::deserialize_operation) + .collect::, _>>()?; + + this.update(&mut cx, |this, cx| { + cx.notify(); + this.buffer + .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) + })??; + + Ok(()) + } + + async fn handle_update_channel_buffer_collaborators( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + this.replace_collaborators(message.payload.collaborators, cx); + cx.emit(ChannelBufferEvent::CollaboratorsChanged); + cx.notify(); + }) + } + + fn on_buffer_update( + &mut self, + _: Model, + event: &language::Event, + cx: &mut ModelContext, + ) { + match event { + language::Event::Operation(operation) => { + if *ZED_ALWAYS_ACTIVE { + match operation { + language::Operation::UpdateSelections { selections, .. 
} => { + if selections.is_empty() { + return; + } + } + _ => {} + } + } + let operation = language::proto::serialize_operation(operation); + self.client + .send(proto::UpdateChannelBuffer { + channel_id: self.channel_id.0, + operations: vec![operation], + }) + .log_err(); + } + language::Event::Edited => { + cx.emit(ChannelBufferEvent::BufferEdited); + } + _ => {} + } + } + + pub fn acknowledge_buffer_version(&mut self, cx: &mut ModelContext<'_, ChannelBuffer>) { + let buffer = self.buffer.read(cx); + let version = buffer.version(); + let buffer_id = buffer.remote_id().into(); + let client = self.client.clone(); + let epoch = self.epoch(); + + self.acknowledge_task = Some(cx.spawn(move |_, cx| async move { + cx.background_executor() + .timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL) + .await; + client + .send(proto::AckBufferOperation { + buffer_id, + epoch, + version: serialize_version(&version), + }) + .ok(); + Ok(()) + })); + } + + pub fn epoch(&self) -> u64 { + self.buffer_epoch + } + + pub fn buffer(&self) -> Model { + self.buffer.clone() + } + + pub fn collaborators(&self) -> &HashMap { + &self.collaborators + } + + pub fn channel(&self, cx: &AppContext) -> Option> { + self.channel_store + .read(cx) + .channel_for_id(self.channel_id) + .cloned() + } + + pub(crate) fn disconnect(&mut self, cx: &mut ModelContext) { + log::info!("channel buffer {} disconnected", self.channel_id); + if self.connected { + self.connected = false; + self.subscription.take(); + cx.emit(ChannelBufferEvent::Disconnected); + cx.notify() + } + } + + pub(crate) fn channel_changed(&mut self, cx: &mut ModelContext) { + cx.emit(ChannelBufferEvent::ChannelChanged); + cx.notify() + } + + pub fn is_connected(&self) -> bool { + self.connected + } + + pub fn replica_id(&self, cx: &AppContext) -> u16 { + self.buffer.read(cx).replica_id() + } +} diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs new file mode 100644 index 0000000..4607466 --- /dev/null +++ b/crates/channel/src/channel_chat.rs @@ -0,0 +1,857 @@ +use crate::{Channel, ChannelStore}; +use anyhow::{anyhow, Result}; +use client::{ + proto, + user::{User, UserStore}, + ChannelId, Client, Subscription, TypedEnvelope, UserId, +}; +use collections::HashSet; +use futures::lock::Mutex; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel, +}; +use rand::prelude::*; +use std::{ + ops::{ControlFlow, Range}, + sync::Arc, +}; +use sum_tree::{Bias, SumTree}; +use time::OffsetDateTime; +use util::{post_inc, ResultExt as _, TryFutureExt}; + +pub struct ChannelChat { + pub channel_id: ChannelId, + messages: SumTree, + acknowledged_message_ids: HashSet, + channel_store: Model, + loaded_all_messages: bool, + last_acknowledged_id: Option, + next_pending_message_id: usize, + first_loaded_message_id: Option, + user_store: Model, + rpc: Arc, + outgoing_messages_lock: Arc>, + rng: StdRng, + _subscription: Subscription, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct MessageParams { + pub text: String, + pub mentions: Vec<(Range, UserId)>, + pub reply_to_message_id: Option, +} + +#[derive(Clone, Debug)] +pub struct ChannelMessage { + pub id: ChannelMessageId, + pub body: String, + pub timestamp: OffsetDateTime, + pub sender: Arc, + pub nonce: u128, + pub mentions: Vec<(Range, UserId)>, + pub reply_to_message_id: Option, + pub edited_at: Option, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ChannelMessageId { + Saved(u64), + Pending(usize), +} + +impl Into> for 
ChannelMessageId { + fn into(self) -> Option { + match self { + ChannelMessageId::Saved(id) => Some(id), + ChannelMessageId::Pending(_) => None, + } + } +} + +#[derive(Clone, Debug, Default)] +pub struct ChannelMessageSummary { + max_id: ChannelMessageId, + count: usize, +} + +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +struct Count(usize); + +#[derive(Clone, Debug, PartialEq)] +pub enum ChannelChatEvent { + MessagesUpdated { + old_range: Range, + new_count: usize, + }, + UpdateMessage { + message_id: ChannelMessageId, + message_ix: usize, + }, + NewMessage { + channel_id: ChannelId, + message_id: u64, + }, +} + +impl EventEmitter for ChannelChat {} +pub fn init(client: &Arc) { + client.add_model_message_handler(ChannelChat::handle_message_sent); + client.add_model_message_handler(ChannelChat::handle_message_removed); + client.add_model_message_handler(ChannelChat::handle_message_updated); +} + +impl ChannelChat { + pub async fn new( + channel: Arc, + channel_store: Model, + user_store: Model, + client: Arc, + mut cx: AsyncAppContext, + ) -> Result> { + let channel_id = channel.id; + let subscription = client.subscribe_to_entity(channel_id.0).unwrap(); + + let response = client + .request(proto::JoinChannelChat { + channel_id: channel_id.0, + }) + .await?; + + let handle = cx.new_model(|cx| { + cx.on_release(Self::release).detach(); + Self { + channel_id: channel.id, + user_store: user_store.clone(), + channel_store, + rpc: client.clone(), + outgoing_messages_lock: Default::default(), + messages: Default::default(), + acknowledged_message_ids: Default::default(), + loaded_all_messages: false, + next_pending_message_id: 0, + last_acknowledged_id: None, + rng: StdRng::from_entropy(), + first_loaded_message_id: None, + _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()), + } + })?; + Self::handle_loaded_messages( + handle.downgrade(), + user_store, + client, + response.messages, + response.done, + &mut cx, + ) + .await?; + Ok(handle) + } + + fn release(&mut self, _: &mut AppContext) { + self.rpc + .send(proto::LeaveChannelChat { + channel_id: self.channel_id.0, + }) + .log_err(); + } + + pub fn channel(&self, cx: &AppContext) -> Option> { + self.channel_store + .read(cx) + .channel_for_id(self.channel_id) + .cloned() + } + + pub fn client(&self) -> &Arc { + &self.rpc + } + + pub fn send_message( + &mut self, + message: MessageParams, + cx: &mut ModelContext, + ) -> Result>> { + if message.text.trim().is_empty() { + Err(anyhow!("message body can't be empty"))?; + } + + let current_user = self + .user_store + .read(cx) + .current_user() + .ok_or_else(|| anyhow!("current_user is not present"))?; + + let channel_id = self.channel_id; + let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id)); + let nonce = self.rng.gen(); + self.insert_messages( + SumTree::from_item( + ChannelMessage { + id: pending_id, + body: message.text.clone(), + sender: current_user, + timestamp: OffsetDateTime::now_utc(), + mentions: message.mentions.clone(), + nonce, + reply_to_message_id: message.reply_to_message_id, + edited_at: None, + }, + &(), + ), + cx, + ); + let user_store = self.user_store.clone(); + let rpc = self.rpc.clone(); + let outgoing_messages_lock = self.outgoing_messages_lock.clone(); + + // todo - handle messages that fail to send (e.g. 
>1024 chars) + Ok(cx.spawn(move |this, mut cx| async move { + let outgoing_message_guard = outgoing_messages_lock.lock().await; + let request = rpc.request(proto::SendChannelMessage { + channel_id: channel_id.0, + body: message.text, + nonce: Some(nonce.into()), + mentions: mentions_to_proto(&message.mentions), + reply_to_message_id: message.reply_to_message_id, + }); + let response = request.await?; + drop(outgoing_message_guard); + let response = response.message.ok_or_else(|| anyhow!("invalid message"))?; + let id = response.id; + let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?; + this.update(&mut cx, |this, cx| { + this.insert_messages(SumTree::from_item(message, &()), cx); + if this.first_loaded_message_id.is_none() { + this.first_loaded_message_id = Some(id); + } + })?; + Ok(id) + })) + } + + pub fn remove_message(&mut self, id: u64, cx: &mut ModelContext) -> Task> { + let response = self.rpc.request(proto::RemoveChannelMessage { + channel_id: self.channel_id.0, + message_id: id, + }); + cx.spawn(move |this, mut cx| async move { + response.await?; + this.update(&mut cx, |this, cx| { + this.message_removed(id, cx); + })?; + Ok(()) + }) + } + + pub fn update_message( + &mut self, + id: u64, + message: MessageParams, + cx: &mut ModelContext, + ) -> Result>> { + self.message_update( + ChannelMessageId::Saved(id), + message.text.clone(), + message.mentions.clone(), + Some(OffsetDateTime::now_utc()), + cx, + ); + + let nonce: u128 = self.rng.gen(); + + let request = self.rpc.request(proto::UpdateChannelMessage { + channel_id: self.channel_id.0, + message_id: id, + body: message.text, + nonce: Some(nonce.into()), + mentions: mentions_to_proto(&message.mentions), + }); + Ok(cx.spawn(move |_, _| async move { + request.await?; + Ok(()) + })) + } + + pub fn load_more_messages(&mut self, cx: &mut ModelContext) -> Option>> { + if self.loaded_all_messages { + return None; + } + + let rpc = self.rpc.clone(); + let user_store = self.user_store.clone(); + let channel_id = self.channel_id; + let before_message_id = self.first_loaded_message_id()?; + Some(cx.spawn(move |this, mut cx| { + async move { + let response = rpc + .request(proto::GetChannelMessages { + channel_id: channel_id.0, + before_message_id, + }) + .await?; + Self::handle_loaded_messages( + this, + user_store, + rpc, + response.messages, + response.done, + &mut cx, + ) + .await?; + + anyhow::Ok(()) + } + .log_err() + })) + } + + pub fn first_loaded_message_id(&mut self) -> Option { + self.first_loaded_message_id + } + + /// Load a message by its id, if it's already stored locally. + pub fn find_loaded_message(&self, id: u64) -> Option<&ChannelMessage> { + self.messages.iter().find(|message| match message.id { + ChannelMessageId::Saved(message_id) => message_id == id, + ChannelMessageId::Pending(_) => false, + }) + } + + /// Load all of the chat messages since a certain message id. + /// + /// For now, we always maintain a suffix of the channel's messages. 
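/// Pages backwards via `load_more_messages` until the requested id falls inside the loaded suffix, then returns that message's index in the chat (or `None` if it is no longer present).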
+ pub async fn load_history_since_message( + chat: Model, + message_id: u64, + mut cx: AsyncAppContext, + ) -> Option { + loop { + let step = chat + .update(&mut cx, |chat, cx| { + if let Some(first_id) = chat.first_loaded_message_id() { + if first_id <= message_id { + let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(); + let message_id = ChannelMessageId::Saved(message_id); + cursor.seek(&message_id, Bias::Left, &()); + return ControlFlow::Break( + if cursor + .item() + .map_or(false, |message| message.id == message_id) + { + Some(cursor.start().1 .0) + } else { + None + }, + ); + } + } + ControlFlow::Continue(chat.load_more_messages(cx)) + }) + .log_err()?; + match step { + ControlFlow::Break(ix) => return ix, + ControlFlow::Continue(task) => task?.await?, + } + } + } + + pub fn acknowledge_last_message(&mut self, cx: &mut ModelContext) { + if let ChannelMessageId::Saved(latest_message_id) = self.messages.summary().max_id { + if self + .last_acknowledged_id + .map_or(true, |acknowledged_id| acknowledged_id < latest_message_id) + { + self.rpc + .send(proto::AckChannelMessage { + channel_id: self.channel_id.0, + message_id: latest_message_id, + }) + .ok(); + self.last_acknowledged_id = Some(latest_message_id); + self.channel_store.update(cx, |store, cx| { + store.acknowledge_message_id(self.channel_id, latest_message_id, cx); + }); + } + } + } + + async fn handle_loaded_messages( + this: WeakModel, + user_store: Model, + rpc: Arc, + proto_messages: Vec, + loaded_all_messages: bool, + cx: &mut AsyncAppContext, + ) -> Result<()> { + let loaded_messages = messages_from_proto(proto_messages, &user_store, cx).await?; + + let first_loaded_message_id = loaded_messages.first().map(|m| m.id); + let loaded_message_ids = this.update(cx, |this, _| { + let mut loaded_message_ids: HashSet = HashSet::default(); + for message in loaded_messages.iter() { + if let Some(saved_message_id) = message.id.into() { + loaded_message_ids.insert(saved_message_id); + } + } + for message in this.messages.iter() { + if let Some(saved_message_id) = message.id.into() { + loaded_message_ids.insert(saved_message_id); + } + } + loaded_message_ids + })?; + + let missing_ancestors = loaded_messages + .iter() + .filter_map(|message| { + if let Some(ancestor_id) = message.reply_to_message_id { + if !loaded_message_ids.contains(&ancestor_id) { + return Some(ancestor_id); + } + } + None + }) + .collect::>(); + + let loaded_ancestors = if missing_ancestors.is_empty() { + None + } else { + let response = rpc + .request(proto::GetChannelMessagesById { + message_ids: missing_ancestors, + }) + .await?; + Some(messages_from_proto(response.messages, &user_store, cx).await?) 
+ }; + this.update(cx, |this, cx| { + this.first_loaded_message_id = first_loaded_message_id.and_then(|msg_id| msg_id.into()); + this.loaded_all_messages = loaded_all_messages; + this.insert_messages(loaded_messages, cx); + if let Some(loaded_ancestors) = loaded_ancestors { + this.insert_messages(loaded_ancestors, cx); + } + })?; + + Ok(()) + } + + pub fn rejoin(&mut self, cx: &mut ModelContext) { + let user_store = self.user_store.clone(); + let rpc = self.rpc.clone(); + let channel_id = self.channel_id; + cx.spawn(move |this, mut cx| { + async move { + let response = rpc + .request(proto::JoinChannelChat { + channel_id: channel_id.0, + }) + .await?; + Self::handle_loaded_messages( + this.clone(), + user_store.clone(), + rpc.clone(), + response.messages, + response.done, + &mut cx, + ) + .await?; + + let pending_messages = this.update(&mut cx, |this, _| { + this.pending_messages().cloned().collect::>() + })?; + + for pending_message in pending_messages { + let request = rpc.request(proto::SendChannelMessage { + channel_id: channel_id.0, + body: pending_message.body, + mentions: mentions_to_proto(&pending_message.mentions), + nonce: Some(pending_message.nonce.into()), + reply_to_message_id: pending_message.reply_to_message_id, + }); + let response = request.await?; + let message = ChannelMessage::from_proto( + response.message.ok_or_else(|| anyhow!("invalid message"))?, + &user_store, + &mut cx, + ) + .await?; + this.update(&mut cx, |this, cx| { + this.insert_messages(SumTree::from_item(message, &()), cx); + })?; + } + + anyhow::Ok(()) + } + .log_err() + }) + .detach(); + } + + pub fn message_count(&self) -> usize { + self.messages.summary().count + } + + pub fn messages(&self) -> &SumTree { + &self.messages + } + + pub fn message(&self, ix: usize) -> &ChannelMessage { + let mut cursor = self.messages.cursor::(); + cursor.seek(&Count(ix), Bias::Right, &()); + cursor.item().unwrap() + } + + pub fn acknowledge_message(&mut self, id: u64) { + if self.acknowledged_message_ids.insert(id) { + self.rpc + .send(proto::AckChannelMessage { + channel_id: self.channel_id.0, + message_id: id, + }) + .ok(); + } + } + + pub fn messages_in_range(&self, range: Range) -> impl Iterator { + let mut cursor = self.messages.cursor::(); + cursor.seek(&Count(range.start), Bias::Right, &()); + cursor.take(range.len()) + } + + pub fn pending_messages(&self) -> impl Iterator { + let mut cursor = self.messages.cursor::(); + cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &()); + cursor + } + + async fn handle_message_sent( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?; + let message = message + .payload + .message + .ok_or_else(|| anyhow!("empty message"))?; + let message_id = message.id; + + let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?; + this.update(&mut cx, |this, cx| { + this.insert_messages(SumTree::from_item(message, &()), cx); + cx.emit(ChannelChatEvent::NewMessage { + channel_id: this.channel_id, + message_id, + }) + })?; + + Ok(()) + } + + async fn handle_message_removed( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + this.message_removed(message.payload.message_id, cx) + })?; + Ok(()) + } + + async fn handle_message_updated( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + let user_store = 
this.update(&mut cx, |this, _| this.user_store.clone())?; + let message = message + .payload + .message + .ok_or_else(|| anyhow!("empty message"))?; + + let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?; + + this.update(&mut cx, |this, cx| { + this.message_update( + message.id, + message.body, + message.mentions, + message.edited_at, + cx, + ) + })?; + Ok(()) + } + + fn insert_messages(&mut self, messages: SumTree, cx: &mut ModelContext) { + if let Some((first_message, last_message)) = messages.first().zip(messages.last()) { + let nonces = messages + .cursor::<()>() + .map(|m| m.nonce) + .collect::>(); + + let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(); + let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &()); + let start_ix = old_cursor.start().1 .0; + let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &()); + let removed_count = removed_messages.summary().count; + let new_count = messages.summary().count; + let end_ix = start_ix + removed_count; + + new_messages.append(messages, &()); + + let mut ranges = Vec::>::new(); + if new_messages.last().unwrap().is_pending() { + new_messages.append(old_cursor.suffix(&()), &()); + } else { + new_messages.append( + old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()), + &(), + ); + + while let Some(message) = old_cursor.item() { + let message_ix = old_cursor.start().1 .0; + if nonces.contains(&message.nonce) { + if ranges.last().map_or(false, |r| r.end == message_ix) { + ranges.last_mut().unwrap().end += 1; + } else { + ranges.push(message_ix..message_ix + 1); + } + } else { + new_messages.push(message.clone(), &()); + } + old_cursor.next(&()); + } + } + + drop(old_cursor); + self.messages = new_messages; + + for range in ranges.into_iter().rev() { + cx.emit(ChannelChatEvent::MessagesUpdated { + old_range: range, + new_count: 0, + }); + } + cx.emit(ChannelChatEvent::MessagesUpdated { + old_range: start_ix..end_ix, + new_count, + }); + + cx.notify(); + } + } + + fn message_removed(&mut self, id: u64, cx: &mut ModelContext) { + let mut cursor = self.messages.cursor::(); + let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &()); + if let Some(item) = cursor.item() { + if item.id == ChannelMessageId::Saved(id) { + let deleted_message_ix = messages.summary().count; + cursor.next(&()); + messages.append(cursor.suffix(&()), &()); + drop(cursor); + self.messages = messages; + + // If the message that was deleted was the last acknowledged message, + // replace the acknowledged message with an earlier one. 
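+                // Two cases are handled below: the chat is now empty (clear the
+                // acknowledged id), or the removed message was the newest one (fall
+                // back to the new maximum id).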
+ self.channel_store.update(cx, |store, _| { + let summary = self.messages.summary(); + if summary.count == 0 { + store.set_acknowledged_message_id(self.channel_id, None); + } else if deleted_message_ix == summary.count { + if let ChannelMessageId::Saved(id) = summary.max_id { + store.set_acknowledged_message_id(self.channel_id, Some(id)); + } + } + }); + + cx.emit(ChannelChatEvent::MessagesUpdated { + old_range: deleted_message_ix..deleted_message_ix + 1, + new_count: 0, + }); + } + } + } + + fn message_update( + &mut self, + id: ChannelMessageId, + body: String, + mentions: Vec<(Range, u64)>, + edited_at: Option, + cx: &mut ModelContext, + ) { + let mut cursor = self.messages.cursor::(); + let mut messages = cursor.slice(&id, Bias::Left, &()); + let ix = messages.summary().count; + + if let Some(mut message_to_update) = cursor.item().cloned() { + message_to_update.body = body; + message_to_update.mentions = mentions; + message_to_update.edited_at = edited_at; + messages.push(message_to_update, &()); + cursor.next(&()); + } + + messages.append(cursor.suffix(&()), &()); + drop(cursor); + self.messages = messages; + + cx.emit(ChannelChatEvent::UpdateMessage { + message_ix: ix, + message_id: id, + }); + + cx.notify(); + } +} + +async fn messages_from_proto( + proto_messages: Vec, + user_store: &Model, + cx: &mut AsyncAppContext, +) -> Result> { + let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?; + let mut result = SumTree::new(); + result.extend(messages, &()); + Ok(result) +} + +impl ChannelMessage { + pub async fn from_proto( + message: proto::ChannelMessage, + user_store: &Model, + cx: &mut AsyncAppContext, + ) -> Result { + let sender = user_store + .update(cx, |user_store, cx| { + user_store.get_user(message.sender_id, cx) + })? + .await?; + + let edited_at = message.edited_at.and_then(|t| -> Option { + if let Ok(a) = OffsetDateTime::from_unix_timestamp(t as i64) { + return Some(a); + } + + None + }); + + Ok(ChannelMessage { + id: ChannelMessageId::Saved(message.id), + body: message.body, + mentions: message + .mentions + .into_iter() + .filter_map(|mention| { + let range = mention.range?; + Some((range.start as usize..range.end as usize, mention.user_id)) + }) + .collect(), + timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?, + sender, + nonce: message + .nonce + .ok_or_else(|| anyhow!("nonce is required"))? + .into(), + reply_to_message_id: message.reply_to_message_id, + edited_at, + }) + } + + pub fn is_pending(&self) -> bool { + matches!(self.id, ChannelMessageId::Pending(_)) + } + + pub async fn from_proto_vec( + proto_messages: Vec, + user_store: &Model, + cx: &mut AsyncAppContext, + ) -> Result> { + let unique_user_ids = proto_messages + .iter() + .map(|m| m.sender_id) + .collect::>() + .into_iter() + .collect(); + user_store + .update(cx, |user_store, cx| { + user_store.get_users(unique_user_ids, cx) + })? 
+ .await?; + + let mut messages = Vec::with_capacity(proto_messages.len()); + for message in proto_messages { + messages.push(ChannelMessage::from_proto(message, user_store, cx).await?); + } + Ok(messages) + } +} + +pub fn mentions_to_proto(mentions: &[(Range, UserId)]) -> Vec { + mentions + .iter() + .map(|(range, user_id)| proto::ChatMention { + range: Some(proto::Range { + start: range.start as u64, + end: range.end as u64, + }), + user_id: *user_id, + }) + .collect() +} + +impl sum_tree::Item for ChannelMessage { + type Summary = ChannelMessageSummary; + + fn summary(&self) -> Self::Summary { + ChannelMessageSummary { + max_id: self.id, + count: 1, + } + } +} + +impl Default for ChannelMessageId { + fn default() -> Self { + Self::Saved(0) + } +} + +impl sum_tree::Summary for ChannelMessageSummary { + type Context = (); + + fn add_summary(&mut self, summary: &Self, _: &()) { + self.max_id = summary.max_id; + self.count += summary.count; + } +} + +impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { + debug_assert!(summary.max_id > *self); + *self = summary.max_id; + } +} + +impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count { + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { + self.0 += summary.count; + } +} + +impl<'a> From<&'a str> for MessageParams { + fn from(value: &'a str) -> Self { + Self { + text: value.into(), + mentions: Vec::new(), + reply_to_message_id: None, + } + } +} diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs new file mode 100644 index 0000000..74ff7c7 --- /dev/null +++ b/crates/channel/src/channel_store.rs @@ -0,0 +1,1301 @@ +mod channel_index; + +use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage}; +use anyhow::{anyhow, Result}; +use channel_index::ChannelIndex; +use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore}; +use collections::{hash_map, HashMap, HashSet}; +use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt}; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, SharedString, + Task, WeakModel, +}; +use language::Capability; +use rpc::{ + proto::{self, ChannelRole, ChannelVisibility}, + TypedEnvelope, +}; +use settings::Settings; +use std::{mem, sync::Arc, time::Duration}; +use util::{maybe, ResultExt}; + +pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30); + +pub fn init(client: &Arc, user_store: Model, cx: &mut AppContext) { + let channel_store = + cx.new_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx)); + cx.set_global(GlobalChannelStore(channel_store)); +} + +#[derive(Debug, Clone, Default, PartialEq)] +struct NotesVersion { + epoch: u64, + version: clock::Global, +} + +#[derive(Debug, Clone)] +pub struct HostedProject { + project_id: ProjectId, + channel_id: ChannelId, + name: SharedString, + _visibility: proto::ChannelVisibility, +} +impl From for HostedProject { + fn from(project: proto::HostedProject) -> Self { + Self { + project_id: ProjectId(project.project_id), + channel_id: ChannelId(project.channel_id), + _visibility: project.visibility(), + name: project.name.into(), + } + } +} +pub struct ChannelStore { + pub channel_index: ChannelIndex, + channel_invitations: Vec>, + channel_participants: HashMap>>, + channel_states: HashMap, + hosted_projects: HashMap, + + outgoing_invites: 
HashSet<(ChannelId, UserId)>, + update_channels_tx: mpsc::UnboundedSender, + opened_buffers: HashMap>, + opened_chats: HashMap>, + client: Arc, + user_store: Model, + _rpc_subscriptions: [Subscription; 2], + _watch_connection_status: Task>, + disconnect_channel_buffers_task: Option>, + _update_channels: Task<()>, +} + +#[derive(Clone, Debug)] +pub struct Channel { + pub id: ChannelId, + pub name: SharedString, + pub visibility: proto::ChannelVisibility, + pub parent_path: Vec, +} + +#[derive(Default, Debug)] +pub struct ChannelState { + latest_chat_message: Option, + latest_notes_version: NotesVersion, + observed_notes_version: NotesVersion, + observed_chat_message: Option, + role: Option, + projects: HashSet, +} + +impl Channel { + pub fn link(&self, cx: &AppContext) -> String { + format!( + "{}/channel/{}-{}", + ClientSettings::get_global(cx).server_url, + Self::slug(&self.name), + self.id + ) + } + + pub fn notes_link(&self, heading: Option, cx: &AppContext) -> String { + self.link(cx) + + "/notes" + + &heading + .map(|h| format!("#{}", Self::slug(&h))) + .unwrap_or_default() + } + + pub fn is_root_channel(&self) -> bool { + self.parent_path.is_empty() + } + + pub fn root_id(&self) -> ChannelId { + self.parent_path.first().copied().unwrap_or(self.id) + } + + pub fn slug(str: &str) -> String { + let slug: String = str + .chars() + .map(|c| if c.is_alphanumeric() { c } else { '-' }) + .collect(); + + slug.trim_matches(|c| c == '-').to_string() + } +} + +#[derive(Debug)] +pub struct ChannelMembership { + pub user: Arc, + pub kind: proto::channel_member::Kind, + pub role: proto::ChannelRole, +} +impl ChannelMembership { + pub fn sort_key(&self) -> MembershipSortKey { + MembershipSortKey { + role_order: match self.role { + proto::ChannelRole::Admin => 0, + proto::ChannelRole::Member => 1, + proto::ChannelRole::Banned => 2, + proto::ChannelRole::Talker => 3, + proto::ChannelRole::Guest => 4, + }, + kind_order: match self.kind { + proto::channel_member::Kind::Member => 0, + proto::channel_member::Kind::Invitee => 1, + }, + username_order: self.user.github_login.as_str(), + } + } +} + +#[derive(PartialOrd, Ord, PartialEq, Eq)] +pub struct MembershipSortKey<'a> { + role_order: u8, + kind_order: u8, + username_order: &'a str, +} + +pub enum ChannelEvent { + ChannelCreated(ChannelId), + ChannelRenamed(ChannelId), +} + +impl EventEmitter for ChannelStore {} + +enum OpenedModelHandle { + Open(WeakModel), + Loading(Shared, Arc>>>), +} + +struct GlobalChannelStore(Model); + +impl Global for GlobalChannelStore {} + +impl ChannelStore { + pub fn global(cx: &AppContext) -> Model { + cx.global::().0.clone() + } + + pub fn new( + client: Arc, + user_store: Model, + cx: &mut ModelContext, + ) -> Self { + let rpc_subscriptions = [ + client.add_message_handler(cx.weak_model(), Self::handle_update_channels), + client.add_message_handler(cx.weak_model(), Self::handle_update_user_channels), + ]; + + let mut connection_status = client.status(); + let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded(); + let watch_connection_status = cx.spawn(|this, mut cx| async move { + while let Some(status) = connection_status.next().await { + let this = this.upgrade()?; + match status { + client::Status::Connected { .. } => { + this.update(&mut cx, |this, cx| this.handle_connect(cx)) + .ok()? 
+ .await + .log_err()?; + } + client::Status::SignedOut | client::Status::UpgradeRequired => { + this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx)) + .ok(); + } + _ => { + this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx)) + .ok(); + } + } + } + Some(()) + }); + + Self { + channel_invitations: Vec::default(), + channel_index: ChannelIndex::default(), + channel_participants: Default::default(), + hosted_projects: Default::default(), + outgoing_invites: Default::default(), + opened_buffers: Default::default(), + opened_chats: Default::default(), + update_channels_tx, + client, + user_store, + _rpc_subscriptions: rpc_subscriptions, + _watch_connection_status: watch_connection_status, + disconnect_channel_buffers_task: None, + _update_channels: cx.spawn(|this, mut cx| async move { + maybe!(async move { + while let Some(update_channels) = update_channels_rx.next().await { + if let Some(this) = this.upgrade() { + let update_task = this.update(&mut cx, |this, cx| { + this.update_channels(update_channels, cx) + })?; + if let Some(update_task) = update_task { + update_task.await.log_err(); + } + } + } + anyhow::Ok(()) + }) + .await + .log_err(); + }), + channel_states: Default::default(), + } + } + + pub fn client(&self) -> Arc { + self.client.clone() + } + + /// Returns the number of unique channels in the store + pub fn channel_count(&self) -> usize { + self.channel_index.by_id().len() + } + + /// Returns the index of a channel ID in the list of unique channels + pub fn index_of_channel(&self, channel_id: ChannelId) -> Option { + self.channel_index + .by_id() + .keys() + .position(|id| *id == channel_id) + } + + /// Returns an iterator over all unique channels + pub fn channels(&self) -> impl '_ + Iterator> { + self.channel_index.by_id().values() + } + + /// Iterate over all entries in the channel DAG + pub fn ordered_channels(&self) -> impl '_ + Iterator)> { + self.channel_index + .ordered_channels() + .iter() + .filter_map(move |id| { + let channel = self.channel_index.by_id().get(id)?; + Some((channel.parent_path.len(), channel)) + }) + } + + pub fn channel_at_index(&self, ix: usize) -> Option<&Arc> { + let channel_id = self.channel_index.ordered_channels().get(ix)?; + self.channel_index.by_id().get(channel_id) + } + + pub fn channel_at(&self, ix: usize) -> Option<&Arc> { + self.channel_index.by_id().values().nth(ix) + } + + pub fn has_channel_invitation(&self, channel_id: ChannelId) -> bool { + self.channel_invitations + .iter() + .any(|channel| channel.id == channel_id) + } + + pub fn channel_invitations(&self) -> &[Arc] { + &self.channel_invitations + } + + pub fn channel_for_id(&self, channel_id: ChannelId) -> Option<&Arc> { + self.channel_index.by_id().get(&channel_id) + } + + pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> { + let mut projects: Vec<(SharedString, ProjectId)> = self + .channel_states + .get(&channel_id) + .map(|state| state.projects.clone()) + .unwrap_or_default() + .into_iter() + .flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id))) + .collect(); + projects.sort(); + projects + } + + pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool { + if let Some(buffer) = self.opened_buffers.get(&channel_id) { + if let OpenedModelHandle::Open(buffer) = buffer { + return buffer.upgrade().is_some(); + } + } + false + } + + pub fn open_channel_buffer( + &mut self, + channel_id: ChannelId, + cx: &mut ModelContext, + ) -> Task>> { + let client = self.client.clone(); 
+ let user_store = self.user_store.clone(); + let channel_store = cx.handle(); + self.open_channel_resource( + channel_id, + |this| &mut this.opened_buffers, + |channel, cx| ChannelBuffer::new(channel, client, user_store, channel_store, cx), + cx, + ) + } + + pub fn fetch_channel_messages( + &self, + message_ids: Vec, + cx: &mut ModelContext, + ) -> Task>> { + let request = if message_ids.is_empty() { + None + } else { + Some( + self.client + .request(proto::GetChannelMessagesById { message_ids }), + ) + }; + cx.spawn(|this, mut cx| async move { + if let Some(request) = request { + let response = request.await?; + let this = this + .upgrade() + .ok_or_else(|| anyhow!("channel store dropped"))?; + let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?; + ChannelMessage::from_proto_vec(response.messages, &user_store, &mut cx).await + } else { + Ok(Vec::new()) + } + }) + } + + pub fn has_channel_buffer_changed(&self, channel_id: ChannelId) -> bool { + self.channel_states + .get(&channel_id) + .is_some_and(|state| state.has_channel_buffer_changed()) + } + + pub fn has_new_messages(&self, channel_id: ChannelId) -> bool { + self.channel_states + .get(&channel_id) + .is_some_and(|state| state.has_new_messages()) + } + + pub fn set_acknowledged_message_id(&mut self, channel_id: ChannelId, message_id: Option) { + if let Some(state) = self.channel_states.get_mut(&channel_id) { + state.latest_chat_message = message_id; + } + } + + pub fn last_acknowledge_message_id(&self, channel_id: ChannelId) -> Option { + self.channel_states.get(&channel_id).and_then(|state| { + if let Some(last_message_id) = state.latest_chat_message { + if state + .last_acknowledged_message_id() + .is_some_and(|id| id < last_message_id) + { + return state.last_acknowledged_message_id(); + } + } + + None + }) + } + + pub fn acknowledge_message_id( + &mut self, + channel_id: ChannelId, + message_id: u64, + cx: &mut ModelContext, + ) { + self.channel_states + .entry(channel_id) + .or_insert_with(|| Default::default()) + .acknowledge_message_id(message_id); + cx.notify(); + } + + pub fn update_latest_message_id( + &mut self, + channel_id: ChannelId, + message_id: u64, + cx: &mut ModelContext, + ) { + self.channel_states + .entry(channel_id) + .or_insert_with(|| Default::default()) + .update_latest_message_id(message_id); + cx.notify(); + } + + pub fn acknowledge_notes_version( + &mut self, + channel_id: ChannelId, + epoch: u64, + version: &clock::Global, + cx: &mut ModelContext, + ) { + self.channel_states + .entry(channel_id) + .or_insert_with(|| Default::default()) + .acknowledge_notes_version(epoch, version); + cx.notify() + } + + pub fn update_latest_notes_version( + &mut self, + channel_id: ChannelId, + epoch: u64, + version: &clock::Global, + cx: &mut ModelContext, + ) { + self.channel_states + .entry(channel_id) + .or_insert_with(|| Default::default()) + .update_latest_notes_version(epoch, version); + cx.notify() + } + + pub fn open_channel_chat( + &mut self, + channel_id: ChannelId, + cx: &mut ModelContext, + ) -> Task>> { + let client = self.client.clone(); + let user_store = self.user_store.clone(); + let this = cx.handle(); + self.open_channel_resource( + channel_id, + |this| &mut this.opened_chats, + |channel, cx| ChannelChat::new(channel, this, user_store, client, cx), + cx, + ) + } + + /// Asynchronously open a given resource associated with a channel. 
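+    /// (This is the shared path used by both `open_channel_buffer` and
+    /// `open_channel_chat` above.)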
+ /// + /// Make sure that the resource is only opened once, even if this method + /// is called multiple times with the same channel id while the first task + /// is still running. + fn open_channel_resource( + &mut self, + channel_id: ChannelId, + get_map: fn(&mut Self) -> &mut HashMap>, + load: F, + cx: &mut ModelContext, + ) -> Task>> + where + F: 'static + FnOnce(Arc, AsyncAppContext) -> Fut, + Fut: Future>>, + T: 'static, + { + let task = loop { + match get_map(self).entry(channel_id) { + hash_map::Entry::Occupied(e) => match e.get() { + OpenedModelHandle::Open(model) => { + if let Some(model) = model.upgrade() { + break Task::ready(Ok(model)).shared(); + } else { + get_map(self).remove(&channel_id); + continue; + } + } + OpenedModelHandle::Loading(task) => { + break task.clone(); + } + }, + hash_map::Entry::Vacant(e) => { + let task = cx + .spawn(move |this, mut cx| async move { + let channel = this.update(&mut cx, |this, _| { + this.channel_for_id(channel_id).cloned().ok_or_else(|| { + Arc::new(anyhow!("no channel for id: {}", channel_id)) + }) + })??; + + load(channel, cx).await.map_err(Arc::new) + }) + .shared(); + + e.insert(OpenedModelHandle::Loading(task.clone())); + cx.spawn({ + let task = task.clone(); + move |this, mut cx| async move { + let result = task.await; + this.update(&mut cx, |this, _| match result { + Ok(model) => { + get_map(this).insert( + channel_id, + OpenedModelHandle::Open(model.downgrade()), + ); + } + Err(_) => { + get_map(this).remove(&channel_id); + } + }) + .ok(); + } + }) + .detach(); + break task; + } + } + }; + cx.background_executor() + .spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) }) + } + + pub fn is_channel_admin(&self, channel_id: ChannelId) -> bool { + self.channel_role(channel_id) == proto::ChannelRole::Admin + } + + pub fn is_root_channel(&self, channel_id: ChannelId) -> bool { + self.channel_index + .by_id() + .get(&channel_id) + .map_or(false, |channel| channel.is_root_channel()) + } + + pub fn is_public_channel(&self, channel_id: ChannelId) -> bool { + self.channel_index + .by_id() + .get(&channel_id) + .map_or(false, |channel| { + channel.visibility == ChannelVisibility::Public + }) + } + + pub fn channel_capability(&self, channel_id: ChannelId) -> Capability { + match self.channel_role(channel_id) { + ChannelRole::Admin | ChannelRole::Member => Capability::ReadWrite, + _ => Capability::ReadOnly, + } + } + + pub fn channel_role(&self, channel_id: ChannelId) -> proto::ChannelRole { + maybe!({ + let mut channel = self.channel_for_id(channel_id)?; + if !channel.is_root_channel() { + channel = self.channel_for_id(channel.root_id())?; + } + let root_channel_state = self.channel_states.get(&channel.id); + root_channel_state?.role + }) + .unwrap_or(proto::ChannelRole::Guest) + } + + pub fn channel_participants(&self, channel_id: ChannelId) -> &[Arc] { + self.channel_participants + .get(&channel_id) + .map_or(&[], |v| v.as_slice()) + } + + pub fn create_channel( + &self, + name: &str, + parent_id: Option, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + let name = name.trim_start_matches('#').to_owned(); + cx.spawn(move |this, mut cx| async move { + let response = client + .request(proto::CreateChannel { + name, + parent_id: parent_id.map(|cid| cid.0), + }) + .await?; + + let channel = response + .channel + .ok_or_else(|| anyhow!("missing channel in response"))?; + let channel_id = ChannelId(channel.id); + + this.update(&mut cx, |this, cx| { + let task = this.update_channels( + 
proto::UpdateChannels { + channels: vec![channel], + ..Default::default() + }, + cx, + ); + assert!(task.is_none()); + + // This event is emitted because the collab panel wants to clear the pending edit state + // before this frame is rendered. But we can't guarantee that the collab panel's future + // will resolve before this flush_effects finishes. Synchronously emitting this event + // ensures that the collab panel will observe this creation before the frame completes + cx.emit(ChannelEvent::ChannelCreated(channel_id)); + })?; + + Ok(channel_id) + }) + } + + pub fn move_channel( + &mut self, + channel_id: ChannelId, + to: ChannelId, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.spawn(move |_, _| async move { + let _ = client + .request(proto::MoveChannel { + channel_id: channel_id.0, + to: to.0, + }) + .await?; + + Ok(()) + }) + } + + pub fn set_channel_visibility( + &mut self, + channel_id: ChannelId, + visibility: ChannelVisibility, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.spawn(move |_, _| async move { + let _ = client + .request(proto::SetChannelVisibility { + channel_id: channel_id.0, + visibility: visibility.into(), + }) + .await?; + + Ok(()) + }) + } + + pub fn invite_member( + &mut self, + channel_id: ChannelId, + user_id: UserId, + role: proto::ChannelRole, + cx: &mut ModelContext, + ) -> Task> { + if !self.outgoing_invites.insert((channel_id, user_id)) { + return Task::ready(Err(anyhow!("invite request already in progress"))); + } + + cx.notify(); + let client = self.client.clone(); + cx.spawn(move |this, mut cx| async move { + let result = client + .request(proto::InviteChannelMember { + channel_id: channel_id.0, + user_id, + role: role.into(), + }) + .await; + + this.update(&mut cx, |this, cx| { + this.outgoing_invites.remove(&(channel_id, user_id)); + cx.notify(); + })?; + + result?; + + Ok(()) + }) + } + + pub fn remove_member( + &mut self, + channel_id: ChannelId, + user_id: u64, + cx: &mut ModelContext, + ) -> Task> { + if !self.outgoing_invites.insert((channel_id, user_id)) { + return Task::ready(Err(anyhow!("invite request already in progress"))); + } + + cx.notify(); + let client = self.client.clone(); + cx.spawn(move |this, mut cx| async move { + let result = client + .request(proto::RemoveChannelMember { + channel_id: channel_id.0, + user_id, + }) + .await; + + this.update(&mut cx, |this, cx| { + this.outgoing_invites.remove(&(channel_id, user_id)); + cx.notify(); + })?; + result?; + Ok(()) + }) + } + + pub fn set_member_role( + &mut self, + channel_id: ChannelId, + user_id: UserId, + role: proto::ChannelRole, + cx: &mut ModelContext, + ) -> Task> { + if !self.outgoing_invites.insert((channel_id, user_id)) { + return Task::ready(Err(anyhow!("member request already in progress"))); + } + + cx.notify(); + let client = self.client.clone(); + cx.spawn(move |this, mut cx| async move { + let result = client + .request(proto::SetChannelMemberRole { + channel_id: channel_id.0, + user_id, + role: role.into(), + }) + .await; + + this.update(&mut cx, |this, cx| { + this.outgoing_invites.remove(&(channel_id, user_id)); + cx.notify(); + })?; + + result?; + Ok(()) + }) + } + + pub fn rename( + &mut self, + channel_id: ChannelId, + new_name: &str, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + let name = new_name.to_string(); + cx.spawn(move |this, mut cx| async move { + let channel = client + .request(proto::RenameChannel { + channel_id: channel_id.0, + name, + }) + .await? 
+ .channel + .ok_or_else(|| anyhow!("missing channel in response"))?; + this.update(&mut cx, |this, cx| { + let task = this.update_channels( + proto::UpdateChannels { + channels: vec![channel], + ..Default::default() + }, + cx, + ); + assert!(task.is_none()); + + // This event is emitted because the collab panel wants to clear the pending edit state + // before this frame is rendered. But we can't guarantee that the collab panel's future + // will resolve before this flush_effects finishes. Synchronously emitting this event + // ensures that the collab panel will observe this creation before the frame complete + cx.emit(ChannelEvent::ChannelRenamed(channel_id)) + })?; + Ok(()) + }) + } + + pub fn respond_to_channel_invite( + &mut self, + channel_id: ChannelId, + accept: bool, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::RespondToChannelInvite { + channel_id: channel_id.0, + accept, + }) + .await?; + Ok(()) + }) + } + pub fn fuzzy_search_members( + &self, + channel_id: ChannelId, + query: String, + limit: u16, + cx: &mut ModelContext, + ) -> Task>> { + let client = self.client.clone(); + let user_store = self.user_store.downgrade(); + cx.spawn(move |_, mut cx| async move { + let response = client + .request(proto::GetChannelMembers { + channel_id: channel_id.0, + query, + limit: limit as u64, + }) + .await?; + user_store.update(&mut cx, |user_store, _| { + user_store.insert(response.users); + response + .members + .into_iter() + .filter_map(|member| { + Some(ChannelMembership { + user: user_store.get_cached_user(member.user_id)?, + role: member.role(), + kind: member.kind(), + }) + }) + .collect() + }) + }) + } + + pub fn remove_channel(&self, channel_id: ChannelId) -> impl Future> { + let client = self.client.clone(); + async move { + client + .request(proto::DeleteChannel { + channel_id: channel_id.0, + }) + .await?; + Ok(()) + } + } + + pub fn has_pending_channel_invite_response(&self, _: &Arc) -> bool { + false + } + + pub fn has_pending_channel_invite(&self, channel_id: ChannelId, user_id: UserId) -> bool { + self.outgoing_invites.contains(&(channel_id, user_id)) + } + + async fn handle_update_channels( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, _| { + this.update_channels_tx + .unbounded_send(message.payload) + .unwrap(); + })?; + Ok(()) + } + + async fn handle_update_user_channels( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + for buffer_version in message.payload.observed_channel_buffer_version { + let version = language::proto::deserialize_version(&buffer_version.version); + this.acknowledge_notes_version( + ChannelId(buffer_version.channel_id), + buffer_version.epoch, + &version, + cx, + ); + } + for message_id in message.payload.observed_channel_message_id { + this.acknowledge_message_id( + ChannelId(message_id.channel_id), + message_id.message_id, + cx, + ); + } + for membership in message.payload.channel_memberships { + if let Some(role) = ChannelRole::from_i32(membership.role) { + this.channel_states + .entry(ChannelId(membership.channel_id)) + .or_insert_with(|| ChannelState::default()) + .set_role(role) + } + } + }) + } + + fn handle_connect(&mut self, cx: &mut ModelContext) -> Task> { + self.channel_index.clear(); + self.channel_invitations.clear(); + self.channel_participants.clear(); + 
self.channel_index.clear(); + self.outgoing_invites.clear(); + self.disconnect_channel_buffers_task.take(); + + for chat in self.opened_chats.values() { + if let OpenedModelHandle::Open(chat) = chat { + if let Some(chat) = chat.upgrade() { + chat.update(cx, |chat, cx| { + chat.rejoin(cx); + }); + } + } + } + + let mut buffer_versions = Vec::new(); + for buffer in self.opened_buffers.values() { + if let OpenedModelHandle::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade() { + let channel_buffer = buffer.read(cx); + let buffer = channel_buffer.buffer().read(cx); + buffer_versions.push(proto::ChannelBufferVersion { + channel_id: channel_buffer.channel_id.0, + epoch: channel_buffer.epoch(), + version: language::proto::serialize_version(&buffer.version()), + }); + } + } + } + + if buffer_versions.is_empty() { + return Task::ready(Ok(())); + } + + let response = self.client.request(proto::RejoinChannelBuffers { + buffers: buffer_versions, + }); + + cx.spawn(|this, mut cx| async move { + let mut response = response.await?; + + this.update(&mut cx, |this, cx| { + this.opened_buffers.retain(|_, buffer| match buffer { + OpenedModelHandle::Open(channel_buffer) => { + let Some(channel_buffer) = channel_buffer.upgrade() else { + return false; + }; + + channel_buffer.update(cx, |channel_buffer, cx| { + let channel_id = channel_buffer.channel_id; + if let Some(remote_buffer) = response + .buffers + .iter_mut() + .find(|buffer| buffer.channel_id == channel_id.0) + { + let channel_id = channel_buffer.channel_id; + let remote_version = + language::proto::deserialize_version(&remote_buffer.version); + + channel_buffer.replace_collaborators( + mem::take(&mut remote_buffer.collaborators), + cx, + ); + + let operations = channel_buffer + .buffer() + .update(cx, |buffer, cx| { + let outgoing_operations = + buffer.serialize_ops(Some(remote_version), cx); + let incoming_operations = + mem::take(&mut remote_buffer.operations) + .into_iter() + .map(language::proto::deserialize_operation) + .collect::>>()?; + buffer.apply_ops(incoming_operations, cx)?; + anyhow::Ok(outgoing_operations) + }) + .log_err(); + + if let Some(operations) = operations { + let client = this.client.clone(); + cx.background_executor() + .spawn(async move { + let operations = operations.await; + for chunk in + language::proto::split_operations(operations) + { + client + .send(proto::UpdateChannelBuffer { + channel_id: channel_id.0, + operations: chunk, + }) + .ok(); + } + }) + .detach(); + return true; + } + } + + channel_buffer.disconnect(cx); + false + }) + } + OpenedModelHandle::Loading(_) => true, + }); + }) + .ok(); + anyhow::Ok(()) + }) + } + + fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut ModelContext) { + cx.notify(); + + self.disconnect_channel_buffers_task.get_or_insert_with(|| { + cx.spawn(move |this, mut cx| async move { + if wait_for_reconnect { + cx.background_executor().timer(RECONNECT_TIMEOUT).await; + } + + if let Some(this) = this.upgrade() { + this.update(&mut cx, |this, cx| { + for (_, buffer) in this.opened_buffers.drain() { + if let OpenedModelHandle::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, |buffer, cx| buffer.disconnect(cx)); + } + } + } + }) + .ok(); + } + }) + }); + } + + pub(crate) fn update_channels( + &mut self, + payload: proto::UpdateChannels, + cx: &mut ModelContext, + ) -> Option>> { + if !payload.remove_channel_invitations.is_empty() { + self.channel_invitations + .retain(|channel| 
!payload.remove_channel_invitations.contains(&channel.id.0)); + } + for channel in payload.channel_invitations { + match self + .channel_invitations + .binary_search_by_key(&channel.id, |c| c.id.0) + { + Ok(ix) => { + Arc::make_mut(&mut self.channel_invitations[ix]).name = channel.name.into() + } + Err(ix) => self.channel_invitations.insert( + ix, + Arc::new(Channel { + id: ChannelId(channel.id), + visibility: channel.visibility(), + name: channel.name.into(), + parent_path: channel + .parent_path + .into_iter() + .map(|cid| ChannelId(cid)) + .collect(), + }), + ), + } + } + + let channels_changed = !payload.channels.is_empty() + || !payload.delete_channels.is_empty() + || !payload.latest_channel_message_ids.is_empty() + || !payload.latest_channel_buffer_versions.is_empty() + || !payload.hosted_projects.is_empty() + || !payload.deleted_hosted_projects.is_empty(); + + if channels_changed { + if !payload.delete_channels.is_empty() { + let delete_channels: Vec = payload + .delete_channels + .into_iter() + .map(|cid| ChannelId(cid)) + .collect(); + self.channel_index.delete_channels(&delete_channels); + self.channel_participants + .retain(|channel_id, _| !delete_channels.contains(&channel_id)); + + for channel_id in &delete_channels { + let channel_id = *channel_id; + if payload + .channels + .iter() + .any(|channel| channel.id == channel_id.0) + { + continue; + } + if let Some(OpenedModelHandle::Open(buffer)) = + self.opened_buffers.remove(&channel_id) + { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, ChannelBuffer::disconnect); + } + } + } + } + + let mut index = self.channel_index.bulk_insert(); + for channel in payload.channels { + let id = ChannelId(channel.id); + let channel_changed = index.insert(channel); + + if channel_changed { + if let Some(OpenedModelHandle::Open(buffer)) = self.opened_buffers.get(&id) { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, ChannelBuffer::channel_changed); + } + } + } + } + + for latest_buffer_version in payload.latest_channel_buffer_versions { + let version = language::proto::deserialize_version(&latest_buffer_version.version); + self.channel_states + .entry(ChannelId(latest_buffer_version.channel_id)) + .or_default() + .update_latest_notes_version(latest_buffer_version.epoch, &version) + } + + for latest_channel_message in payload.latest_channel_message_ids { + self.channel_states + .entry(ChannelId(latest_channel_message.channel_id)) + .or_default() + .update_latest_message_id(latest_channel_message.message_id); + } + + for hosted_project in payload.hosted_projects { + let hosted_project: HostedProject = hosted_project.into(); + if let Some(old_project) = self + .hosted_projects + .insert(hosted_project.project_id, hosted_project.clone()) + { + self.channel_states + .entry(old_project.channel_id) + .or_default() + .remove_hosted_project(old_project.project_id); + } + self.channel_states + .entry(hosted_project.channel_id) + .or_default() + .add_hosted_project(hosted_project.project_id); + } + + for hosted_project_id in payload.deleted_hosted_projects { + let hosted_project_id = ProjectId(hosted_project_id); + + if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) { + self.channel_states + .entry(old_project.channel_id) + .or_default() + .remove_hosted_project(old_project.project_id); + } + } + } + + cx.notify(); + if payload.channel_participants.is_empty() { + return None; + } + + let mut all_user_ids = Vec::new(); + let channel_participants = payload.channel_participants; + for entry in 
&channel_participants { + for user_id in entry.participant_user_ids.iter() { + if let Err(ix) = all_user_ids.binary_search(user_id) { + all_user_ids.insert(ix, *user_id); + } + } + } + + let users = self + .user_store + .update(cx, |user_store, cx| user_store.get_users(all_user_ids, cx)); + Some(cx.spawn(|this, mut cx| async move { + let users = users.await?; + + this.update(&mut cx, |this, cx| { + for entry in &channel_participants { + let mut participants: Vec<_> = entry + .participant_user_ids + .iter() + .filter_map(|user_id| { + users + .binary_search_by_key(&user_id, |user| &user.id) + .ok() + .map(|ix| users[ix].clone()) + }) + .collect(); + + participants.sort_by_key(|u| u.id); + + this.channel_participants + .insert(ChannelId(entry.channel_id), participants); + } + + cx.notify(); + }) + })) + } +} + +impl ChannelState { + fn set_role(&mut self, role: ChannelRole) { + self.role = Some(role); + } + + fn has_channel_buffer_changed(&self) -> bool { + self.latest_notes_version.epoch > self.observed_notes_version.epoch + || (self.latest_notes_version.epoch == self.observed_notes_version.epoch + && self + .latest_notes_version + .version + .changed_since(&self.observed_notes_version.version)) + } + + fn has_new_messages(&self) -> bool { + let latest_message_id = self.latest_chat_message; + let observed_message_id = self.observed_chat_message; + + latest_message_id.is_some_and(|latest_message_id| { + latest_message_id > observed_message_id.unwrap_or_default() + }) + } + + fn last_acknowledged_message_id(&self) -> Option { + self.observed_chat_message + } + + fn acknowledge_message_id(&mut self, message_id: u64) { + let observed = self.observed_chat_message.get_or_insert(message_id); + *observed = (*observed).max(message_id); + } + + fn update_latest_message_id(&mut self, message_id: u64) { + self.latest_chat_message = + Some(message_id.max(self.latest_chat_message.unwrap_or_default())); + } + + fn acknowledge_notes_version(&mut self, epoch: u64, version: &clock::Global) { + if self.observed_notes_version.epoch == epoch { + self.observed_notes_version.version.join(version); + } else { + self.observed_notes_version = NotesVersion { + epoch, + version: version.clone(), + }; + } + } + + fn update_latest_notes_version(&mut self, epoch: u64, version: &clock::Global) { + if self.latest_notes_version.epoch == epoch { + self.latest_notes_version.version.join(version); + } else { + self.latest_notes_version = NotesVersion { + epoch, + version: version.clone(), + }; + } + } + + fn add_hosted_project(&mut self, project_id: ProjectId) { + self.projects.insert(project_id); + } + + fn remove_hosted_project(&mut self, project_id: ProjectId) { + self.projects.remove(&project_id); + } +} diff --git a/crates/channel/src/channel_store/channel_index.rs b/crates/channel/src/channel_store/channel_index.rs new file mode 100644 index 0000000..02a8cd3 --- /dev/null +++ b/crates/channel/src/channel_store/channel_index.rs @@ -0,0 +1,116 @@ +use crate::Channel; +use client::ChannelId; +use collections::BTreeMap; +use rpc::proto; +use std::sync::Arc; + +#[derive(Default, Debug)] +pub struct ChannelIndex { + channels_ordered: Vec, + channels_by_id: BTreeMap>, +} + +impl ChannelIndex { + pub fn by_id(&self) -> &BTreeMap> { + &self.channels_by_id + } + + pub fn ordered_channels(&self) -> &[ChannelId] { + &self.channels_ordered + } + + pub fn clear(&mut self) { + self.channels_ordered.clear(); + self.channels_by_id.clear(); + } + + /// Delete the given channels from this index. 
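+    /// Ids that aren't present are ignored; the removal is applied to both the
+    /// by-id map and the ordered list so the two stay consistent.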
+ pub fn delete_channels(&mut self, channels: &[ChannelId]) { + self.channels_by_id + .retain(|channel_id, _| !channels.contains(channel_id)); + self.channels_ordered + .retain(|channel_id| !channels.contains(channel_id)); + } + + pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard { + ChannelPathsInsertGuard { + channels_ordered: &mut self.channels_ordered, + channels_by_id: &mut self.channels_by_id, + } + } +} + +/// A guard for ensuring that the paths index maintains its sort and uniqueness +/// invariants after a series of insertions +#[derive(Debug)] +pub struct ChannelPathsInsertGuard<'a> { + channels_ordered: &'a mut Vec, + channels_by_id: &'a mut BTreeMap>, +} + +impl<'a> ChannelPathsInsertGuard<'a> { + pub fn insert(&mut self, channel_proto: proto::Channel) -> bool { + let mut ret = false; + let parent_path = channel_proto + .parent_path + .iter() + .map(|cid| ChannelId(*cid)) + .collect(); + if let Some(existing_channel) = self.channels_by_id.get_mut(&ChannelId(channel_proto.id)) { + let existing_channel = Arc::make_mut(existing_channel); + + ret = existing_channel.visibility != channel_proto.visibility() + || existing_channel.name != channel_proto.name + || existing_channel.parent_path != parent_path; + + existing_channel.visibility = channel_proto.visibility(); + existing_channel.name = channel_proto.name.into(); + existing_channel.parent_path = parent_path; + } else { + self.channels_by_id.insert( + ChannelId(channel_proto.id), + Arc::new(Channel { + id: ChannelId(channel_proto.id), + visibility: channel_proto.visibility(), + name: channel_proto.name.into(), + parent_path, + }), + ); + self.insert_root(ChannelId(channel_proto.id)); + } + ret + } + + fn insert_root(&mut self, channel_id: ChannelId) { + self.channels_ordered.push(channel_id); + } +} + +impl<'a> Drop for ChannelPathsInsertGuard<'a> { + fn drop(&mut self) { + self.channels_ordered.sort_by(|a, b| { + let a = channel_path_sorting_key(*a, self.channels_by_id); + let b = channel_path_sorting_key(*b, self.channels_by_id); + a.cmp(b) + }); + self.channels_ordered.dedup(); + } +} + +fn channel_path_sorting_key( + id: ChannelId, + channels_by_id: &BTreeMap>, +) -> impl Iterator { + let (parent_path, name) = channels_by_id + .get(&id) + .map_or((&[] as &[_], None), |channel| { + ( + channel.parent_path.as_slice(), + Some((channel.name.as_ref(), channel.id)), + ) + }); + parent_path + .iter() + .filter_map(|id| Some((channels_by_id.get(id)?.name.as_ref(), *id))) + .chain(name) +} diff --git a/crates/channel/src/channel_store_tests.rs b/crates/channel/src/channel_store_tests.rs new file mode 100644 index 0000000..566a4eb --- /dev/null +++ b/crates/channel/src/channel_store_tests.rs @@ -0,0 +1,379 @@ +use crate::channel_chat::ChannelChatEvent; + +use super::*; +use client::{test::FakeServer, Client, UserStore}; +use clock::FakeSystemClock; +use gpui::{AppContext, Context, Model, TestAppContext}; +use http::FakeHttpClient; +use rpc::proto::{self}; +use settings::SettingsStore; + +#[gpui::test] +fn test_update_channels(cx: &mut AppContext) { + let channel_store = init_test(cx); + + update_channels( + &channel_store, + proto::UpdateChannels { + channels: vec![ + proto::Channel { + id: 1, + name: "b".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: Vec::new(), + }, + proto::Channel { + id: 2, + name: "a".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: Vec::new(), + }, + ], + ..Default::default() + }, + cx, + ); + assert_channels( + &channel_store, + &[ + 
// + (0, "a".to_string()), + (0, "b".to_string()), + ], + cx, + ); + + update_channels( + &channel_store, + proto::UpdateChannels { + channels: vec![ + proto::Channel { + id: 3, + name: "x".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: vec![1], + }, + proto::Channel { + id: 4, + name: "y".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: vec![2], + }, + ], + ..Default::default() + }, + cx, + ); + assert_channels( + &channel_store, + &[ + (0, "a".to_string()), + (1, "y".to_string()), + (0, "b".to_string()), + (1, "x".to_string()), + ], + cx, + ); +} + +#[gpui::test] +fn test_dangling_channel_paths(cx: &mut AppContext) { + let channel_store = init_test(cx); + + update_channels( + &channel_store, + proto::UpdateChannels { + channels: vec![ + proto::Channel { + id: 0, + name: "a".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: vec![], + }, + proto::Channel { + id: 1, + name: "b".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: vec![0], + }, + proto::Channel { + id: 2, + name: "c".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: vec![0, 1], + }, + ], + ..Default::default() + }, + cx, + ); + // Sanity check + assert_channels( + &channel_store, + &[ + // + (0, "a".to_string()), + (1, "b".to_string()), + (2, "c".to_string()), + ], + cx, + ); + + update_channels( + &channel_store, + proto::UpdateChannels { + delete_channels: vec![1, 2], + ..Default::default() + }, + cx, + ); + + // Make sure that the 1/2/3 path is gone + assert_channels(&channel_store, &[(0, "a".to_string())], cx); +} + +#[gpui::test] +async fn test_channel_messages(cx: &mut TestAppContext) { + let user_id = 5; + let channel_id = 5; + let channel_store = cx.update(init_test); + let client = channel_store.update(cx, |s, _| s.client()); + let server = FakeServer::for_client(user_id, &client, cx).await; + + // Get the available channels. + server.send(proto::UpdateChannels { + channels: vec![proto::Channel { + id: channel_id, + name: "the-channel".to_string(), + visibility: proto::ChannelVisibility::Members as i32, + parent_path: vec![], + }], + ..Default::default() + }); + cx.executor().run_until_parked(); + cx.update(|cx| { + assert_channels(&channel_store, &[(0, "the-channel".to_string())], cx); + }); + + let get_users = server.receive::().await.unwrap(); + assert_eq!(get_users.payload.user_ids, vec![5]); + server.respond( + get_users.receipt(), + proto::UsersResponse { + users: vec![proto::User { + id: 5, + github_login: "nathansobo".into(), + avatar_url: "http://avatar.com/nathansobo".into(), + }], + }, + ); + + // Join a channel and populate its existing messages. 
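+    // The fake server replies with two messages, one of them from a sender (id 6)
+    // the client hasn't seen yet, which is what triggers the GetUsers request
+    // handled further down.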
+ let channel = channel_store.update(cx, |store, cx| { + let channel_id = store.ordered_channels().next().unwrap().1.id; + store.open_channel_chat(channel_id, cx) + }); + let join_channel = server.receive::().await.unwrap(); + server.respond( + join_channel.receipt(), + proto::JoinChannelChatResponse { + messages: vec![ + proto::ChannelMessage { + id: 10, + body: "a".into(), + timestamp: 1000, + sender_id: 5, + mentions: vec![], + nonce: Some(1.into()), + reply_to_message_id: None, + edited_at: None, + }, + proto::ChannelMessage { + id: 11, + body: "b".into(), + timestamp: 1001, + sender_id: 6, + mentions: vec![], + nonce: Some(2.into()), + reply_to_message_id: None, + edited_at: None, + }, + ], + done: false, + }, + ); + + cx.executor().start_waiting(); + + // Client requests all users for the received messages + let mut get_users = server.receive::().await.unwrap(); + get_users.payload.user_ids.sort(); + assert_eq!(get_users.payload.user_ids, vec![6]); + server.respond( + get_users.receipt(), + proto::UsersResponse { + users: vec![proto::User { + id: 6, + github_login: "maxbrunsfeld".into(), + avatar_url: "http://avatar.com/maxbrunsfeld".into(), + }], + }, + ); + + let channel = channel.await.unwrap(); + channel.update(cx, |channel, _| { + assert_eq!( + channel + .messages_in_range(0..2) + .map(|message| (message.sender.github_login.clone(), message.body.clone())) + .collect::>(), + &[ + ("nathansobo".into(), "a".into()), + ("maxbrunsfeld".into(), "b".into()) + ] + ); + }); + + // Receive a new message. + server.send(proto::ChannelMessageSent { + channel_id, + message: Some(proto::ChannelMessage { + id: 12, + body: "c".into(), + timestamp: 1002, + sender_id: 7, + mentions: vec![], + nonce: Some(3.into()), + reply_to_message_id: None, + edited_at: None, + }), + }); + + // Client requests user for message since they haven't seen them yet + let get_users = server.receive::().await.unwrap(); + assert_eq!(get_users.payload.user_ids, vec![7]); + server.respond( + get_users.receipt(), + proto::UsersResponse { + users: vec![proto::User { + id: 7, + github_login: "as-cii".into(), + avatar_url: "http://avatar.com/as-cii".into(), + }], + }, + ); + + assert_eq!( + channel.next_event(cx).await, + ChannelChatEvent::MessagesUpdated { + old_range: 2..2, + new_count: 1, + } + ); + channel.update(cx, |channel, _| { + assert_eq!( + channel + .messages_in_range(2..3) + .map(|message| (message.sender.github_login.clone(), message.body.clone())) + .collect::>(), + &[("as-cii".into(), "c".into())] + ) + }); + + // Scroll up to view older messages. 
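+    // load_more_messages pages backwards from the first loaded message (id 10 here),
+    // so the GetChannelMessages request below asks for messages before that id.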
+ channel.update(cx, |channel, cx| { + channel.load_more_messages(cx).unwrap().detach(); + }); + let get_messages = server.receive::().await.unwrap(); + assert_eq!(get_messages.payload.channel_id, 5); + assert_eq!(get_messages.payload.before_message_id, 10); + server.respond( + get_messages.receipt(), + proto::GetChannelMessagesResponse { + done: true, + messages: vec![ + proto::ChannelMessage { + id: 8, + body: "y".into(), + timestamp: 998, + sender_id: 5, + nonce: Some(4.into()), + mentions: vec![], + reply_to_message_id: None, + edited_at: None, + }, + proto::ChannelMessage { + id: 9, + body: "z".into(), + timestamp: 999, + sender_id: 6, + nonce: Some(5.into()), + mentions: vec![], + reply_to_message_id: None, + edited_at: None, + }, + ], + }, + ); + + assert_eq!( + channel.next_event(cx).await, + ChannelChatEvent::MessagesUpdated { + old_range: 0..0, + new_count: 2, + } + ); + channel.update(cx, |channel, _| { + assert_eq!( + channel + .messages_in_range(0..2) + .map(|message| (message.sender.github_login.clone(), message.body.clone())) + .collect::>(), + &[ + ("nathansobo".into(), "y".into()), + ("maxbrunsfeld".into(), "z".into()) + ] + ); + }); +} + +fn init_test(cx: &mut AppContext) -> Model { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + release_channel::init("0.0.0", cx); + client::init_settings(cx); + + let clock = Arc::new(FakeSystemClock::default()); + let http = FakeHttpClient::with_404_response(); + let client = Client::new(clock, http.clone(), cx); + let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); + + client::init(&client, cx); + crate::init(&client, user_store, cx); + + ChannelStore::global(cx) +} + +fn update_channels( + channel_store: &Model, + message: proto::UpdateChannels, + cx: &mut AppContext, +) { + let task = channel_store.update(cx, |store, cx| store.update_channels(message, cx)); + assert!(task.is_none()); +} + +#[track_caller] +fn assert_channels( + channel_store: &Model, + expected_channels: &[(usize, String)], + cx: &mut AppContext, +) { + let actual = channel_store.update(cx, |store, _| { + store + .ordered_channels() + .map(|(depth, channel)| (depth, channel.name.to_string())) + .collect::>() + }); + assert_eq!(actual, expected_channels); +} diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml new file mode 100644 index 0000000..199b6bb --- /dev/null +++ b/crates/cli/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "cli" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/cli.rs" +doctest = false + +[[bin]] +name = "cli" +path = "src/main.rs" + +[dependencies] +anyhow.workspace = true +clap.workspace = true +libc.workspace = true +ipc-channel = "0.18" +once_cell.workspace = true +release_channel.workspace = true +serde.workspace = true +util.workspace = true + +[target.'cfg(target_os = "linux")'.dependencies] +exec.workspace = true +fork.workspace = true + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation.workspace = true +core-services = "0.2" +plist = "1.3" diff --git a/crates/cli/LICENSE-GPL b/crates/cli/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/cli/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs new file mode 100644 index 0000000..8b5faaf --- /dev/null +++ b/crates/cli/src/cli.rs @@ -0,0 +1,30 @@ +pub use ipc_channel::ipc; +use serde::{Deserialize, Serialize}; + 
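+/// Sent to the CLI by the Zed app once it connects to the one-shot IPC server
+/// advertised via the `zed-cli://` url (see `main.rs`). It carries the channel
+/// ends the CLI uses to send `CliRequest`s and receive `CliResponse`s for the
+/// rest of the invocation.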
+#[derive(Serialize, Deserialize)]
+pub struct IpcHandshake {
+    pub requests: ipc::IpcSender<CliRequest>,
+    pub responses: ipc::IpcReceiver<CliResponse>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum CliRequest {
+    Open {
+        paths: Vec<String>,
+        wait: bool,
+        open_new_workspace: Option<bool>,
+        dev_server_token: Option<String>,
+    },
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum CliResponse {
+    Ping,
+    Stdout { message: String },
+    Stderr { message: String },
+    Exit { status: i32 },
+}
+
+/// When Zed is started not as an *.app but as a plain binary (e.g. during local development),
+/// this environment variable can be set to tell it to behave "regularly".
+pub const FORCE_CLI_MODE_ENV_VAR_NAME: &str = "ZED_FORCE_CLI_MODE";
diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs
new file mode 100644
index 0000000..c1752a4
--- /dev/null
+++ b/crates/cli/src/main.rs
@@ -0,0 +1,527 @@
+#![cfg_attr(any(target_os = "linux", target_os = "windows"), allow(dead_code))]
+
+use anyhow::{Context, Result};
+use clap::Parser;
+use cli::{ipc::IpcOneShotServer, CliRequest, CliResponse, IpcHandshake};
+use std::{
+    env, fs, io,
+    path::{Path, PathBuf},
+    process::ExitStatus,
+    thread::{self, JoinHandle},
+};
+use util::paths::PathLikeWithPosition;
+
+struct Detect;
+
+trait InstalledApp {
+    fn zed_version_string(&self) -> String;
+    fn launch(&self, ipc_url: String) -> anyhow::Result<()>;
+    fn run_foreground(&self, ipc_url: String) -> io::Result<ExitStatus>;
+}
+
+#[derive(Parser, Debug)]
+#[command(name = "zed", disable_version_flag = true)]
+struct Args {
+    /// Wait for all of the given paths to be opened/closed before exiting.
+    #[arg(short, long)]
+    wait: bool,
+    /// Add files to the currently open workspace
+    #[arg(short, long, overrides_with = "new")]
+    add: bool,
+    /// Create a new workspace
+    #[arg(short, long, overrides_with = "add")]
+    new: bool,
+    /// A sequence of space-separated paths that you want to open.
+    ///
+    /// Use `path:line:row` syntax to open a file at a specific location.
+    /// Non-existing paths and directories will ignore `:line:row` suffix.
+    #[arg(value_parser = parse_path_with_position)]
+    paths_with_position: Vec<PathLikeWithPosition<PathBuf>>,
+    /// Print Zed's version and the app path.
+    #[arg(short, long)]
+    version: bool,
+    /// Run zed in the foreground (useful for debugging)
+    #[arg(long)]
+    foreground: bool,
+    /// Custom path to Zed.app or the zed binary
+    #[arg(long)]
+    zed: Option<PathBuf>,
+    /// Run zed in dev-server mode
+    #[arg(long)]
+    dev_server_token: Option<String>,
+}
+
+fn parse_path_with_position(
+    argument_str: &str,
+) -> Result<PathLikeWithPosition<PathBuf>, std::convert::Infallible> {
+    PathLikeWithPosition::parse_str(argument_str, |path_str| {
+        Ok(Path::new(path_str).to_path_buf())
+    })
+}
+
+fn main() -> Result<()> {
+    // Intercept version designators
+    #[cfg(target_os = "macos")]
+    if let Some(channel) = std::env::args().nth(1).filter(|arg| arg.starts_with("--")) {
+        // When the first argument names a release channel, we spawn the CLI of that version and pass the trailing args along.
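As a rough standalone illustration of this dispatch (channel names and app paths below are hypothetical; the real code resolves the bundle with `osascript` and `mdfind`, as shown later in this file):

```rust
use std::process::Command;

// Hypothetical lookup table; the real implementation queries the system for the app bundle.
fn channel_cli_path(channel: &str) -> Option<&'static str> {
    match channel {
        "stable" => Some("/Applications/Zed.app/Contents/MacOS/cli"),
        "preview" => Some("/Applications/Zed Preview.app/Contents/MacOS/cli"),
        _ => None,
    }
}

fn main() -> std::io::Result<()> {
    let mut args = std::env::args().skip(1);
    if let Some(first) = args.next() {
        if let Some(cli_path) = first.strip_prefix("--").and_then(channel_cli_path) {
            // Forward the remaining arguments to that channel's bundled CLI and exit.
            let status = Command::new(cli_path).args(args).status()?;
            std::process::exit(status.code().unwrap_or(1));
        }
    }
    println!("no channel designator given; continuing with normal argument parsing");
    Ok(())
}
```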
+ use std::str::FromStr as _; + + if let Ok(channel) = release_channel::ReleaseChannel::from_str(&channel[2..]) { + return mac_os::spawn_channel_cli(channel, std::env::args().skip(2).collect()); + } + } + let args = Args::parse(); + + let app = Detect::detect(args.zed.as_deref()).context("Bundle detection")?; + + if args.version { + println!("{}", app.zed_version_string()); + return Ok(()); + } + + let curdir = env::current_dir()?; + let mut paths = vec![]; + for path in args.paths_with_position { + let canonicalized = path.map_path_like(|path| match fs::canonicalize(&path) { + Ok(path) => Ok(path), + Err(e) => { + if let Some(mut parent) = path.parent() { + if parent == Path::new("") { + parent = &curdir; + } + match fs::canonicalize(parent) { + Ok(parent) => Ok(parent.join(path.file_name().unwrap())), + Err(_) => Err(e), + } + } else { + Err(e) + } + } + })?; + paths.push(canonicalized.to_string(|path| path.display().to_string())) + } + + let (server, server_name) = + IpcOneShotServer::::new().context("Handshake before Zed spawn")?; + let url = format!("zed-cli://{server_name}"); + + let open_new_workspace = if args.new { + Some(true) + } else if args.add { + Some(false) + } else { + None + }; + + let sender: JoinHandle> = thread::spawn(move || { + let (_, handshake) = server.accept().context("Handshake after Zed spawn")?; + let (tx, rx) = (handshake.requests, handshake.responses); + tx.send(CliRequest::Open { + paths, + wait: args.wait, + open_new_workspace, + dev_server_token: args.dev_server_token, + })?; + + while let Ok(response) = rx.recv() { + match response { + CliResponse::Ping => {} + CliResponse::Stdout { message } => println!("{message}"), + CliResponse::Stderr { message } => eprintln!("{message}"), + CliResponse::Exit { status } => std::process::exit(status), + } + } + + Ok(()) + }); + + if args.foreground { + app.run_foreground(url)?; + } else { + app.launch(url)?; + sender.join().unwrap()?; + } + + Ok(()) +} + +#[cfg(target_os = "linux")] +mod linux { + use std::{ + env, + ffi::OsString, + io, + os::{ + linux::net::SocketAddrExt, + unix::net::{SocketAddr, UnixDatagram}, + }, + path::{Path, PathBuf}, + process::{self, ExitStatus}, + thread, + time::Duration, + }; + + use anyhow::anyhow; + use cli::FORCE_CLI_MODE_ENV_VAR_NAME; + use fork::Fork; + use once_cell::sync::Lazy; + + use crate::{Detect, InstalledApp}; + + static RELEASE_CHANNEL: Lazy = + Lazy::new(|| include_str!("../../zed/RELEASE_CHANNEL").trim().to_string()); + + struct App(PathBuf); + + impl Detect { + pub fn detect(path: Option<&Path>) -> anyhow::Result { + let path = if let Some(path) = path { + path.to_path_buf().canonicalize() + } else { + let cli = env::current_exe()?; + let dir = cli + .parent() + .ok_or_else(|| anyhow!("no parent path for cli"))?; + + match dir.join("zed").canonicalize() { + Ok(path) => Ok(path), + // development builds have Zed capitalized + Err(e) => match dir.join("Zed").canonicalize() { + Ok(path) => Ok(path), + Err(_) => Err(e), + }, + } + }?; + + Ok(App(path)) + } + } + + impl InstalledApp for App { + fn zed_version_string(&self) -> String { + format!( + "Zed {}{} – {}", + if *RELEASE_CHANNEL == "stable" { + "".to_string() + } else { + format!(" {} ", *RELEASE_CHANNEL) + }, + option_env!("RELEASE_VERSION").unwrap_or_default(), + self.0.display(), + ) + } + + fn launch(&self, ipc_url: String) -> anyhow::Result<()> { + let uid: u32 = unsafe { libc::getuid() }; + let sock_addr = + SocketAddr::from_abstract_name(format!("zed-{}-{}", *RELEASE_CHANNEL, uid))?; + + let sock = 
UnixDatagram::unbound()?; + if sock.connect_addr(&sock_addr).is_err() { + self.boot_background(ipc_url)?; + } else { + sock.send(ipc_url.as_bytes())?; + } + Ok(()) + } + + fn run_foreground(&self, ipc_url: String) -> io::Result { + std::process::Command::new(self.0.clone()) + .arg(ipc_url) + .status() + } + } + + impl App { + fn boot_background(&self, ipc_url: String) -> anyhow::Result<()> { + let path = &self.0; + + match fork::fork() { + Ok(Fork::Parent(_)) => Ok(()), + Ok(Fork::Child) => { + std::env::set_var(FORCE_CLI_MODE_ENV_VAR_NAME, ""); + if let Err(_) = fork::setsid() { + eprintln!("failed to setsid: {}", std::io::Error::last_os_error()); + process::exit(1); + } + if std::env::var("ZED_KEEP_FD").is_err() { + if let Err(_) = fork::close_fd() { + eprintln!("failed to close_fd: {}", std::io::Error::last_os_error()); + } + } + let error = + exec::execvp(path.clone(), &[path.as_os_str(), &OsString::from(ipc_url)]); + // if exec succeeded, we never get here. + eprintln!("failed to exec {:?}: {}", path, error); + process::exit(1) + } + Err(_) => Err(anyhow!(io::Error::last_os_error())), + } + } + + fn wait_for_socket( + &self, + sock_addr: &SocketAddr, + sock: &mut UnixDatagram, + ) -> Result<(), std::io::Error> { + for _ in 0..100 { + thread::sleep(Duration::from_millis(10)); + if sock.connect_addr(&sock_addr).is_ok() { + return Ok(()); + } + } + sock.connect_addr(&sock_addr) + } + } +} + +// todo("windows") +#[cfg(target_os = "windows")] +mod windows { + use crate::{Detect, InstalledApp}; + use std::io; + use std::path::Path; + use std::process::ExitStatus; + + struct App; + impl InstalledApp for App { + fn zed_version_string(&self) -> String { + unimplemented!() + } + fn launch(&self, _ipc_url: String) -> anyhow::Result<()> { + unimplemented!() + } + fn run_foreground(&self, _ipc_url: String) -> io::Result { + unimplemented!() + } + } + + impl Detect { + pub fn detect(_path: Option<&Path>) -> anyhow::Result { + Ok(App) + } + } +} + +#[cfg(target_os = "macos")] +mod mac_os { + use anyhow::{anyhow, Context, Result}; + use core_foundation::{ + array::{CFArray, CFIndex}, + string::kCFStringEncodingUTF8, + url::{CFURLCreateWithBytes, CFURL}, + }; + use core_services::{kLSLaunchDefaults, LSLaunchURLSpec, LSOpenFromURLSpec, TCFType}; + use serde::Deserialize; + use std::{ + ffi::OsStr, + fs, io, + path::{Path, PathBuf}, + process::{Command, ExitStatus}, + ptr, + }; + + use cli::FORCE_CLI_MODE_ENV_VAR_NAME; + + use crate::{Detect, InstalledApp}; + + #[derive(Debug, Deserialize)] + struct InfoPlist { + #[serde(rename = "CFBundleShortVersionString")] + bundle_short_version_string: String, + } + + enum Bundle { + App { + app_bundle: PathBuf, + plist: InfoPlist, + }, + LocalPath { + executable: PathBuf, + plist: InfoPlist, + }, + } + + fn locate_bundle() -> Result { + let cli_path = std::env::current_exe()?.canonicalize()?; + let mut app_path = cli_path.clone(); + while app_path.extension() != Some(OsStr::new("app")) { + if !app_path.pop() { + return Err(anyhow!("cannot find app bundle containing {:?}", cli_path)); + } + } + Ok(app_path) + } + + impl Detect { + pub fn detect(path: Option<&Path>) -> anyhow::Result { + let bundle_path = if let Some(bundle_path) = path { + bundle_path + .canonicalize() + .with_context(|| format!("Args bundle path {bundle_path:?} canonicalization"))? + } else { + locate_bundle().context("bundle autodiscovery")? 
+ }; + + match bundle_path.extension().and_then(|ext| ext.to_str()) { + Some("app") => { + let plist_path = bundle_path.join("Contents/Info.plist"); + let plist = + plist::from_file::<_, InfoPlist>(&plist_path).with_context(|| { + format!("Reading *.app bundle plist file at {plist_path:?}") + })?; + Ok(Bundle::App { + app_bundle: bundle_path, + plist, + }) + } + _ => { + println!("Bundle path {bundle_path:?} has no *.app extension, attempting to locate a dev build"); + let plist_path = bundle_path + .parent() + .with_context(|| format!("Bundle path {bundle_path:?} has no parent"))? + .join("WebRTC.framework/Resources/Info.plist"); + let plist = + plist::from_file::<_, InfoPlist>(&plist_path).with_context(|| { + format!("Reading dev bundle plist file at {plist_path:?}") + })?; + Ok(Bundle::LocalPath { + executable: bundle_path, + plist, + }) + } + } + } + } + + impl InstalledApp for Bundle { + fn zed_version_string(&self) -> String { + let is_dev = matches!(self, Self::LocalPath { .. }); + format!( + "Zed {}{} – {}", + self.plist().bundle_short_version_string, + if is_dev { " (dev)" } else { "" }, + self.path().display(), + ) + } + + fn launch(&self, url: String) -> anyhow::Result<()> { + match self { + Self::App { app_bundle, .. } => { + let app_path = app_bundle; + + let status = unsafe { + let app_url = CFURL::from_path(app_path, true) + .with_context(|| format!("invalid app path {app_path:?}"))?; + let url_to_open = CFURL::wrap_under_create_rule(CFURLCreateWithBytes( + ptr::null(), + url.as_ptr(), + url.len() as CFIndex, + kCFStringEncodingUTF8, + ptr::null(), + )); + // equivalent to: open zed-cli:... -a /Applications/Zed\ Preview.app + let urls_to_open = + CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]); + LSOpenFromURLSpec( + &LSLaunchURLSpec { + appURL: app_url.as_concrete_TypeRef(), + itemURLs: urls_to_open.as_concrete_TypeRef(), + passThruParams: ptr::null(), + launchFlags: kLSLaunchDefaults, + asyncRefCon: ptr::null_mut(), + }, + ptr::null_mut(), + ) + }; + + anyhow::ensure!( + status == 0, + "cannot start app bundle {}", + self.zed_version_string() + ); + } + + Self::LocalPath { executable, .. } => { + let executable_parent = executable + .parent() + .with_context(|| format!("Executable {executable:?} path has no parent"))?; + let subprocess_stdout_file = fs::File::create( + executable_parent.join("zed_dev.log"), + ) + .with_context(|| format!("Log file creation in {executable_parent:?}"))?; + let subprocess_stdin_file = + subprocess_stdout_file.try_clone().with_context(|| { + format!("Cloning descriptor for file {subprocess_stdout_file:?}") + })?; + let mut command = std::process::Command::new(executable); + let command = command + .env(FORCE_CLI_MODE_ENV_VAR_NAME, "") + .stderr(subprocess_stdout_file) + .stdout(subprocess_stdin_file) + .arg(url); + + command + .spawn() + .with_context(|| format!("Spawning {command:?}"))?; + } + } + + Ok(()) + } + + fn run_foreground(&self, ipc_url: String) -> io::Result { + let path = match self { + Bundle::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed"), + Bundle::LocalPath { executable, .. } => executable.clone(), + }; + + std::process::Command::new(path).arg(ipc_url).status() + } + } + + impl Bundle { + fn plist(&self) -> &InfoPlist { + match self { + Self::App { plist, .. } => plist, + Self::LocalPath { plist, .. } => plist, + } + } + + fn path(&self) -> &Path { + match self { + Self::App { app_bundle, .. } => app_bundle, + Self::LocalPath { executable, .. 
} => executable, + } + } + } + + pub(super) fn spawn_channel_cli( + channel: release_channel::ReleaseChannel, + leftover_args: Vec, + ) -> Result<()> { + use anyhow::bail; + + let app_id_prompt = format!("id of app \"{}\"", channel.display_name()); + let app_id_output = Command::new("osascript") + .arg("-e") + .arg(&app_id_prompt) + .output()?; + if !app_id_output.status.success() { + bail!("Could not determine app id for {}", channel.display_name()); + } + let app_name = String::from_utf8(app_id_output.stdout)?.trim().to_owned(); + let app_path_prompt = format!("kMDItemCFBundleIdentifier == '{app_name}'"); + let app_path_output = Command::new("mdfind").arg(app_path_prompt).output()?; + if !app_path_output.status.success() { + bail!( + "Could not determine app path for {}", + channel.display_name() + ); + } + let app_path = String::from_utf8(app_path_output.stdout)?.trim().to_owned(); + let cli_path = format!("{app_path}/Contents/MacOS/cli"); + Command::new(cli_path).args(leftover_args).spawn()?; + Ok(()) + } +} diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml new file mode 100644 index 0000000..b502c2d --- /dev/null +++ b/crates/client/Cargo.toml @@ -0,0 +1,68 @@ +[package] +name = "client" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/client.rs" +doctest = false + +[features] +test-support = ["clock/test-support", "collections/test-support", "gpui/test-support", "rpc/test-support"] + +[dependencies] +anyhow.workspace = true +async-recursion = "0.3" +async-tungstenite = { version = "0.16", features = ["async-std", "async-native-tls"] } +async-native-tls = { version = "0.5.0", features = ["vendored"] } +chrono = { workspace = true, features = ["serde"] } +clock.workspace = true +collections.workspace = true +feature_flags.workspace = true +futures.workspace = true +gpui.workspace = true +http.workspace = true +lazy_static.workspace = true +log.workspace = true +once_cell.workspace = true +parking_lot.workspace = true +postage.workspace = true +rand.workspace = true +release_channel.workspace = true +rpc.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +sha2.workspace = true +smol.workspace = true +sysinfo.workspace = true +telemetry_events.workspace = true +tempfile.workspace = true +text.workspace = true +thiserror.workspace = true +time.workspace = true +tiny_http = "0.8" +url.workspace = true +util.workspace = true + +[dev-dependencies] +clock = { workspace = true, features = ["test-support"] } +collections = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +rpc = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } + +[target.'cfg(target_os = "linux")'.dependencies] +async-native-tls = {"version" = "0.5.0", features = ["vendored"]} +# This is an indirect dependency of async-tungstenite that is included +# here so we can vendor libssl with the feature flag. 
+[package.metadata.cargo-machete] +ignored = ["async-native-tls"] diff --git a/crates/client/LICENSE-GPL b/crates/client/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/client/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs new file mode 100644 index 0000000..c9e61a6 --- /dev/null +++ b/crates/client/src/client.rs @@ -0,0 +1,2014 @@ +#[cfg(any(test, feature = "test-support"))] +pub mod test; + +pub mod telemetry; +pub mod user; + +use anyhow::{anyhow, Context as _, Result}; +use async_recursion::async_recursion; +use async_tungstenite::tungstenite::{ + error::Error as WebsocketError, + http::{Request, StatusCode}, +}; +use clock::SystemClock; +use collections::HashMap; +use futures::{ + channel::oneshot, future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt, + TryFutureExt as _, TryStreamExt, +}; +use gpui::{ + actions, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Global, Model, Task, WeakModel, +}; +use http::{HttpClient, HttpClientWithUrl}; +use lazy_static::lazy_static; +use parking_lot::RwLock; +use postage::watch; +use rand::prelude::*; +use release_channel::{AppVersion, ReleaseChannel}; +use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; +use std::fmt; +use std::pin::Pin; +use std::{ + any::TypeId, + convert::TryFrom, + fmt::Write as _, + future::Future, + marker::PhantomData, + path::PathBuf, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, Weak, + }, + time::{Duration, Instant}, +}; +use telemetry::Telemetry; +use thiserror::Error; +use url::Url; +use util::{ResultExt, TryFutureExt}; + +pub use rpc::*; +pub use telemetry_events::Event; +pub use user::*; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct DevServerToken(pub String); + +impl fmt::Display for DevServerToken { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +lazy_static! { + static ref ZED_SERVER_URL: Option = std::env::var("ZED_SERVER_URL").ok(); + static ref ZED_RPC_URL: Option = std::env::var("ZED_RPC_URL").ok(); + /// An environment variable whose presence indicates that the development auth + /// provider should be used. + /// + /// Only works in development. Setting this environment variable in other release + /// channels is a no-op. 
+ pub static ref ZED_DEVELOPMENT_AUTH: bool = + std::env::var("ZED_DEVELOPMENT_AUTH").map_or(false, |value| !value.is_empty()); + pub static ref IMPERSONATE_LOGIN: Option = std::env::var("ZED_IMPERSONATE") + .ok() + .and_then(|s| if s.is_empty() { None } else { Some(s) }); + pub static ref ADMIN_API_TOKEN: Option = std::env::var("ZED_ADMIN_API_TOKEN") + .ok() + .and_then(|s| if s.is_empty() { None } else { Some(s) }); + pub static ref ZED_APP_PATH: Option = + std::env::var("ZED_APP_PATH").ok().map(PathBuf::from); + pub static ref ZED_ALWAYS_ACTIVE: bool = + std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| !e.is_empty()); +} + +pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(100); +pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20); + +actions!(client, [SignIn, SignOut, Reconnect]); + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct ClientSettingsContent { + server_url: Option, +} + +#[derive(Deserialize)] +pub struct ClientSettings { + pub server_url: String, +} + +impl Settings for ClientSettings { + const KEY: Option<&'static str> = None; + + type FileContent = ClientSettingsContent; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + let mut result = sources.json_merge::()?; + if let Some(server_url) = &*ZED_SERVER_URL { + result.server_url.clone_from(&server_url) + } + Ok(result) + } +} + +#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] +pub struct ProxySettingsContent { + proxy: Option, +} + +#[derive(Deserialize, Default)] +pub struct ProxySettings { + pub proxy: Option, +} + +impl Settings for ProxySettings { + const KEY: Option<&'static str> = None; + + type FileContent = ProxySettingsContent; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + Ok(Self { + proxy: sources + .user + .and_then(|value| value.proxy.clone()) + .or(sources.default.proxy.clone()), + }) + } +} + +pub fn init_settings(cx: &mut AppContext) { + TelemetrySettings::register(cx); + ClientSettings::register(cx); + ProxySettings::register(cx); +} + +pub fn init(client: &Arc, cx: &mut AppContext) { + let client = Arc::downgrade(client); + cx.on_action({ + let client = client.clone(); + move |_: &SignIn, cx| { + if let Some(client) = client.upgrade() { + cx.spawn( + |cx| async move { client.authenticate_and_connect(true, &cx).log_err().await }, + ) + .detach(); + } + } + }); + + cx.on_action({ + let client = client.clone(); + move |_: &SignOut, cx| { + if let Some(client) = client.upgrade() { + cx.spawn(|cx| async move { + client.sign_out(&cx).await; + }) + .detach(); + } + } + }); + + cx.on_action({ + let client = client.clone(); + move |_: &Reconnect, cx| { + if let Some(client) = client.upgrade() { + cx.spawn(|cx| async move { + client.reconnect(&cx); + }) + .detach(); + } + } + }); +} + +struct GlobalClient(Arc); + +impl Global for GlobalClient {} + +pub struct Client { + id: AtomicU64, + peer: Arc, + http: Arc, + telemetry: Arc, + credentials_provider: Arc, + state: RwLock, + + #[allow(clippy::type_complexity)] + #[cfg(any(test, feature = "test-support"))] + authenticate: RwLock< + Option Task>>>, + >, + + #[allow(clippy::type_complexity)] + #[cfg(any(test, feature = "test-support"))] + establish_connection: RwLock< + Option< + Box< + dyn 'static + + Send + + Sync + + Fn( + &Credentials, + &AsyncAppContext, + ) -> Task>, + >, + >, + >, +} + +#[derive(Error, Debug)] +pub enum EstablishConnectionError { + #[error("upgrade required")] + UpgradeRequired, + #[error("unauthorized")] + Unauthorized, 
+ #[error("{0}")] + Other(#[from] anyhow::Error), + #[error("{0}")] + Http(#[from] http::Error), + #[error("{0}")] + Io(#[from] std::io::Error), + #[error("{0}")] + Websocket(#[from] async_tungstenite::tungstenite::http::Error), +} + +impl From for EstablishConnectionError { + fn from(error: WebsocketError) -> Self { + if let WebsocketError::Http(response) = &error { + match response.status() { + StatusCode::UNAUTHORIZED => return EstablishConnectionError::Unauthorized, + StatusCode::UPGRADE_REQUIRED => return EstablishConnectionError::UpgradeRequired, + _ => {} + } + } + EstablishConnectionError::Other(error.into()) + } +} + +impl EstablishConnectionError { + pub fn other(error: impl Into + Send + Sync) -> Self { + Self::Other(error.into()) + } +} + +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum Status { + SignedOut, + UpgradeRequired, + Authenticating, + Connecting, + ConnectionError, + Connected { + peer_id: PeerId, + connection_id: ConnectionId, + }, + ConnectionLost, + Reauthenticating, + Reconnecting, + ReconnectionError { + next_reconnection: Instant, + }, +} + +impl Status { + pub fn is_connected(&self) -> bool { + matches!(self, Self::Connected { .. }) + } + + pub fn is_signed_out(&self) -> bool { + matches!(self, Self::SignedOut | Self::UpgradeRequired) + } +} + +struct ClientState { + credentials: Option, + status: (watch::Sender, watch::Receiver), + entity_id_extractors: HashMap u64>, + _reconnect_task: Option>, + reconnect_interval: Duration, + entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>, + models_by_message_type: HashMap, + entity_types_by_message_type: HashMap, + #[allow(clippy::type_complexity)] + message_handlers: HashMap< + TypeId, + Arc< + dyn Send + + Sync + + Fn( + AnyModel, + Box, + &Arc, + AsyncAppContext, + ) -> LocalBoxFuture<'static, Result<()>>, + >, + >, +} + +enum WeakSubscriber { + Entity { handle: AnyWeakModel }, + Pending(Vec>), +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum Credentials { + DevServer { token: DevServerToken }, + User { user_id: u64, access_token: String }, +} + +impl Credentials { + pub fn authorization_header(&self) -> String { + match self { + Credentials::DevServer { token } => format!("dev-server-token {}", token), + Credentials::User { + user_id, + access_token, + } => format!("{} {}", user_id, access_token), + } + } +} + +/// A provider for [`Credentials`]. +/// +/// Used to abstract over reading and writing credentials to some form of +/// persistence (like the system keychain). +trait CredentialsProvider { + /// Reads the credentials from the provider. + fn read_credentials<'a>( + &'a self, + cx: &'a AsyncAppContext, + ) -> Pin> + 'a>>; + + /// Writes the credentials to the provider. + fn write_credentials<'a>( + &'a self, + user_id: u64, + access_token: String, + cx: &'a AsyncAppContext, + ) -> Pin> + 'a>>; + + /// Deletes the credentials from the provider. 
+ fn delete_credentials<'a>( + &'a self, + cx: &'a AsyncAppContext, + ) -> Pin> + 'a>>; +} + +impl Default for ClientState { + fn default() -> Self { + Self { + credentials: None, + status: watch::channel_with(Status::SignedOut), + entity_id_extractors: Default::default(), + _reconnect_task: None, + reconnect_interval: Duration::from_secs(5), + models_by_message_type: Default::default(), + entities_by_type_and_remote_id: Default::default(), + entity_types_by_message_type: Default::default(), + message_handlers: Default::default(), + } + } +} + +pub enum Subscription { + Entity { + client: Weak, + id: (TypeId, u64), + }, + Message { + client: Weak, + id: TypeId, + }, +} + +impl Drop for Subscription { + fn drop(&mut self) { + match self { + Subscription::Entity { client, id } => { + if let Some(client) = client.upgrade() { + let mut state = client.state.write(); + let _ = state.entities_by_type_and_remote_id.remove(id); + } + } + Subscription::Message { client, id } => { + if let Some(client) = client.upgrade() { + let mut state = client.state.write(); + let _ = state.entity_types_by_message_type.remove(id); + let _ = state.message_handlers.remove(id); + } + } + } + } +} + +pub struct PendingEntitySubscription { + client: Arc, + remote_id: u64, + _entity_type: PhantomData, + consumed: bool, +} + +impl PendingEntitySubscription { + pub fn set_model(mut self, model: &Model, cx: &mut AsyncAppContext) -> Subscription { + self.consumed = true; + let mut state = self.client.state.write(); + let id = (TypeId::of::(), self.remote_id); + let Some(WeakSubscriber::Pending(messages)) = + state.entities_by_type_and_remote_id.remove(&id) + else { + unreachable!() + }; + + state.entities_by_type_and_remote_id.insert( + id, + WeakSubscriber::Entity { + handle: model.downgrade().into(), + }, + ); + drop(state); + for message in messages { + self.client.handle_message(message, cx); + } + Subscription::Entity { + client: Arc::downgrade(&self.client), + id, + } + } +} + +impl Drop for PendingEntitySubscription { + fn drop(&mut self) { + if !self.consumed { + let mut state = self.client.state.write(); + if let Some(WeakSubscriber::Pending(messages)) = state + .entities_by_type_and_remote_id + .remove(&(TypeId::of::(), self.remote_id)) + { + for message in messages { + log::info!("unhandled message {}", message.payload_type_name()); + } + } + } + } +} + +#[derive(Copy, Clone)] +pub struct TelemetrySettings { + pub diagnostics: bool, + pub metrics: bool, +} + +/// Control what info is collected by Zed. +#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] +pub struct TelemetrySettingsContent { + /// Send debug info like crash reports. + /// + /// Default: true + pub diagnostics: Option, + /// Send anonymized usage data like what languages you're using Zed with. 
+ /// + /// Default: true + pub metrics: Option, +} + +impl settings::Settings for TelemetrySettings { + const KEY: Option<&'static str> = Some("telemetry"); + + type FileContent = TelemetrySettingsContent; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + Ok(Self { + diagnostics: sources.user.as_ref().and_then(|v| v.diagnostics).unwrap_or( + sources + .default + .diagnostics + .ok_or_else(Self::missing_default)?, + ), + metrics: sources + .user + .as_ref() + .and_then(|v| v.metrics) + .unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?), + }) + } +} + +impl Client { + pub fn new( + clock: Arc, + http: Arc, + cx: &mut AppContext, + ) -> Arc { + let use_zed_development_auth = match ReleaseChannel::try_global(cx) { + Some(ReleaseChannel::Dev) => *ZED_DEVELOPMENT_AUTH, + Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) + | None => false, + }; + + let credentials_provider: Arc = + if use_zed_development_auth { + Arc::new(DevelopmentCredentialsProvider { + path: util::paths::CONFIG_DIR.join("development_auth"), + }) + } else { + Arc::new(KeychainCredentialsProvider) + }; + + Arc::new(Self { + id: AtomicU64::new(0), + peer: Peer::new(0), + telemetry: Telemetry::new(clock, http.clone(), cx), + http, + credentials_provider, + state: Default::default(), + + #[cfg(any(test, feature = "test-support"))] + authenticate: Default::default(), + #[cfg(any(test, feature = "test-support"))] + establish_connection: Default::default(), + }) + } + + pub fn production(cx: &mut AppContext) -> Arc { + let clock = Arc::new(clock::RealSystemClock); + let http = Arc::new(HttpClientWithUrl::new( + &ClientSettings::get_global(cx).server_url, + ProxySettings::get_global(cx).proxy.clone(), + )); + Self::new(clock, http.clone(), cx) + } + + pub fn id(&self) -> u64 { + self.id.load(Ordering::SeqCst) + } + + pub fn http_client(&self) -> Arc { + self.http.clone() + } + + pub fn set_id(&self, id: u64) -> &Self { + self.id.store(id, Ordering::SeqCst); + self + } + + #[cfg(any(test, feature = "test-support"))] + pub fn teardown(&self) { + let mut state = self.state.write(); + state._reconnect_task.take(); + state.message_handlers.clear(); + state.models_by_message_type.clear(); + state.entities_by_type_and_remote_id.clear(); + state.entity_id_extractors.clear(); + self.peer.teardown(); + } + + #[cfg(any(test, feature = "test-support"))] + pub fn override_authenticate(&self, authenticate: F) -> &Self + where + F: 'static + Send + Sync + Fn(&AsyncAppContext) -> Task>, + { + *self.authenticate.write() = Some(Box::new(authenticate)); + self + } + + #[cfg(any(test, feature = "test-support"))] + pub fn override_establish_connection(&self, connect: F) -> &Self + where + F: 'static + + Send + + Sync + + Fn(&Credentials, &AsyncAppContext) -> Task>, + { + *self.establish_connection.write() = Some(Box::new(connect)); + self + } + + pub fn global(cx: &AppContext) -> Arc { + cx.global::().0.clone() + } + pub fn set_global(client: Arc, cx: &mut AppContext) { + cx.set_global(GlobalClient(client)) + } + + pub fn user_id(&self) -> Option { + if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() { + Some(*user_id) + } else { + None + } + } + + pub fn peer_id(&self) -> Option { + if let Status::Connected { peer_id, .. 
} = &*self.status().borrow() { + Some(*peer_id) + } else { + None + } + } + + pub fn status(&self) -> watch::Receiver { + self.state.read().status.1.clone() + } + + fn set_status(self: &Arc, status: Status, cx: &AsyncAppContext) { + log::info!("set status on client {}: {:?}", self.id(), status); + let mut state = self.state.write(); + *state.status.0.borrow_mut() = status; + + match status { + Status::Connected { .. } => { + state._reconnect_task = None; + } + Status::ConnectionLost => { + let this = self.clone(); + let reconnect_interval = state.reconnect_interval; + state._reconnect_task = Some(cx.spawn(move |cx| async move { + #[cfg(any(test, feature = "test-support"))] + let mut rng = StdRng::seed_from_u64(0); + #[cfg(not(any(test, feature = "test-support")))] + let mut rng = StdRng::from_entropy(); + + let mut delay = INITIAL_RECONNECTION_DELAY; + while let Err(error) = this.authenticate_and_connect(true, &cx).await { + log::error!("failed to connect {}", error); + if matches!(*this.status().borrow(), Status::ConnectionError) { + this.set_status( + Status::ReconnectionError { + next_reconnection: Instant::now() + delay, + }, + &cx, + ); + cx.background_executor().timer(delay).await; + delay = delay + .mul_f32(rng.gen_range(1.0..=2.0)) + .min(reconnect_interval); + } else { + break; + } + } + })); + } + Status::SignedOut | Status::UpgradeRequired => { + self.telemetry.set_authenticated_user_info(None, false); + state._reconnect_task.take(); + } + _ => {} + } + } + + pub fn subscribe_to_entity( + self: &Arc, + remote_id: u64, + ) -> Result> + where + T: 'static, + { + let id = (TypeId::of::(), remote_id); + + let mut state = self.state.write(); + if state.entities_by_type_and_remote_id.contains_key(&id) { + return Err(anyhow!("already subscribed to entity")); + } + + state + .entities_by_type_and_remote_id + .insert(id, WeakSubscriber::Pending(Default::default())); + + Ok(PendingEntitySubscription { + client: self.clone(), + remote_id, + consumed: false, + _entity_type: PhantomData, + }) + } + + #[track_caller] + pub fn add_message_handler( + self: &Arc, + entity: WeakModel, + handler: H, + ) -> Subscription + where + M: EnvelopedMessage, + E: 'static, + H: 'static + + Sync + + Fn(Model, TypedEnvelope, Arc, AsyncAppContext) -> F + + Send + + Sync, + F: 'static + Future>, + { + let message_type_id = TypeId::of::(); + let mut state = self.state.write(); + state + .models_by_message_type + .insert(message_type_id, entity.into()); + + let prev_handler = state.message_handlers.insert( + message_type_id, + Arc::new(move |subscriber, envelope, client, cx| { + let subscriber = subscriber.downcast::().unwrap(); + let envelope = envelope.into_any().downcast::>().unwrap(); + handler(subscriber, *envelope, client.clone(), cx).boxed_local() + }), + ); + if prev_handler.is_some() { + let location = std::panic::Location::caller(); + panic!( + "{}:{} registered handler for the same message {} twice", + location.file(), + location.line(), + std::any::type_name::() + ); + } + + Subscription::Message { + client: Arc::downgrade(self), + id: message_type_id, + } + } + + pub fn add_request_handler( + self: &Arc, + model: WeakModel, + handler: H, + ) -> Subscription + where + M: RequestMessage, + E: 'static, + H: 'static + + Sync + + Fn(Model, TypedEnvelope, Arc, AsyncAppContext) -> F + + Send + + Sync, + F: 'static + Future>, + { + self.add_message_handler(model, move |handle, envelope, this, cx| { + Self::respond_to_request( + envelope.receipt(), + handler(handle, envelope, this.clone(), cx), + this, + ) + 
}) + } + + pub fn add_model_message_handler(self: &Arc, handler: H) + where + M: EntityMessage, + E: 'static, + H: 'static + Fn(Model, TypedEnvelope, Arc, AsyncAppContext) -> F + Send + Sync, + F: 'static + Future>, + { + self.add_entity_message_handler::(move |subscriber, message, client, cx| { + handler(subscriber.downcast::().unwrap(), message, client, cx) + }) + } + + fn add_entity_message_handler(self: &Arc, handler: H) + where + M: EntityMessage, + E: 'static, + H: 'static + Fn(AnyModel, TypedEnvelope, Arc, AsyncAppContext) -> F + Send + Sync, + F: 'static + Future>, + { + let model_type_id = TypeId::of::(); + let message_type_id = TypeId::of::(); + + let mut state = self.state.write(); + state + .entity_types_by_message_type + .insert(message_type_id, model_type_id); + state + .entity_id_extractors + .entry(message_type_id) + .or_insert_with(|| { + |envelope| { + envelope + .as_any() + .downcast_ref::>() + .unwrap() + .payload + .remote_entity_id() + } + }); + let prev_handler = state.message_handlers.insert( + message_type_id, + Arc::new(move |handle, envelope, client, cx| { + let envelope = envelope.into_any().downcast::>().unwrap(); + handler(handle, *envelope, client.clone(), cx).boxed_local() + }), + ); + if prev_handler.is_some() { + panic!("registered handler for the same message twice"); + } + } + + pub fn add_model_request_handler(self: &Arc, handler: H) + where + M: EntityMessage + RequestMessage, + E: 'static, + H: 'static + Fn(Model, TypedEnvelope, Arc, AsyncAppContext) -> F + Send + Sync, + F: 'static + Future>, + { + self.add_model_message_handler(move |entity, envelope, client, cx| { + Self::respond_to_request::( + envelope.receipt(), + handler(entity, envelope, client.clone(), cx), + client, + ) + }) + } + + async fn respond_to_request>>( + receipt: Receipt, + response: F, + client: Arc, + ) -> Result<()> { + match response.await { + Ok(response) => { + client.respond(receipt, response)?; + Ok(()) + } + Err(error) => { + client.respond_with_error(receipt, error.to_proto())?; + Err(error) + } + } + } + + pub async fn has_credentials(&self, cx: &AsyncAppContext) -> bool { + self.credentials_provider + .read_credentials(cx) + .await + .is_some() + } + + pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self { + self.state.write().credentials = Some(Credentials::DevServer { token }); + self + } + + #[async_recursion(?Send)] + pub async fn authenticate_and_connect( + self: &Arc, + try_provider: bool, + cx: &AsyncAppContext, + ) -> anyhow::Result<()> { + let was_disconnected = match *self.status().borrow() { + Status::SignedOut => true, + Status::ConnectionError + | Status::ConnectionLost + | Status::Authenticating { .. } + | Status::Reauthenticating { .. } + | Status::ReconnectionError { .. } => false, + Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => { + return Ok(()) + } + Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?, + }; + if was_disconnected { + self.set_status(Status::Authenticating, cx); + } else { + self.set_status(Status::Reauthenticating, cx) + } + + let mut read_from_provider = false; + let mut credentials = self.state.read().credentials.clone(); + if credentials.is_none() && try_provider { + credentials = self.credentials_provider.read_credentials(cx).await; + read_from_provider = credentials.is_some(); + } + + if credentials.is_none() { + let mut status_rx = self.status(); + let _ = status_rx.next().await; + futures::select_biased! 
{ + authenticate = self.authenticate(cx).fuse() => { + match authenticate { + Ok(creds) => credentials = Some(creds), + Err(err) => { + self.set_status(Status::ConnectionError, cx); + return Err(err); + } + } + } + _ = status_rx.next().fuse() => { + return Err(anyhow!("authentication canceled")); + } + } + } + let credentials = credentials.unwrap(); + if let Credentials::User { user_id, .. } = &credentials { + self.set_id(*user_id); + } + + if was_disconnected { + self.set_status(Status::Connecting, cx); + } else { + self.set_status(Status::Reconnecting, cx); + } + + let mut timeout = + futures::FutureExt::fuse(cx.background_executor().timer(CONNECTION_TIMEOUT)); + futures::select_biased! { + connection = self.establish_connection(&credentials, cx).fuse() => { + match connection { + Ok(conn) => { + self.state.write().credentials = Some(credentials.clone()); + if !read_from_provider && IMPERSONATE_LOGIN.is_none() { + if let Credentials::User{user_id, access_token} = credentials { + self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err(); + } + } + + futures::select_biased! { + result = self.set_connection(conn, cx).fuse() => result, + _ = timeout => { + self.set_status(Status::ConnectionError, cx); + Err(anyhow!("timed out waiting on hello message from server")) + } + } + } + Err(EstablishConnectionError::Unauthorized) => { + self.state.write().credentials.take(); + if read_from_provider { + self.credentials_provider.delete_credentials(cx).await.log_err(); + self.set_status(Status::SignedOut, cx); + self.authenticate_and_connect(false, cx).await + } else { + self.set_status(Status::ConnectionError, cx); + Err(EstablishConnectionError::Unauthorized)? + } + } + Err(EstablishConnectionError::UpgradeRequired) => { + self.set_status(Status::UpgradeRequired, cx); + Err(EstablishConnectionError::UpgradeRequired)? + } + Err(error) => { + self.set_status(Status::ConnectionError, cx); + Err(error)? 
+ } + } + } + _ = &mut timeout => { + self.set_status(Status::ConnectionError, cx); + Err(anyhow!("timed out trying to establish connection")) + } + } + } + + async fn set_connection( + self: &Arc, + conn: Connection, + cx: &AsyncAppContext, + ) -> Result<()> { + let executor = cx.background_executor(); + log::info!("add connection to peer"); + let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn, { + let executor = executor.clone(); + move |duration| executor.timer(duration) + }); + let handle_io = executor.spawn(handle_io); + + let peer_id = async { + log::info!("waiting for server hello"); + let message = incoming + .next() + .await + .ok_or_else(|| anyhow!("no hello message received"))?; + log::info!("got server hello"); + let hello_message_type_name = message.payload_type_name().to_string(); + let hello = message + .into_any() + .downcast::>() + .map_err(|_| { + anyhow!( + "invalid hello message received: {:?}", + hello_message_type_name + ) + })?; + let peer_id = hello + .payload + .peer_id + .ok_or_else(|| anyhow!("invalid peer id"))?; + Ok(peer_id) + }; + + let peer_id = match peer_id.await { + Ok(peer_id) => peer_id, + Err(error) => { + self.peer.disconnect(connection_id); + return Err(error); + } + }; + + log::info!( + "set status to connected (connection id: {:?}, peer id: {:?})", + connection_id, + peer_id + ); + self.set_status( + Status::Connected { + peer_id, + connection_id, + }, + cx, + ); + + cx.spawn({ + let this = self.clone(); + |cx| { + async move { + while let Some(message) = incoming.next().await { + this.handle_message(message, &cx); + // Don't starve the main thread when receiving lots of messages at once. + smol::future::yield_now().await; + } + } + } + }) + .detach(); + + cx.spawn({ + let this = self.clone(); + move |cx| async move { + match handle_io.await { + Ok(()) => { + if *this.status().borrow() + == (Status::Connected { + connection_id, + peer_id, + }) + { + this.set_status(Status::SignedOut, &cx); + } + } + Err(err) => { + log::error!("connection error: {:?}", err); + this.set_status(Status::ConnectionLost, &cx); + } + } + } + }) + .detach(); + + Ok(()) + } + + fn authenticate(self: &Arc, cx: &AsyncAppContext) -> Task> { + #[cfg(any(test, feature = "test-support"))] + if let Some(callback) = self.authenticate.read().as_ref() { + return callback(cx); + } + + self.authenticate_with_browser(cx) + } + + fn establish_connection( + self: &Arc, + credentials: &Credentials, + cx: &AsyncAppContext, + ) -> Task> { + #[cfg(any(test, feature = "test-support"))] + if let Some(callback) = self.establish_connection.read().as_ref() { + return callback(credentials, cx); + } + + self.establish_websocket_connection(credentials, cx) + } + + async fn get_rpc_url( + http: Arc, + release_channel: Option, + ) -> Result { + if let Some(url) = &*ZED_RPC_URL { + return Url::parse(url).context("invalid rpc url"); + } + + let mut url = http.build_url("/rpc"); + if let Some(preview_param) = + release_channel.and_then(|channel| channel.release_query_param()) + { + url += "?"; + url += preview_param; + } + let response = http.get(&url, Default::default(), false).await?; + let collab_url = if response.status().is_redirection() { + response + .headers() + .get("Location") + .ok_or_else(|| anyhow!("missing location header in /rpc response"))? + .to_str() + .map_err(EstablishConnectionError::other)? + .to_string() + } else { + Err(anyhow!( + "unexpected /rpc response status {}", + response.status() + ))? 
+ }; + + Url::parse(&collab_url).context("invalid rpc url") + } + + fn establish_websocket_connection( + self: &Arc, + credentials: &Credentials, + cx: &AsyncAppContext, + ) -> Task> { + let release_channel = cx + .update(|cx| ReleaseChannel::try_global(cx)) + .ok() + .flatten(); + let app_version = cx + .update(|cx| AppVersion::global(cx).to_string()) + .ok() + .unwrap_or_default(); + + let request = Request::builder() + .header("Authorization", credentials.authorization_header()) + .header("x-zed-protocol-version", rpc::PROTOCOL_VERSION) + .header("x-zed-app-version", app_version) + .header( + "x-zed-release-channel", + release_channel.map(|r| r.dev_name()).unwrap_or("unknown"), + ); + + let http = self.http.clone(); + cx.background_executor().spawn(async move { + let mut rpc_url = Self::get_rpc_url(http, release_channel).await?; + let rpc_host = rpc_url + .host_str() + .zip(rpc_url.port_or_known_default()) + .ok_or_else(|| anyhow!("missing host in rpc url"))?; + let stream = smol::net::TcpStream::connect(rpc_host).await?; + + log::info!("connected to rpc endpoint {}", rpc_url); + + match rpc_url.scheme() { + "https" => { + rpc_url.set_scheme("wss").unwrap(); + let request = request.uri(rpc_url.as_str()).body(())?; + let (stream, _) = + async_tungstenite::async_std::client_async_tls(request, stream).await?; + Ok(Connection::new( + stream + .map_err(|error| anyhow!(error)) + .sink_map_err(|error| anyhow!(error)), + )) + } + "http" => { + rpc_url.set_scheme("ws").unwrap(); + let request = request.uri(rpc_url.as_str()).body(())?; + let (stream, _) = async_tungstenite::client_async(request, stream).await?; + Ok(Connection::new( + stream + .map_err(|error| anyhow!(error)) + .sink_map_err(|error| anyhow!(error)), + )) + } + _ => Err(anyhow!("invalid rpc url: {}", rpc_url))?, + } + }) + } + + pub fn authenticate_with_browser( + self: &Arc, + cx: &AsyncAppContext, + ) -> Task> { + let http = self.http.clone(); + cx.spawn(|cx| async move { + let background = cx.background_executor().clone(); + + let (open_url_tx, open_url_rx) = oneshot::channel::(); + cx.update(|cx| { + cx.spawn(move |cx| async move { + let url = open_url_rx.await?; + cx.update(|cx| cx.open_url(&url)) + }) + .detach_and_log_err(cx); + }) + .log_err(); + + let credentials = background + .clone() + .spawn(async move { + // Generate a pair of asymmetric encryption keys. The public key will be used by the + // zed server to encrypt the user's access token, so that it can'be intercepted by + // any other app running on the user's device. + let (public_key, private_key) = + rpc::auth::keypair().expect("failed to generate keypair for auth"); + let public_key_string = String::try_from(public_key) + .expect("failed to serialize public key for auth"); + + if let Some((login, token)) = + IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref()) + { + eprintln!("authenticate as admin {login}, {token}"); + + return Self::authenticate_as_admin(http, login.clone(), token.clone()) + .await; + } + + // Start an HTTP server to receive the redirect from Zed's sign-in page. + let server = + tiny_http::Server::http("127.0.0.1:0").expect("failed to find open port"); + let port = server.server_addr().port(); + + // Open the Zed sign-in page in the user's browser, with query parameters that indicate + // that the user is signing in from a Zed app running on the same device. 
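The redirect that eventually comes back to this local server carries `user_id` and the encrypted `access_token` as query parameters. A minimal sketch of that parsing step, using only the `url` crate and made-up values (the real code then decrypts the token with the keypair generated above):

```rust
use url::Url;

fn parse_login_redirect(path: &str) -> Option<(u64, String)> {
    // The local server only sees a path like "/?user_id=42&access_token=...", so give it
    // a dummy base to satisfy the parser, as the surrounding code does.
    let url = Url::parse(&format!("http://example.com{path}")).ok()?;
    let mut user_id = None;
    let mut access_token = None;
    for (key, value) in url.query_pairs() {
        match key.as_ref() {
            "user_id" => user_id = value.parse().ok(),
            "access_token" => access_token = Some(value.to_string()),
            _ => {}
        }
    }
    Some((user_id?, access_token?))
}

fn main() {
    let parsed = parse_login_redirect("/?user_id=42&access_token=abc123");
    assert_eq!(parsed, Some((42, "abc123".to_string())));
}
```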
+ let mut url = http.build_url(&format!( + "/native_app_signin?native_app_port={}&native_app_public_key={}", + port, public_key_string + )); + + if let Some(impersonate_login) = IMPERSONATE_LOGIN.as_ref() { + log::info!("impersonating user @{}", impersonate_login); + write!(&mut url, "&impersonate={}", impersonate_login).unwrap(); + } + + open_url_tx.send(url).log_err(); + + // Receive the HTTP request from the user's browser. Retrieve the user id and encrypted + // access token from the query params. + // + // TODO - Avoid ever starting more than one HTTP server. Maybe switch to using a + // custom URL scheme instead of this local HTTP server. + let (user_id, access_token) = background + .spawn(async move { + for _ in 0..100 { + if let Some(req) = server.recv_timeout(Duration::from_secs(1))? { + let path = req.url(); + let mut user_id = None; + let mut access_token = None; + let url = Url::parse(&format!("http://example.com{}", path)) + .context("failed to parse login notification url")?; + for (key, value) in url.query_pairs() { + if key == "access_token" { + access_token = Some(value.to_string()); + } else if key == "user_id" { + user_id = Some(value.to_string()); + } + } + + let post_auth_url = + http.build_url("/native_app_signin_succeeded"); + req.respond( + tiny_http::Response::empty(302).with_header( + tiny_http::Header::from_bytes( + &b"Location"[..], + post_auth_url.as_bytes(), + ) + .unwrap(), + ), + ) + .context("failed to respond to login http request")?; + return Ok(( + user_id + .ok_or_else(|| anyhow!("missing user_id parameter"))?, + access_token.ok_or_else(|| { + anyhow!("missing access_token parameter") + })?, + )); + } + } + + Err(anyhow!("didn't receive login redirect")) + }) + .await?; + + let access_token = private_key + .decrypt_string(&access_token) + .context("failed to decrypt access token")?; + + Ok(Credentials::User { + user_id: user_id.parse()?, + access_token, + }) + }) + .await?; + + cx.update(|cx| cx.activate(true))?; + Ok(credentials) + }) + } + + async fn authenticate_as_admin( + http: Arc, + login: String, + mut api_token: String, + ) -> Result { + #[derive(Deserialize)] + struct AuthenticatedUserResponse { + user: User, + } + + #[derive(Deserialize)] + struct User { + id: u64, + } + + // Use the collab server's admin API to retrieve the id + // of the impersonated user. + let mut url = Self::get_rpc_url(http.clone(), None).await?; + url.set_path("/user"); + url.set_query(Some(&format!("github_login={login}"))); + let request = Request::get(url.as_str()) + .header("Authorization", format!("token {api_token}")) + .body("".into())?; + + let mut response = http.send(request).await?; + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + if !response.status().is_success() { + Err(anyhow!( + "admin user request failed {} - {}", + response.status().as_u16(), + body, + ))?; + } + let response: AuthenticatedUserResponse = serde_json::from_str(&body)?; + + // Use the admin API token to authenticate as the impersonated user. 
+ api_token.insert_str(0, "ADMIN_TOKEN:"); + Ok(Credentials::User { + user_id: response.user.id, + access_token: api_token, + }) + } + + pub async fn sign_out(self: &Arc, cx: &AsyncAppContext) { + self.state.write().credentials = None; + self.disconnect(&cx); + + if self.has_credentials(cx).await { + self.credentials_provider + .delete_credentials(cx) + .await + .log_err(); + } + } + + pub fn disconnect(self: &Arc, cx: &AsyncAppContext) { + self.peer.teardown(); + self.set_status(Status::SignedOut, cx); + } + + pub fn reconnect(self: &Arc, cx: &AsyncAppContext) { + self.peer.teardown(); + self.set_status(Status::ConnectionLost, cx); + } + + fn connection_id(&self) -> Result { + if let Status::Connected { connection_id, .. } = *self.status().borrow() { + Ok(connection_id) + } else { + Err(anyhow!("not connected")) + } + } + + pub fn send(&self, message: T) -> Result<()> { + log::debug!("rpc send. client_id:{}, name:{}", self.id(), T::NAME); + self.peer.send(self.connection_id()?, message) + } + + pub fn request( + &self, + request: T, + ) -> impl Future> { + self.request_envelope(request) + .map_ok(|envelope| envelope.payload) + } + + pub fn request_stream( + &self, + request: T, + ) -> impl Future>>> { + let client_id = self.id.load(Ordering::SeqCst); + log::debug!( + "rpc request start. client_id:{}. name:{}", + client_id, + T::NAME + ); + let response = self + .connection_id() + .map(|conn_id| self.peer.request_stream(conn_id, request)); + async move { + let response = response?.await; + log::debug!( + "rpc request finish. client_id:{}. name:{}", + client_id, + T::NAME + ); + response + } + } + + pub fn request_envelope( + &self, + request: T, + ) -> impl Future>> { + let client_id = self.id(); + log::debug!( + "rpc request start. client_id:{}. name:{}", + client_id, + T::NAME + ); + let response = self + .connection_id() + .map(|conn_id| self.peer.request_envelope(conn_id, request)); + async move { + let response = response?.await; + log::debug!( + "rpc request finish. client_id:{}. name:{}", + client_id, + T::NAME + ); + response + } + } + + fn respond(&self, receipt: Receipt, response: T::Response) -> Result<()> { + log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME); + self.peer.respond(receipt, response) + } + + fn respond_with_error( + &self, + receipt: Receipt, + error: proto::Error, + ) -> Result<()> { + log::debug!("rpc respond. client_id:{}. 
name:{}", self.id(), T::NAME); + self.peer.respond_with_error(receipt, error) + } + + fn handle_message( + self: &Arc, + message: Box, + cx: &AsyncAppContext, + ) { + let mut state = self.state.write(); + let type_name = message.payload_type_name(); + let payload_type_id = message.payload_type_id(); + let sender_id = message.original_sender_id(); + + let mut subscriber = None; + + if let Some(handle) = state + .models_by_message_type + .get(&payload_type_id) + .and_then(|handle| handle.upgrade()) + { + subscriber = Some(handle); + } else if let Some((extract_entity_id, entity_type_id)) = + state.entity_id_extractors.get(&payload_type_id).zip( + state + .entity_types_by_message_type + .get(&payload_type_id) + .copied(), + ) + { + let entity_id = (extract_entity_id)(message.as_ref()); + + match state + .entities_by_type_and_remote_id + .get_mut(&(entity_type_id, entity_id)) + { + Some(WeakSubscriber::Pending(pending)) => { + pending.push(message); + return; + } + Some(weak_subscriber) => match weak_subscriber { + WeakSubscriber::Entity { handle } => { + subscriber = handle.upgrade(); + } + + WeakSubscriber::Pending(_) => {} + }, + _ => {} + } + } + + let subscriber = if let Some(subscriber) = subscriber { + subscriber + } else { + log::info!("unhandled message {}", type_name); + self.peer.respond_with_unhandled_message(message).log_err(); + return; + }; + + let handler = state.message_handlers.get(&payload_type_id).cloned(); + // Dropping the state prevents deadlocks if the handler interacts with rpc::Client. + // It also ensures we don't hold the lock while yielding back to the executor, as + // that might cause the executor thread driving this future to block indefinitely. + drop(state); + + if let Some(handler) = handler { + let future = handler(subscriber, message, self, cx.clone()); + let client_id = self.id(); + log::debug!( + "rpc message received. client_id:{}, sender_id:{:?}, type:{}", + client_id, + sender_id, + type_name + ); + cx.spawn(move |_| async move { + match future.await { + Ok(()) => { + log::debug!( + "rpc message handled. client_id:{}, sender_id:{:?}, type:{}", + client_id, + sender_id, + type_name + ); + } + Err(error) => { + log::error!( + "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}", + client_id, + sender_id, + type_name, + error + ); + } + } + }) + .detach(); + } else { + log::info!("unhandled message {}", type_name); + self.peer.respond_with_unhandled_message(message).log_err(); + } + } + + pub fn telemetry(&self) -> &Arc { + &self.telemetry + } +} + +#[derive(Serialize, Deserialize)] +struct DevelopmentCredentials { + user_id: u64, + access_token: String, +} + +/// A credentials provider that stores credentials in a local file. +/// +/// This MUST only be used in development, as this is not a secure way of storing +/// credentials on user machines. +/// +/// Its existence is purely to work around the annoyance of having to constantly +/// re-allow access to the system keychain when developing Zed. 
+struct DevelopmentCredentialsProvider { + path: PathBuf, +} + +impl CredentialsProvider for DevelopmentCredentialsProvider { + fn read_credentials<'a>( + &'a self, + _cx: &'a AsyncAppContext, + ) -> Pin> + 'a>> { + async move { + if IMPERSONATE_LOGIN.is_some() { + return None; + } + + let json = std::fs::read(&self.path).log_err()?; + + let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?; + + Some(Credentials::User { + user_id: credentials.user_id, + access_token: credentials.access_token, + }) + } + .boxed_local() + } + + fn write_credentials<'a>( + &'a self, + user_id: u64, + access_token: String, + _cx: &'a AsyncAppContext, + ) -> Pin> + 'a>> { + async move { + let json = serde_json::to_string(&DevelopmentCredentials { + user_id, + access_token, + })?; + + std::fs::write(&self.path, json)?; + + Ok(()) + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + _cx: &'a AsyncAppContext, + ) -> Pin> + 'a>> { + async move { Ok(std::fs::remove_file(&self.path)?) }.boxed_local() + } +} + +/// A credentials provider that stores credentials in the system keychain. +struct KeychainCredentialsProvider; + +impl CredentialsProvider for KeychainCredentialsProvider { + fn read_credentials<'a>( + &'a self, + cx: &'a AsyncAppContext, + ) -> Pin> + 'a>> { + async move { + if IMPERSONATE_LOGIN.is_some() { + return None; + } + + let (user_id, access_token) = cx + .update(|cx| cx.read_credentials(&ClientSettings::get_global(cx).server_url)) + .log_err()? + .await + .log_err()??; + + Some(Credentials::User { + user_id: user_id.parse().ok()?, + access_token: String::from_utf8(access_token).ok()?, + }) + } + .boxed_local() + } + + fn write_credentials<'a>( + &'a self, + user_id: u64, + access_token: String, + cx: &'a AsyncAppContext, + ) -> Pin> + 'a>> { + async move { + cx.update(move |cx| { + cx.write_credentials( + &ClientSettings::get_global(cx).server_url, + &user_id.to_string(), + access_token.as_bytes(), + ) + })? + .await + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + cx: &'a AsyncAppContext, + ) -> Pin> + 'a>> { + async move { + cx.update(move |cx| cx.delete_credentials(&ClientSettings::get_global(cx).server_url))? + .await + } + .boxed_local() + } +} + +/// prefix for the zed:// url scheme +pub static ZED_URL_SCHEME: &str = "zed"; + +/// Parses the given link into a Zed link. +/// +/// Returns a [`Some`] containing the unprefixed link if the link is a Zed link. +/// Returns [`None`] otherwise. 
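A simplified, context-free rendition of the parsing performed by the function below; it assumes a fixed server URL of `https://zed.dev` (the real function reads it from `ClientSettings`), and the example links are made up:

```rust
// Sketch of the link-stripping logic: accept either "<server_url>/<rest>" or "zed://<rest>".
fn parse_zed_link_simple<'a>(link: &'a str, server_url: &str) -> Option<&'a str> {
    if let Some(stripped) = link
        .strip_prefix(server_url)
        .and_then(|rest| rest.strip_prefix('/'))
    {
        return Some(stripped);
    }
    link.strip_prefix("zed").and_then(|rest| rest.strip_prefix("://"))
}

fn main() {
    assert_eq!(
        parse_zed_link_simple("https://zed.dev/channel/foo-123", "https://zed.dev"),
        Some("channel/foo-123")
    );
    assert_eq!(
        parse_zed_link_simple("zed://channel/foo-123", "https://zed.dev"),
        Some("channel/foo-123")
    );
    assert_eq!(parse_zed_link_simple("https://example.com/x", "https://zed.dev"), None);
}
```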
+pub fn parse_zed_link<'a>(link: &'a str, cx: &AppContext) -> Option<&'a str> { + let server_url = &ClientSettings::get_global(cx).server_url; + if let Some(stripped) = link + .strip_prefix(server_url) + .and_then(|result| result.strip_prefix('/')) + { + return Some(stripped); + } + if let Some(stripped) = link + .strip_prefix(ZED_URL_SCHEME) + .and_then(|result| result.strip_prefix("://")) + { + return Some(stripped); + } + + None +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test::FakeServer; + + use clock::FakeSystemClock; + use gpui::{BackgroundExecutor, Context, TestAppContext}; + use http::FakeHttpClient; + use parking_lot::Mutex; + use settings::SettingsStore; + use std::future; + + #[gpui::test(iterations = 10)] + async fn test_reconnection(cx: &mut TestAppContext) { + init_test(cx); + let user_id = 5; + let client = cx.update(|cx| { + Client::new( + Arc::new(FakeSystemClock::default()), + FakeHttpClient::with_404_response(), + cx, + ) + }); + let server = FakeServer::for_client(user_id, &client, cx).await; + let mut status = client.status(); + assert!(matches!( + status.next().await, + Some(Status::Connected { .. }) + )); + assert_eq!(server.auth_count(), 1); + + server.forbid_connections(); + server.disconnect(); + while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {} + + server.allow_connections(); + cx.executor().advance_clock(Duration::from_secs(10)); + while !matches!(status.next().await, Some(Status::Connected { .. })) {} + assert_eq!(server.auth_count(), 1); // Client reused the cached credentials when reconnecting + + server.forbid_connections(); + server.disconnect(); + while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {} + + // Clear cached credentials after authentication fails + server.roll_access_token(); + server.allow_connections(); + cx.executor().run_until_parked(); + cx.executor().advance_clock(Duration::from_secs(10)); + while !matches!(status.next().await, Some(Status::Connected { .. })) {} + assert_eq!(server.auth_count(), 2); // Client re-authenticated due to an invalid token + } + + #[gpui::test(iterations = 10)] + async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx); + let user_id = 5; + let client = cx.update(|cx| { + Client::new( + Arc::new(FakeSystemClock::default()), + FakeHttpClient::with_404_response(), + cx, + ) + }); + let mut status = client.status(); + + // Time out when client tries to connect. + client.override_authenticate(move |cx| { + cx.background_executor().spawn(async move { + Ok(Credentials::User { + user_id, + access_token: "token".into(), + }) + }) + }); + client.override_establish_connection(|_, cx| { + cx.background_executor().spawn(async move { + future::pending::<()>().await; + unreachable!() + }) + }); + let auth_and_connect = cx.spawn({ + let client = client.clone(); + |cx| async move { client.authenticate_and_connect(false, &cx).await } + }); + executor.run_until_parked(); + assert!(matches!(status.next().await, Some(Status::Connecting))); + + executor.advance_clock(CONNECTION_TIMEOUT); + assert!(matches!( + status.next().await, + Some(Status::ConnectionError { .. }) + )); + auth_and_connect.await.unwrap_err(); + + // Allow the connection to be established. + let server = FakeServer::for_client(user_id, &client, cx).await; + assert!(matches!( + status.next().await, + Some(Status::Connected { .. }) + )); + + // Disconnect client. 
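// Forbid new connections before disconnecting so that reconnection attempts fail
// and the `ReconnectionError` status below can be observed deterministically.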
+ server.forbid_connections(); + server.disconnect(); + while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {} + + // Time out when re-establishing the connection. + server.allow_connections(); + client.override_establish_connection(|_, cx| { + cx.background_executor().spawn(async move { + future::pending::<()>().await; + unreachable!() + }) + }); + executor.advance_clock(2 * INITIAL_RECONNECTION_DELAY); + assert!(matches!( + status.next().await, + Some(Status::Reconnecting { .. }) + )); + + executor.advance_clock(CONNECTION_TIMEOUT); + assert!(matches!( + status.next().await, + Some(Status::ReconnectionError { .. }) + )); + } + + #[gpui::test(iterations = 10)] + async fn test_authenticating_more_than_once( + cx: &mut TestAppContext, + executor: BackgroundExecutor, + ) { + init_test(cx); + let auth_count = Arc::new(Mutex::new(0)); + let dropped_auth_count = Arc::new(Mutex::new(0)); + let client = cx.update(|cx| { + Client::new( + Arc::new(FakeSystemClock::default()), + FakeHttpClient::with_404_response(), + cx, + ) + }); + client.override_authenticate({ + let auth_count = auth_count.clone(); + let dropped_auth_count = dropped_auth_count.clone(); + move |cx| { + let auth_count = auth_count.clone(); + let dropped_auth_count = dropped_auth_count.clone(); + cx.background_executor().spawn(async move { + *auth_count.lock() += 1; + let _drop = util::defer(move || *dropped_auth_count.lock() += 1); + future::pending::<()>().await; + unreachable!() + }) + } + }); + + let _authenticate = cx.spawn({ + let client = client.clone(); + move |cx| async move { client.authenticate_and_connect(false, &cx).await } + }); + executor.run_until_parked(); + assert_eq!(*auth_count.lock(), 1); + assert_eq!(*dropped_auth_count.lock(), 0); + + let _authenticate = cx.spawn({ + let client = client.clone(); + |cx| async move { client.authenticate_and_connect(false, &cx).await } + }); + executor.run_until_parked(); + assert_eq!(*auth_count.lock(), 2); + assert_eq!(*dropped_auth_count.lock(), 1); + } + + #[gpui::test] + async fn test_subscribing_to_entity(cx: &mut TestAppContext) { + init_test(cx); + let user_id = 5; + let client = cx.update(|cx| { + Client::new( + Arc::new(FakeSystemClock::default()), + FakeHttpClient::with_404_response(), + cx, + ) + }); + let server = FakeServer::for_client(user_id, &client, cx).await; + + let (done_tx1, mut done_rx1) = smol::channel::unbounded(); + let (done_tx2, mut done_rx2) = smol::channel::unbounded(); + client.add_model_message_handler( + move |model: Model, _: TypedEnvelope, _, mut cx| { + match model.update(&mut cx, |model, _| model.id).unwrap() { + 1 => done_tx1.try_send(()).unwrap(), + 2 => done_tx2.try_send(()).unwrap(), + _ => unreachable!(), + } + async { Ok(()) } + }, + ); + let model1 = cx.new_model(|_| TestModel { + id: 1, + subscription: None, + }); + let model2 = cx.new_model(|_| TestModel { + id: 2, + subscription: None, + }); + let model3 = cx.new_model(|_| TestModel { + id: 3, + subscription: None, + }); + + let _subscription1 = client + .subscribe_to_entity(1) + .unwrap() + .set_model(&model1, &mut cx.to_async()); + let _subscription2 = client + .subscribe_to_entity(2) + .unwrap() + .set_model(&model2, &mut cx.to_async()); + // Ensure dropping a subscription for the same entity type still allows receiving of + // messages for other entity IDs of the same type. 
+ let subscription3 = client + .subscribe_to_entity(3) + .unwrap() + .set_model(&model3, &mut cx.to_async()); + drop(subscription3); + + server.send(proto::JoinProject { project_id: 1 }); + server.send(proto::JoinProject { project_id: 2 }); + done_rx1.next().await.unwrap(); + done_rx2.next().await.unwrap(); + } + + #[gpui::test] + async fn test_subscribing_after_dropping_subscription(cx: &mut TestAppContext) { + init_test(cx); + let user_id = 5; + let client = cx.update(|cx| { + Client::new( + Arc::new(FakeSystemClock::default()), + FakeHttpClient::with_404_response(), + cx, + ) + }); + let server = FakeServer::for_client(user_id, &client, cx).await; + + let model = cx.new_model(|_| TestModel::default()); + let (done_tx1, _done_rx1) = smol::channel::unbounded(); + let (done_tx2, mut done_rx2) = smol::channel::unbounded(); + let subscription1 = client.add_message_handler( + model.downgrade(), + move |_, _: TypedEnvelope, _, _| { + done_tx1.try_send(()).unwrap(); + async { Ok(()) } + }, + ); + drop(subscription1); + let _subscription2 = client.add_message_handler( + model.downgrade(), + move |_, _: TypedEnvelope, _, _| { + done_tx2.try_send(()).unwrap(); + async { Ok(()) } + }, + ); + server.send(proto::Ping {}); + done_rx2.next().await.unwrap(); + } + + #[gpui::test] + async fn test_dropping_subscription_in_handler(cx: &mut TestAppContext) { + init_test(cx); + let user_id = 5; + let client = cx.update(|cx| { + Client::new( + Arc::new(FakeSystemClock::default()), + FakeHttpClient::with_404_response(), + cx, + ) + }); + let server = FakeServer::for_client(user_id, &client, cx).await; + + let model = cx.new_model(|_| TestModel::default()); + let (done_tx, mut done_rx) = smol::channel::unbounded(); + let subscription = client.add_message_handler( + model.clone().downgrade(), + move |model: Model, _: TypedEnvelope, _, mut cx| { + model + .update(&mut cx, |model, _| model.subscription.take()) + .unwrap(); + done_tx.try_send(()).unwrap(); + async { Ok(()) } + }, + ); + model.update(cx, |model, _| { + model.subscription = Some(subscription); + }); + server.send(proto::Ping {}); + done_rx.next().await.unwrap(); + } + + #[derive(Default)] + struct TestModel { + id: usize, + subscription: Option, + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + init_settings(cx); + }); + } +} diff --git a/crates/client/src/http.rs b/crates/client/src/http.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/crates/client/src/http.rs @@ -0,0 +1 @@ + diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs new file mode 100644 index 0000000..feaa11d --- /dev/null +++ b/crates/client/src/telemetry.rs @@ -0,0 +1,673 @@ +mod event_coalescer; + +use crate::{ChannelId, TelemetrySettings}; +use chrono::{DateTime, Utc}; +use clock::SystemClock; +use futures::Future; +use gpui::{AppContext, AppMetadata, BackgroundExecutor, Task}; +use http::{self, HttpClient, HttpClientWithUrl, Method}; +use once_cell::sync::Lazy; +use parking_lot::Mutex; +use release_channel::ReleaseChannel; +use settings::{Settings, SettingsStore}; +use sha2::{Digest, Sha256}; +use std::io::Write; +use std::{env, mem, path::PathBuf, sync::Arc, time::Duration}; +use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System}; +use telemetry_events::{ + ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent, + EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, 
InlineCompletionEvent, + MemoryEvent, SettingEvent, +}; +use tempfile::NamedTempFile; +#[cfg(not(debug_assertions))] +use util::ResultExt; +use util::TryFutureExt; + +use self::event_coalescer::EventCoalescer; + +pub struct Telemetry { + clock: Arc, + http_client: Arc, + executor: BackgroundExecutor, + state: Arc>, +} + +struct TelemetryState { + settings: TelemetrySettings, + metrics_id: Option>, // Per logged-in user + installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) + session_id: Option, // Per app launch + release_channel: Option<&'static str>, + app_metadata: AppMetadata, + architecture: &'static str, + events_queue: Vec, + flush_events_task: Option>, + log_file: Option, + is_staff: Option, + first_event_date_time: Option>, + event_coalescer: EventCoalescer, + max_queue_size: usize, +} + +#[cfg(debug_assertions)] +const MAX_QUEUE_LEN: usize = 5; + +#[cfg(not(debug_assertions))] +const MAX_QUEUE_LEN: usize = 50; + +#[cfg(debug_assertions)] +const FLUSH_INTERVAL: Duration = Duration::from_secs(1); + +#[cfg(not(debug_assertions))] +const FLUSH_INTERVAL: Duration = Duration::from_secs(60 * 5); +static ZED_CLIENT_CHECKSUM_SEED: Lazy>> = Lazy::new(|| { + option_env!("ZED_CLIENT_CHECKSUM_SEED") + .map(|s| s.as_bytes().into()) + .or_else(|| { + env::var("ZED_CLIENT_CHECKSUM_SEED") + .ok() + .map(|s| s.as_bytes().into()) + }) +}); + +impl Telemetry { + pub fn new( + clock: Arc, + client: Arc, + cx: &mut AppContext, + ) -> Arc { + let release_channel = + ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name()); + + TelemetrySettings::register(cx); + + let state = Arc::new(Mutex::new(TelemetryState { + settings: *TelemetrySettings::get_global(cx), + app_metadata: cx.app_metadata(), + architecture: env::consts::ARCH, + release_channel, + installation_id: None, + metrics_id: None, + session_id: None, + events_queue: Vec::new(), + flush_events_task: None, + log_file: None, + is_staff: None, + first_event_date_time: None, + event_coalescer: EventCoalescer::new(clock.clone()), + max_queue_size: MAX_QUEUE_LEN, + })); + + #[cfg(not(debug_assertions))] + cx.background_executor() + .spawn({ + let state = state.clone(); + async move { + if let Some(tempfile) = + NamedTempFile::new_in(util::paths::CONFIG_DIR.as_path()).log_err() + { + state.lock().log_file = Some(tempfile); + } + } + }) + .detach(); + + cx.observe_global::({ + let state = state.clone(); + + move |cx| { + let mut state = state.lock(); + state.settings = *TelemetrySettings::get_global(cx); + } + }) + .detach(); + + // TODO: Replace all hardware stuff with nested SystemSpecs json + let this = Arc::new(Self { + clock, + http_client: client, + executor: cx.background_executor().clone(), + state, + }); + + // We should only ever have one instance of Telemetry, leak the subscription to keep it alive + // rather than store in TelemetryState, complicating spawn as subscriptions are not Send + std::mem::forget(cx.on_app_quit({ + let this = this.clone(); + move |_| this.shutdown_telemetry() + })); + + this + } + + #[cfg(any(test, feature = "test-support"))] + fn shutdown_telemetry(self: &Arc) -> impl Future { + Task::ready(()) + } + + // Skip calling this function in tests. 
+ // TestAppContext ends up calling this function on shutdown and it panics when trying to find the TelemetrySettings + #[cfg(not(any(test, feature = "test-support")))] + fn shutdown_telemetry(self: &Arc) -> impl Future { + self.report_app_event("close".to_string()); + // TODO: close final edit period and make sure it's sent + Task::ready(()) + } + + pub fn log_file_path(&self) -> Option { + Some(self.state.lock().log_file.as_ref()?.path().to_path_buf()) + } + + pub fn start( + self: &Arc, + installation_id: Option, + session_id: String, + cx: &mut AppContext, + ) { + let mut state = self.state.lock(); + state.installation_id = installation_id.map(|id| id.into()); + state.session_id = Some(session_id); + drop(state); + + let this = self.clone(); + cx.background_executor() + .spawn(async move { + let mut system = System::new_with_specifics( + RefreshKind::new().with_cpu(CpuRefreshKind::everything()), + ); + + let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory(); + let current_process = Pid::from_u32(std::process::id()); + system.refresh_process_specifics(current_process, refresh_kind); + + // Waiting some amount of time before the first query is important to get a reasonable value + // https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage + const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(4 * 60); + + loop { + smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await; + + let current_process = Pid::from_u32(std::process::id()); + system.refresh_process_specifics(current_process, refresh_kind); + let Some(process) = system.process(current_process) else { + log::error!( + "Failed to find own process {current_process:?} in system process table" + ); + // TODO: Fire an error telemetry event + return; + }; + + this.report_memory_event(process.memory(), process.virtual_memory()); + this.report_cpu_event(process.cpu_usage(), system.cpus().len() as u32); + } + }) + .detach(); + } + + pub fn set_authenticated_user_info( + self: &Arc, + metrics_id: Option, + is_staff: bool, + ) { + let mut state = self.state.lock(); + + if !state.settings.metrics { + return; + } + + let metrics_id: Option> = metrics_id.map(|id| id.into()); + state.metrics_id.clone_from(&metrics_id); + state.is_staff = Some(is_staff); + drop(state); + } + + pub fn report_editor_event( + self: &Arc, + file_extension: Option, + vim_mode: bool, + operation: &'static str, + copilot_enabled: bool, + copilot_enabled_for_language: bool, + ) { + let event = Event::Editor(EditorEvent { + file_extension, + vim_mode, + operation: operation.into(), + copilot_enabled, + copilot_enabled_for_language, + }); + + self.report_event(event) + } + + pub fn report_inline_completion_event( + self: &Arc, + provider: String, + suggestion_accepted: bool, + file_extension: Option, + ) { + let event = Event::InlineCompletion(InlineCompletionEvent { + provider, + suggestion_accepted, + file_extension, + }); + + self.report_event(event) + } + + pub fn report_assistant_event( + self: &Arc, + conversation_id: Option, + kind: AssistantKind, + model: String, + response_latency: Option, + error_message: Option, + ) { + let event = Event::Assistant(AssistantEvent { + conversation_id, + kind, + model: model.to_string(), + response_latency, + error_message, + }); + + self.report_event(event) + } + + pub fn report_call_event( + self: &Arc, + operation: &'static str, + room_id: Option, + channel_id: Option, + ) { + let event = Event::Call(CallEvent { + operation: operation.to_string(), + room_id, + channel_id: 
channel_id.map(|cid| cid.0), + }); + + self.report_event(event) + } + + pub fn report_cpu_event(self: &Arc, usage_as_percentage: f32, core_count: u32) { + let event = Event::Cpu(CpuEvent { + usage_as_percentage, + core_count, + }); + + self.report_event(event) + } + + pub fn report_memory_event( + self: &Arc, + memory_in_bytes: u64, + virtual_memory_in_bytes: u64, + ) { + let event = Event::Memory(MemoryEvent { + memory_in_bytes, + virtual_memory_in_bytes, + }); + + self.report_event(event) + } + + pub fn report_app_event(self: &Arc, operation: String) -> Event { + let event = Event::App(AppEvent { operation }); + + self.report_event(event.clone()); + + event + } + + pub fn report_setting_event(self: &Arc, setting: &'static str, value: String) { + let event = Event::Setting(SettingEvent { + setting: setting.to_string(), + value, + }); + + self.report_event(event) + } + + pub fn report_extension_event(self: &Arc, extension_id: Arc, version: Arc) { + self.report_event(Event::Extension(ExtensionEvent { + extension_id, + version, + })) + } + + pub fn log_edit_event(self: &Arc, environment: &'static str) { + let mut state = self.state.lock(); + let period_data = state.event_coalescer.log_event(environment); + drop(state); + + if let Some((start, end, environment)) = period_data { + let event = Event::Edit(EditEvent { + duration: end.timestamp_millis() - start.timestamp_millis(), + environment: environment.to_string(), + }); + + self.report_event(event); + } + } + + pub fn report_action_event(self: &Arc, source: &'static str, action: String) { + let event = Event::Action(ActionEvent { + source: source.to_string(), + action, + }); + + self.report_event(event) + } + + fn report_event(self: &Arc, event: Event) { + let mut state = self.state.lock(); + + if !state.settings.metrics { + return; + } + + if state.flush_events_task.is_none() { + let this = self.clone(); + let executor = self.executor.clone(); + state.flush_events_task = Some(self.executor.spawn(async move { + executor.timer(FLUSH_INTERVAL).await; + this.flush_events(); + })); + } + + let date_time = self.clock.utc_now(); + + let milliseconds_since_first_event = match state.first_event_date_time { + Some(first_event_date_time) => { + date_time.timestamp_millis() - first_event_date_time.timestamp_millis() + } + None => { + state.first_event_date_time = Some(date_time); + 0 + } + }; + + let signed_in = state.metrics_id.is_some(); + state.events_queue.push(EventWrapper { + signed_in, + milliseconds_since_first_event, + event, + }); + + if state.installation_id.is_some() && state.events_queue.len() >= state.max_queue_size { + drop(state); + self.flush_events(); + } + } + + pub fn metrics_id(self: &Arc) -> Option> { + self.state.lock().metrics_id.clone() + } + + pub fn installation_id(self: &Arc) -> Option> { + self.state.lock().installation_id.clone() + } + + pub fn is_staff(self: &Arc) -> Option { + self.state.lock().is_staff + } + + pub fn flush_events(self: &Arc) { + let mut state = self.state.lock(); + state.first_event_date_time = None; + let mut events = mem::take(&mut state.events_queue); + state.flush_events_task.take(); + drop(state); + if events.is_empty() { + return; + } + + if ZED_CLIENT_CHECKSUM_SEED.is_none() { + return; + }; + + let this = self.clone(); + self.executor + .spawn( + async move { + let mut json_bytes = Vec::new(); + + if let Some(file) = &mut this.state.lock().log_file { + let file = file.as_file_mut(); + for event in &mut events { + json_bytes.clear(); + serde_json::to_writer(&mut json_bytes, event)?; + 
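// Each event is appended to the local log file as a single line of JSON
// (newline-delimited JSON), one `EventWrapper` per line.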
file.write_all(&json_bytes)?; + file.write_all(b"\n")?; + } + } + + { + let state = this.state.lock(); + let request_body = EventRequestBody { + installation_id: state.installation_id.as_deref().map(Into::into), + session_id: state.session_id.clone(), + is_staff: state.is_staff, + app_version: state + .app_metadata + .app_version + .unwrap_or_default() + .to_string(), + os_name: state.app_metadata.os_name.to_string(), + os_version: state + .app_metadata + .os_version + .map(|version| version.to_string()), + architecture: state.architecture.to_string(), + + release_channel: state.release_channel.map(Into::into), + events, + }; + json_bytes.clear(); + serde_json::to_writer(&mut json_bytes, &request_body)?; + } + + let Some(checksum) = calculate_json_checksum(&json_bytes) else { + return Ok(()); + }; + + let request = http::Request::builder() + .method(Method::POST) + .uri( + this.http_client + .build_zed_api_url("/telemetry/events", &[])? + .as_ref(), + ) + .header("Content-Type", "text/plain") + .header("x-zed-checksum", checksum) + .body(json_bytes.into()); + + let response = this.http_client.send(request?).await?; + if response.status() != 200 { + log::error!("Failed to send events: HTTP {:?}", response.status()); + } + anyhow::Ok(()) + } + .log_err(), + ) + .detach(); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::TimeZone; + use clock::FakeSystemClock; + use gpui::TestAppContext; + use http::FakeHttpClient; + + #[gpui::test] + fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) { + init_test(cx); + let clock = Arc::new(FakeSystemClock::new( + Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), + )); + let http = FakeHttpClient::with_200_response(); + let installation_id = Some("installation_id".to_string()); + let session_id = "session_id".to_string(); + + cx.update(|cx| { + let telemetry = Telemetry::new(clock.clone(), http, cx); + + telemetry.state.lock().max_queue_size = 4; + telemetry.start(installation_id, session_id, cx); + + assert!(is_empty_state(&telemetry)); + + let first_date_time = clock.utc_now(); + let operation = "test".to_string(); + + let event = telemetry.report_app_event(operation.clone()); + assert_eq!( + event, + Event::App(AppEvent { + operation: operation.clone(), + }) + ); + assert_eq!(telemetry.state.lock().events_queue.len(), 1); + assert!(telemetry.state.lock().flush_events_task.is_some()); + assert_eq!( + telemetry.state.lock().first_event_date_time, + Some(first_date_time) + ); + + clock.advance(chrono::Duration::milliseconds(100)); + + let event = telemetry.report_app_event(operation.clone()); + assert_eq!( + event, + Event::App(AppEvent { + operation: operation.clone(), + }) + ); + assert_eq!(telemetry.state.lock().events_queue.len(), 2); + assert!(telemetry.state.lock().flush_events_task.is_some()); + assert_eq!( + telemetry.state.lock().first_event_date_time, + Some(first_date_time) + ); + + clock.advance(chrono::Duration::milliseconds(100)); + + let event = telemetry.report_app_event(operation.clone()); + assert_eq!( + event, + Event::App(AppEvent { + operation: operation.clone(), + }) + ); + assert_eq!(telemetry.state.lock().events_queue.len(), 3); + assert!(telemetry.state.lock().flush_events_task.is_some()); + assert_eq!( + telemetry.state.lock().first_event_date_time, + Some(first_date_time) + ); + + clock.advance(chrono::Duration::milliseconds(100)); + + // Adding a 4th event should cause a flush + let event = telemetry.report_app_event(operation.clone()); + assert_eq!( + event, + Event::App(AppEvent { + 
operation: operation.clone(), + }) + ); + + assert!(is_empty_state(&telemetry)); + }); + } + + #[gpui::test] + async fn test_telemetry_flush_on_flush_interval( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + init_test(cx); + let clock = Arc::new(FakeSystemClock::new( + Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), + )); + let http = FakeHttpClient::with_200_response(); + let installation_id = Some("installation_id".to_string()); + let session_id = "session_id".to_string(); + + cx.update(|cx| { + let telemetry = Telemetry::new(clock.clone(), http, cx); + telemetry.state.lock().max_queue_size = 4; + telemetry.start(installation_id, session_id, cx); + + assert!(is_empty_state(&telemetry)); + + let first_date_time = clock.utc_now(); + let operation = "test".to_string(); + + let event = telemetry.report_app_event(operation.clone()); + assert_eq!( + event, + Event::App(AppEvent { + operation: operation.clone(), + }) + ); + assert_eq!(telemetry.state.lock().events_queue.len(), 1); + assert!(telemetry.state.lock().flush_events_task.is_some()); + assert_eq!( + telemetry.state.lock().first_event_date_time, + Some(first_date_time) + ); + + let duration = Duration::from_millis(1); + + // Test 1 millisecond before the flush interval limit is met + executor.advance_clock(FLUSH_INTERVAL - duration); + + assert!(!is_empty_state(&telemetry)); + + // Test the exact moment the flush interval limit is met + executor.advance_clock(duration); + + assert!(is_empty_state(&telemetry)); + }); + } + + // TODO: + // Test settings + // Update FakeHTTPClient to keep track of the number of requests and assert on it + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + fn is_empty_state(telemetry: &Telemetry) -> bool { + telemetry.state.lock().events_queue.is_empty() + && telemetry.state.lock().flush_events_task.is_none() + && telemetry.state.lock().first_event_date_time.is_none() + } +} + +pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option { + let Some(checksum_seed) = &*ZED_CLIENT_CHECKSUM_SEED else { + return None; + }; + + let mut summer = Sha256::new(); + summer.update(checksum_seed); + summer.update(&json); + summer.update(checksum_seed); + let mut checksum = String::new(); + for byte in summer.finalize().as_slice() { + use std::fmt::Write; + write!(&mut checksum, "{:02x}", byte).unwrap(); + } + + Some(checksum) +} diff --git a/crates/client/src/telemetry/event_coalescer.rs b/crates/client/src/telemetry/event_coalescer.rs new file mode 100644 index 0000000..33bcf49 --- /dev/null +++ b/crates/client/src/telemetry/event_coalescer.rs @@ -0,0 +1,294 @@ +use std::sync::Arc; +use std::time; + +use chrono::{DateTime, Duration, Utc}; +use clock::SystemClock; + +const COALESCE_TIMEOUT: time::Duration = time::Duration::from_secs(20); +const SIMULATED_DURATION_FOR_SINGLE_EVENT: time::Duration = time::Duration::from_millis(1); + +#[derive(Debug, PartialEq)] +struct PeriodData { + environment: &'static str, + start: DateTime, + end: Option>, +} + +pub struct EventCoalescer { + clock: Arc, + state: Option, +} + +impl EventCoalescer { + pub fn new(clock: Arc) -> Self { + Self { clock, state: None } + } + + pub fn log_event( + &mut self, + environment: &'static str, + ) -> Option<(DateTime, DateTime, &'static str)> { + let log_time = self.clock.utc_now(); + let coalesce_timeout = Duration::from_std(COALESCE_TIMEOUT).unwrap(); + + let Some(state) = &mut self.state else { + self.state = 
Some(PeriodData { + start: log_time, + end: None, + environment, + }); + return None; + }; + + let period_end = state + .end + .unwrap_or(state.start + SIMULATED_DURATION_FOR_SINGLE_EVENT); + let within_timeout = log_time - period_end < coalesce_timeout; + let environment_is_same = state.environment == environment; + let should_coaelesce = !within_timeout || !environment_is_same; + + if should_coaelesce { + let previous_environment = state.environment; + let original_start = state.start; + + state.start = log_time; + state.end = None; + state.environment = environment; + + return Some(( + original_start, + if within_timeout { log_time } else { period_end }, + previous_environment, + )); + } + + state.end = Some(log_time); + + None + } +} + +#[cfg(test)] +mod tests { + use chrono::TimeZone; + use clock::FakeSystemClock; + + use super::*; + + #[test] + fn test_same_context_exceeding_timeout() { + let clock = Arc::new(FakeSystemClock::new( + Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(), + )); + let environment_1 = "environment_1"; + let mut event_coalescer = EventCoalescer::new(clock.clone()); + + assert_eq!(event_coalescer.state, None); + + let period_start = clock.utc_now(); + let period_data = event_coalescer.log_event(environment_1); + + assert_eq!(period_data, None); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_start, + end: None, + environment: environment_1, + }) + ); + + let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap(); + + // Ensure that many calls within the timeout don't start a new period + for _ in 0..100 { + clock.advance(within_timeout_adjustment); + let period_data = event_coalescer.log_event(environment_1); + let period_end = clock.utc_now(); + + assert_eq!(period_data, None); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_start, + end: Some(period_end), + environment: environment_1, + }) + ); + } + + let period_end = clock.utc_now(); + let exceed_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT * 2).unwrap(); + // Logging an event exceeding the timeout should start a new period + clock.advance(exceed_timeout_adjustment); + let new_period_start = clock.utc_now(); + let period_data = event_coalescer.log_event(environment_1); + + assert_eq!(period_data, Some((period_start, period_end, environment_1))); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: new_period_start, + end: None, + environment: environment_1, + }) + ); + } + + #[test] + fn test_different_environment_under_timeout() { + let clock = Arc::new(FakeSystemClock::new( + Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(), + )); + let environment_1 = "environment_1"; + let mut event_coalescer = EventCoalescer::new(clock.clone()); + + assert_eq!(event_coalescer.state, None); + + let period_start = clock.utc_now(); + let period_data = event_coalescer.log_event(environment_1); + + assert_eq!(period_data, None); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_start, + end: None, + environment: environment_1, + }) + ); + + let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap(); + clock.advance(within_timeout_adjustment); + let period_end = clock.utc_now(); + let period_data = event_coalescer.log_event(environment_1); + + assert_eq!(period_data, None); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_start, + end: Some(period_end), + environment: environment_1, + }) + ); + + clock.advance(within_timeout_adjustment); + + // 
Logging an event within the timeout but with a different environment should start a new period + let period_end = clock.utc_now(); + let environment_2 = "environment_2"; + let period_data = event_coalescer.log_event(environment_2); + + assert_eq!(period_data, Some((period_start, period_end, environment_1))); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_end, + end: None, + environment: environment_2, + }) + ); + } + + #[test] + fn test_switching_environment_while_within_timeout() { + let clock = Arc::new(FakeSystemClock::new( + Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(), + )); + let environment_1 = "environment_1"; + let mut event_coalescer = EventCoalescer::new(clock.clone()); + + assert_eq!(event_coalescer.state, None); + + let period_start = clock.utc_now(); + let period_data = event_coalescer.log_event(environment_1); + + assert_eq!(period_data, None); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_start, + end: None, + environment: environment_1, + }) + ); + + let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap(); + clock.advance(within_timeout_adjustment); + let period_end = clock.utc_now(); + let environment_2 = "environment_2"; + let period_data = event_coalescer.log_event(environment_2); + + assert_eq!(period_data, Some((period_start, period_end, environment_1))); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_end, + end: None, + environment: environment_2, + }) + ); + } + + // 0 20 40 60 + // |-------------------|-------------------|-------------------|------------------- + // |--------|----------env change + // |------------------- + // |period_start |period_end + // |new_period_start + + #[test] + fn test_switching_environment_while_exceeding_timeout() { + let clock = Arc::new(FakeSystemClock::new( + Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(), + )); + let environment_1 = "environment_1"; + let mut event_coalescer = EventCoalescer::new(clock.clone()); + + assert_eq!(event_coalescer.state, None); + + let period_start = clock.utc_now(); + let period_data = event_coalescer.log_event(environment_1); + + assert_eq!(period_data, None); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_start, + end: None, + environment: environment_1, + }) + ); + + let exceed_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT * 2).unwrap(); + clock.advance(exceed_timeout_adjustment); + let period_end = clock.utc_now(); + let environment_2 = "environment_2"; + let period_data = event_coalescer.log_event(environment_2); + + assert_eq!( + period_data, + Some(( + period_start, + period_start + SIMULATED_DURATION_FOR_SINGLE_EVENT, + environment_1 + )) + ); + assert_eq!( + event_coalescer.state, + Some(PeriodData { + start: period_end, + end: None, + environment: environment_2, + }) + ); + } + + // 0 20 40 60 + // |-------------------|-------------------|-------------------|------------------- + // |--------|----------------------------------------env change + // |-------------------| + // |period_start |period_end + // |new_period_start +} diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs new file mode 100644 index 0000000..5e8ad21 --- /dev/null +++ b/crates/client/src/test.rs @@ -0,0 +1,217 @@ +use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore}; +use anyhow::{anyhow, Result}; +use futures::{stream::BoxStream, StreamExt}; +use gpui::{BackgroundExecutor, Context, Model, TestAppContext}; +use 
parking_lot::Mutex; +use rpc::{ + proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse}, + ConnectionId, Peer, Receipt, TypedEnvelope, +}; +use std::sync::Arc; + +pub struct FakeServer { + peer: Arc, + state: Arc>, + user_id: u64, + executor: BackgroundExecutor, +} + +#[derive(Default)] +struct FakeServerState { + incoming: Option>>, + connection_id: Option, + forbid_connections: bool, + auth_count: usize, + access_token: usize, +} + +impl FakeServer { + pub async fn for_client( + client_user_id: u64, + client: &Arc, + cx: &TestAppContext, + ) -> Self { + let server = Self { + peer: Peer::new(0), + state: Default::default(), + user_id: client_user_id, + executor: cx.executor(), + }; + + client + .override_authenticate({ + let state = Arc::downgrade(&server.state); + move |cx| { + let state = state.clone(); + cx.spawn(move |_| async move { + let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?; + let mut state = state.lock(); + state.auth_count += 1; + let access_token = state.access_token.to_string(); + Ok(Credentials::User { + user_id: client_user_id, + access_token, + }) + }) + } + }) + .override_establish_connection({ + let peer = Arc::downgrade(&server.peer); + let state = Arc::downgrade(&server.state); + move |credentials, cx| { + let peer = peer.clone(); + let state = state.clone(); + let credentials = credentials.clone(); + cx.spawn(move |cx| async move { + let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?; + let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?; + if state.lock().forbid_connections { + Err(EstablishConnectionError::Other(anyhow!( + "server is forbidding connections" + )))? + } + + if credentials + != (Credentials::User { + user_id: client_user_id, + access_token: state.lock().access_token.to_string(), + }) + { + Err(EstablishConnectionError::Unauthorized)? 
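// The fake server rejects credentials that don't match its current access token
// (e.g. after `roll_access_token`), forcing the client to re-authenticate.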
+ } + + let (client_conn, server_conn, _) = + Connection::in_memory(cx.background_executor().clone()); + let (connection_id, io, incoming) = + peer.add_test_connection(server_conn, cx.background_executor().clone()); + cx.background_executor().spawn(io).detach(); + { + let mut state = state.lock(); + state.connection_id = Some(connection_id); + state.incoming = Some(incoming); + } + peer.send( + connection_id, + proto::Hello { + peer_id: Some(connection_id.into()), + }, + ) + .unwrap(); + + Ok(client_conn) + }) + } + }); + + client + .authenticate_and_connect(false, &cx.to_async()) + .await + .unwrap(); + + server + } + + pub fn disconnect(&self) { + if self.state.lock().connection_id.is_some() { + self.peer.disconnect(self.connection_id()); + let mut state = self.state.lock(); + state.connection_id.take(); + state.incoming.take(); + } + } + + pub fn auth_count(&self) -> usize { + self.state.lock().auth_count + } + + pub fn roll_access_token(&self) { + self.state.lock().access_token += 1; + } + + pub fn forbid_connections(&self) { + self.state.lock().forbid_connections = true; + } + + pub fn allow_connections(&self) { + self.state.lock().forbid_connections = false; + } + + pub fn send(&self, message: T) { + self.peer.send(self.connection_id(), message).unwrap(); + } + + #[allow(clippy::await_holding_lock)] + pub async fn receive(&self) -> Result> { + self.executor.start_waiting(); + + loop { + let message = self + .state + .lock() + .incoming + .as_mut() + .expect("not connected") + .next() + .await + .ok_or_else(|| anyhow!("other half hung up"))?; + self.executor.finish_waiting(); + let type_name = message.payload_type_name(); + let message = message.into_any(); + + if message.is::>() { + return Ok(*message.downcast().unwrap()); + } + + if message.is::>() { + self.respond( + message + .downcast::>() + .unwrap() + .receipt(), + GetPrivateUserInfoResponse { + metrics_id: "the-metrics-id".into(), + staff: false, + flags: Default::default(), + }, + ); + continue; + } + + panic!( + "fake server received unexpected message type: {:?}", + type_name + ); + } + } + + pub fn respond(&self, receipt: Receipt, response: T::Response) { + self.peer.respond(receipt, response).unwrap() + } + + fn connection_id(&self) -> ConnectionId { + self.state.lock().connection_id.expect("not connected") + } + + pub async fn build_user_store( + &self, + client: Arc, + cx: &mut TestAppContext, + ) -> Model { + let user_store = cx.new_model(|cx| UserStore::new(client, cx)); + assert_eq!( + self.receive::() + .await + .unwrap() + .payload + .user_ids, + &[self.user_id] + ); + user_store + } +} + +impl Drop for FakeServer { + fn drop(&mut self) { + self.disconnect(); + } +} diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs new file mode 100644 index 0000000..f97f45a --- /dev/null +++ b/crates/client/src/user.rs @@ -0,0 +1,776 @@ +use super::{proto, Client, Status, TypedEnvelope}; +use anyhow::{anyhow, Context, Result}; +use collections::{hash_map::Entry, HashMap, HashSet}; +use feature_flags::FeatureFlagAppExt; +use futures::{channel::mpsc, Future, StreamExt}; +use gpui::{ + AppContext, AsyncAppContext, EventEmitter, Model, ModelContext, SharedString, SharedUri, Task, + WeakModel, +}; +use postage::{sink::Sink, watch}; +use rpc::proto::{RequestMessage, UsersResponse}; +use std::sync::{Arc, Weak}; +use text::ReplicaId; +use util::TryFutureExt as _; + +pub type UserId = u64; + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub struct ChannelId(pub u64); + +impl std::fmt::Display for 
ChannelId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub struct ProjectId(pub u64); + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub struct DevServerId(pub u64); + +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, +)] +pub struct DevServerProjectId(pub u64); + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct ParticipantIndex(pub u32); + +#[derive(Default, Debug)] +pub struct User { + pub id: UserId, + pub github_login: String, + pub avatar_uri: SharedUri, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Collaborator { + pub peer_id: proto::PeerId, + pub replica_id: ReplicaId, + pub user_id: UserId, +} + +impl PartialOrd for User { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for User { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.github_login.cmp(&other.github_login) + } +} + +impl PartialEq for User { + fn eq(&self, other: &Self) -> bool { + self.id == other.id && self.github_login == other.github_login + } +} + +impl Eq for User {} + +#[derive(Debug, PartialEq)] +pub struct Contact { + pub user: Arc, + pub online: bool, + pub busy: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ContactRequestStatus { + None, + RequestSent, + RequestReceived, + RequestAccepted, +} + +pub struct UserStore { + users: HashMap>, + by_github_login: HashMap, + participant_indices: HashMap, + update_contacts_tx: mpsc::UnboundedSender, + current_user: watch::Receiver>>, + contacts: Vec>, + incoming_contact_requests: Vec>, + outgoing_contact_requests: Vec>, + pending_contact_requests: HashMap, + invite_info: Option, + client: Weak, + _maintain_contacts: Task<()>, + _maintain_current_user: Task>, + weak_self: WeakModel, +} + +#[derive(Clone)] +pub struct InviteInfo { + pub count: u32, + pub url: Arc, +} + +pub enum Event { + Contact { + user: Arc, + kind: ContactEventKind, + }, + ShowContacts, + ParticipantIndicesChanged, +} + +#[derive(Clone, Copy)] +pub enum ContactEventKind { + Requested, + Accepted, + Cancelled, +} + +impl EventEmitter for UserStore {} + +enum UpdateContacts { + Update(proto::UpdateContacts), + Wait(postage::barrier::Sender), + Clear(postage::barrier::Sender), +} + +impl UserStore { + pub fn new(client: Arc, cx: &mut ModelContext) -> Self { + let (mut current_user_tx, current_user_rx) = watch::channel(); + let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded(); + let rpc_subscriptions = vec![ + client.add_message_handler(cx.weak_model(), Self::handle_update_contacts), + client.add_message_handler(cx.weak_model(), Self::handle_update_invite_info), + client.add_message_handler(cx.weak_model(), Self::handle_show_contacts), + ]; + Self { + users: Default::default(), + by_github_login: Default::default(), + current_user: current_user_rx, + contacts: Default::default(), + incoming_contact_requests: Default::default(), + participant_indices: Default::default(), + outgoing_contact_requests: Default::default(), + invite_info: None, + client: Arc::downgrade(&client), + update_contacts_tx, + _maintain_contacts: cx.spawn(|this, mut cx| async move { + let _subscriptions = rpc_subscriptions; + while let Some(message) = update_contacts_rx.next().await { + if let Ok(task) = + this.update(&mut cx, |this, cx| this.update_contacts(message, cx)) + { + task.log_err().await; + } else { + break; + 
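// The `UserStore` has been dropped (or the app is shutting down), so stop
// processing contact updates.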
} + } + }), + _maintain_current_user: cx.spawn(|this, mut cx| async move { + let mut status = client.status(); + let weak = Arc::downgrade(&client); + drop(client); + while let Some(status) = status.next().await { + // if the client is dropped, the app is shutting down. + let Some(client) = weak.upgrade() else { + return Ok(()); + }; + match status { + Status::Connected { .. } => { + if let Some(user_id) = client.user_id() { + let fetch_user = if let Ok(fetch_user) = this + .update(&mut cx, |this, cx| { + this.get_user(user_id, cx).log_err() + }) { + fetch_user + } else { + break; + }; + let fetch_metrics_id = + client.request(proto::GetPrivateUserInfo {}).log_err(); + let (user, info) = futures::join!(fetch_user, fetch_metrics_id); + + cx.update(|cx| { + if let Some(info) = info { + cx.update_flags(info.staff, info.flags); + client.telemetry.set_authenticated_user_info( + Some(info.metrics_id.clone()), + info.staff, + ) + } + })?; + + current_user_tx.send(user).await.ok(); + + this.update(&mut cx, |_, cx| cx.notify())?; + } + } + Status::SignedOut => { + current_user_tx.send(None).await.ok(); + this.update(&mut cx, |this, cx| { + cx.notify(); + this.clear_contacts() + })? + .await; + } + Status::ConnectionLost => { + this.update(&mut cx, |this, cx| { + cx.notify(); + this.clear_contacts() + })? + .await; + } + _ => {} + } + } + Ok(()) + }), + pending_contact_requests: Default::default(), + weak_self: cx.weak_model(), + } + } + + #[cfg(feature = "test-support")] + pub fn clear_cache(&mut self) { + self.users.clear(); + self.by_github_login.clear(); + } + + async fn handle_update_invite_info( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + this.invite_info = Some(InviteInfo { + url: Arc::from(message.payload.url), + count: message.payload.count, + }); + cx.notify(); + })?; + Ok(()) + } + + async fn handle_show_contacts( + this: Model, + _: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts))?; + Ok(()) + } + + pub fn invite_info(&self) -> Option<&InviteInfo> { + self.invite_info.as_ref() + } + + async fn handle_update_contacts( + this: Model, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, _| { + this.update_contacts_tx + .unbounded_send(UpdateContacts::Update(message.payload)) + .unwrap(); + })?; + Ok(()) + } + + fn update_contacts( + &mut self, + message: UpdateContacts, + cx: &mut ModelContext, + ) -> Task> { + match message { + UpdateContacts::Wait(barrier) => { + drop(barrier); + Task::ready(Ok(())) + } + UpdateContacts::Clear(barrier) => { + self.contacts.clear(); + self.incoming_contact_requests.clear(); + self.outgoing_contact_requests.clear(); + drop(barrier); + Task::ready(Ok(())) + } + UpdateContacts::Update(message) => { + let mut user_ids = HashSet::default(); + for contact in &message.contacts { + user_ids.insert(contact.user_id); + } + user_ids.extend(message.incoming_requests.iter().map(|req| req.requester_id)); + user_ids.extend(message.outgoing_requests.iter()); + + let load_users = self.get_users(user_ids.into_iter().collect(), cx); + cx.spawn(|this, mut cx| async move { + load_users.await?; + + // Users are fetched in parallel above and cached in call to get_users + // No need to parallelize here + let mut updated_contacts = Vec::new(); + let this = this + .upgrade() + .ok_or_else(|| anyhow!("can't upgrade user store handle"))?; + for contact 
in message.contacts { + updated_contacts.push(Arc::new( + Contact::from_proto(contact, &this, &mut cx).await?, + )); + } + + let mut incoming_requests = Vec::new(); + for request in message.incoming_requests { + incoming_requests.push({ + this.update(&mut cx, |this, cx| { + this.get_user(request.requester_id, cx) + })? + .await? + }); + } + + let mut outgoing_requests = Vec::new(); + for requested_user_id in message.outgoing_requests { + outgoing_requests.push( + this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))? + .await?, + ); + } + + let removed_contacts = + HashSet::::from_iter(message.remove_contacts.iter().copied()); + let removed_incoming_requests = + HashSet::::from_iter(message.remove_incoming_requests.iter().copied()); + let removed_outgoing_requests = + HashSet::::from_iter(message.remove_outgoing_requests.iter().copied()); + + this.update(&mut cx, |this, cx| { + // Remove contacts + this.contacts + .retain(|contact| !removed_contacts.contains(&contact.user.id)); + // Update existing contacts and insert new ones + for updated_contact in updated_contacts { + match this.contacts.binary_search_by_key( + &&updated_contact.user.github_login, + |contact| &contact.user.github_login, + ) { + Ok(ix) => this.contacts[ix] = updated_contact, + Err(ix) => this.contacts.insert(ix, updated_contact), + } + } + + // Remove incoming contact requests + this.incoming_contact_requests.retain(|user| { + if removed_incoming_requests.contains(&user.id) { + cx.emit(Event::Contact { + user: user.clone(), + kind: ContactEventKind::Cancelled, + }); + false + } else { + true + } + }); + // Update existing incoming requests and insert new ones + for user in incoming_requests { + match this + .incoming_contact_requests + .binary_search_by_key(&&user.github_login, |contact| { + &contact.github_login + }) { + Ok(ix) => this.incoming_contact_requests[ix] = user, + Err(ix) => this.incoming_contact_requests.insert(ix, user), + } + } + + // Remove outgoing contact requests + this.outgoing_contact_requests + .retain(|user| !removed_outgoing_requests.contains(&user.id)); + // Update existing incoming requests and insert new ones + for request in outgoing_requests { + match this + .outgoing_contact_requests + .binary_search_by_key(&&request.github_login, |contact| { + &contact.github_login + }) { + Ok(ix) => this.outgoing_contact_requests[ix] = request, + Err(ix) => this.outgoing_contact_requests.insert(ix, request), + } + } + + cx.notify(); + })?; + + Ok(()) + }) + } + } + } + + pub fn contacts(&self) -> &[Arc] { + &self.contacts + } + + pub fn has_contact(&self, user: &Arc) -> bool { + self.contacts + .binary_search_by_key(&&user.github_login, |contact| &contact.user.github_login) + .is_ok() + } + + pub fn incoming_contact_requests(&self) -> &[Arc] { + &self.incoming_contact_requests + } + + pub fn outgoing_contact_requests(&self) -> &[Arc] { + &self.outgoing_contact_requests + } + + pub fn is_contact_request_pending(&self, user: &User) -> bool { + self.pending_contact_requests.contains_key(&user.id) + } + + pub fn contact_request_status(&self, user: &User) -> ContactRequestStatus { + if self + .contacts + .binary_search_by_key(&&user.github_login, |contact| &contact.user.github_login) + .is_ok() + { + ContactRequestStatus::RequestAccepted + } else if self + .outgoing_contact_requests + .binary_search_by_key(&&user.github_login, |user| &user.github_login) + .is_ok() + { + ContactRequestStatus::RequestSent + } else if self + .incoming_contact_requests + .binary_search_by_key(&&user.github_login, 
|user| &user.github_login) + .is_ok() + { + ContactRequestStatus::RequestReceived + } else { + ContactRequestStatus::None + } + } + + pub fn request_contact( + &mut self, + responder_id: u64, + cx: &mut ModelContext, + ) -> Task> { + self.perform_contact_request(responder_id, proto::RequestContact { responder_id }, cx) + } + + pub fn remove_contact( + &mut self, + user_id: u64, + cx: &mut ModelContext, + ) -> Task> { + self.perform_contact_request(user_id, proto::RemoveContact { user_id }, cx) + } + + pub fn has_incoming_contact_request(&self, user_id: u64) -> bool { + self.incoming_contact_requests + .iter() + .any(|user| user.id == user_id) + } + + pub fn respond_to_contact_request( + &mut self, + requester_id: u64, + accept: bool, + cx: &mut ModelContext, + ) -> Task> { + self.perform_contact_request( + requester_id, + proto::RespondToContactRequest { + requester_id, + response: if accept { + proto::ContactRequestResponse::Accept + } else { + proto::ContactRequestResponse::Decline + } as i32, + }, + cx, + ) + } + + pub fn dismiss_contact_request( + &mut self, + requester_id: u64, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.upgrade(); + cx.spawn(move |_, _| async move { + client + .ok_or_else(|| anyhow!("can't upgrade client reference"))? + .request(proto::RespondToContactRequest { + requester_id, + response: proto::ContactRequestResponse::Dismiss as i32, + }) + .await?; + Ok(()) + }) + } + + fn perform_contact_request( + &mut self, + user_id: u64, + request: T, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.upgrade(); + *self.pending_contact_requests.entry(user_id).or_insert(0) += 1; + cx.notify(); + + cx.spawn(move |this, mut cx| async move { + let response = client + .ok_or_else(|| anyhow!("can't upgrade client reference"))? + .request(request) + .await; + this.update(&mut cx, |this, cx| { + if let Entry::Occupied(mut request_count) = + this.pending_contact_requests.entry(user_id) + { + *request_count.get_mut() -= 1; + if *request_count.get() == 0 { + request_count.remove(); + } + } + cx.notify(); + })?; + response?; + Ok(()) + }) + } + + pub fn clear_contacts(&mut self) -> impl Future { + let (tx, mut rx) = postage::barrier::channel(); + self.update_contacts_tx + .unbounded_send(UpdateContacts::Clear(tx)) + .unwrap(); + async move { + rx.next().await; + } + } + + pub fn contact_updates_done(&mut self) -> impl Future { + let (tx, mut rx) = postage::barrier::channel(); + self.update_contacts_tx + .unbounded_send(UpdateContacts::Wait(tx)) + .unwrap(); + async move { + rx.next().await; + } + } + + pub fn get_users( + &mut self, + user_ids: Vec, + cx: &mut ModelContext, + ) -> Task>>> { + let mut user_ids_to_fetch = user_ids.clone(); + user_ids_to_fetch.retain(|id| !self.users.contains_key(id)); + + cx.spawn(|this, mut cx| async move { + if !user_ids_to_fetch.is_empty() { + this.update(&mut cx, |this, cx| { + this.load_users( + proto::GetUsers { + user_ids: user_ids_to_fetch, + }, + cx, + ) + })? + .await?; + } + + this.update(&mut cx, |this, _| { + user_ids + .iter() + .map(|user_id| { + this.users + .get(user_id) + .cloned() + .ok_or_else(|| anyhow!("user {} not found", user_id)) + }) + .collect() + })? 
+ }) + } + + pub fn fuzzy_search_users( + &mut self, + query: String, + cx: &mut ModelContext, + ) -> Task>>> { + self.load_users(proto::FuzzySearchUsers { query }, cx) + } + + pub fn get_cached_user(&self, user_id: u64) -> Option> { + self.users.get(&user_id).cloned() + } + + pub fn get_user_optimistic( + &mut self, + user_id: u64, + cx: &mut ModelContext, + ) -> Option> { + if let Some(user) = self.users.get(&user_id).cloned() { + return Some(user); + } + + self.get_user(user_id, cx).detach_and_log_err(cx); + None + } + + pub fn get_user( + &mut self, + user_id: u64, + cx: &mut ModelContext, + ) -> Task>> { + if let Some(user) = self.users.get(&user_id).cloned() { + return Task::ready(Ok(user)); + } + + let load_users = self.get_users(vec![user_id], cx); + cx.spawn(move |this, mut cx| async move { + load_users.await?; + this.update(&mut cx, |this, _| { + this.users + .get(&user_id) + .cloned() + .ok_or_else(|| anyhow!("server responded with no users")) + })? + }) + } + + pub fn cached_user_by_github_login(&self, github_login: &str) -> Option> { + self.by_github_login + .get(github_login) + .and_then(|id| self.users.get(id).cloned()) + } + + pub fn current_user(&self) -> Option> { + self.current_user.borrow().clone() + } + + pub fn watch_current_user(&self) -> watch::Receiver>> { + self.current_user.clone() + } + + fn load_users( + &mut self, + request: impl RequestMessage, + cx: &mut ModelContext, + ) -> Task>>> { + let client = self.client.clone(); + cx.spawn(|this, mut cx| async move { + if let Some(rpc) = client.upgrade() { + let response = rpc.request(request).await.context("error loading users")?; + let users = response.users; + + this.update(&mut cx, |this, _| this.insert(users)) + } else { + Ok(Vec::new()) + } + }) + } + + pub fn insert(&mut self, users: Vec) -> Vec> { + let mut ret = Vec::with_capacity(users.len()); + for user in users { + let user = User::new(user); + if let Some(old) = self.users.insert(user.id, user.clone()) { + if old.github_login != user.github_login { + self.by_github_login.remove(&old.github_login); + } + } + self.by_github_login + .insert(user.github_login.clone(), user.id); + ret.push(user) + } + ret + } + + pub fn set_participant_indices( + &mut self, + participant_indices: HashMap, + cx: &mut ModelContext, + ) { + if participant_indices != self.participant_indices { + self.participant_indices = participant_indices; + cx.emit(Event::ParticipantIndicesChanged); + } + } + + pub fn participant_indices(&self) -> &HashMap { + &self.participant_indices + } + + pub fn participant_names( + &self, + user_ids: impl Iterator, + cx: &AppContext, + ) -> HashMap { + let mut ret = HashMap::default(); + let mut missing_user_ids = Vec::new(); + for id in user_ids { + if let Some(github_login) = self.get_cached_user(id).map(|u| u.github_login.clone()) { + ret.insert(id, github_login.into()); + } else { + missing_user_ids.push(id) + } + } + if !missing_user_ids.is_empty() { + let this = self.weak_self.clone(); + cx.spawn(|mut cx| async move { + this.update(&mut cx, |this, cx| this.get_users(missing_user_ids, cx))? 
+                    .await
+            })
+            .detach_and_log_err(cx);
+        }
+        ret
+    }
+}
+
+impl User {
+    fn new(message: proto::User) -> Arc<Self> {
+        Arc::new(User {
+            id: message.id,
+            github_login: message.github_login,
+            avatar_uri: message.avatar_url.into(),
+        })
+    }
+}
+
+impl Contact {
+    async fn from_proto(
+        contact: proto::Contact,
+        user_store: &Model<UserStore>,
+        cx: &mut AsyncAppContext,
+    ) -> Result<Self> {
+        let user = user_store
+            .update(cx, |user_store, cx| {
+                user_store.get_user(contact.user_id, cx)
+            })?
+            .await?;
+        Ok(Self {
+            user,
+            online: contact.online,
+            busy: contact.busy,
+        })
+    }
+}
+
+impl Collaborator {
+    pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
+        Ok(Self {
+            peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
+            replica_id: message.replica_id as ReplicaId,
+            user_id: message.user_id as UserId,
+        })
+    }
+}
diff --git a/crates/clock/Cargo.toml b/crates/clock/Cargo.toml
new file mode 100644
index 0000000..d1fb217
--- /dev/null
+++ b/crates/clock/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "clock"
+version = "0.1.0"
+edition = "2021"
+publish = false
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/clock.rs"
+doctest = false
+
+[features]
+test-support = ["dep:parking_lot"]
+
+[dependencies]
+chrono.workspace = true
+parking_lot = { workspace = true, optional = true }
+smallvec.workspace = true
diff --git a/crates/clock/LICENSE-GPL b/crates/clock/LICENSE-GPL
new file mode 100644
index 0000000..89e542f
--- /dev/null
+++ b/crates/clock/LICENSE-GPL
@@ -0,0 +1 @@
+../../LICENSE-GPL
\ No newline at end of file
diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs
new file mode 100644
index 0000000..7a13779
--- /dev/null
+++ b/crates/clock/src/clock.rs
@@ -0,0 +1,216 @@
+mod system_clock;
+
+use smallvec::SmallVec;
+use std::{
+    cmp::{self, Ordering},
+    fmt, iter,
+};
+
+pub use system_clock::*;
+
+/// A unique identifier for each distributed node.
+pub type ReplicaId = u16;
+
+/// A [Lamport sequence number](https://en.wikipedia.org/wiki/Lamport_timestamp).
+pub type Seq = u32;
+
+/// A [Lamport timestamp](https://en.wikipedia.org/wiki/Lamport_timestamp),
+/// used to determine the ordering of events in the editor.
+#[derive(Clone, Copy, Default, Eq, Hash, PartialEq)]
+pub struct Lamport {
+    pub replica_id: ReplicaId,
+    pub value: Seq,
+}
+
+/// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock).
+#[derive(Clone, Default, Hash, Eq, PartialEq)]
+pub struct Global(SmallVec<[u32; 8]>);
+
+impl Global {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn get(&self, replica_id: ReplicaId) -> Seq {
+        self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq
+    }
+
+    pub fn observe(&mut self, timestamp: Lamport) {
+        if timestamp.value > 0 {
+            let new_len = timestamp.replica_id as usize + 1;
+            if new_len > self.0.len() {
+                self.0.resize(new_len, 0);
+            }
+
+            let entry = &mut self.0[timestamp.replica_id as usize];
+            *entry = cmp::max(*entry, timestamp.value);
+        }
+    }
+
+    pub fn join(&mut self, other: &Self) {
+        if other.0.len() > self.0.len() {
+            self.0.resize(other.0.len(), 0);
+        }
+
+        for (left, right) in self.0.iter_mut().zip(&other.0) {
+            *left = cmp::max(*left, *right);
+        }
+    }
+
+    pub fn meet(&mut self, other: &Self) {
+        if other.0.len() > self.0.len() {
+            self.0.resize(other.0.len(), 0);
+        }
+
+        let mut new_len = 0;
+        for (ix, (left, right)) in self
+            .0
+            .iter_mut()
+            .zip(other.0.iter().chain(iter::repeat(&0)))
+            .enumerate()
+        {
+            if *left == 0 {
+                *left = *right;
+            } else if *right > 0 {
+                *left = cmp::min(*left, *right);
+            }
+
+            if *left != 0 {
+                new_len = ix + 1;
+            }
+        }
+        self.0.resize(new_len, 0);
+    }
+
+    pub fn observed(&self, timestamp: Lamport) -> bool {
+        self.get(timestamp.replica_id) >= timestamp.value
+    }
+
+    pub fn observed_any(&self, other: &Self) -> bool {
+        let mut lhs = self.0.iter();
+        let mut rhs = other.0.iter();
+        loop {
+            if let Some(left) = lhs.next() {
+                if let Some(right) = rhs.next() {
+                    if *right > 0 && left >= right {
+                        return true;
+                    }
+                } else {
+                    return false;
+                }
+            } else {
+                return false;
+            }
+        }
+    }
+
+    pub fn observed_all(&self, other: &Self) -> bool {
+        let mut lhs = self.0.iter();
+        let mut rhs = other.0.iter();
+        loop {
+            if let Some(left) = lhs.next() {
+                if let Some(right) = rhs.next() {
+                    if left < right {
+                        return false;
+                    }
+                } else {
+                    return true;
+                }
+            } else {
+                return rhs.next().is_none();
+            }
+        }
+    }
+
+    pub fn changed_since(&self, other: &Self) -> bool {
+        if self.0.len() > other.0.len() {
+            return true;
+        }
+        for (left, right) in self.0.iter().zip(other.0.iter()) {
+            if left > right {
+                return true;
+            }
+        }
+        false
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = Lamport> + '_ {
+        self.0.iter().enumerate().map(|(replica_id, seq)| Lamport {
+            replica_id: replica_id as ReplicaId,
+            value: *seq,
+        })
+    }
+}
+
+impl FromIterator<Lamport> for Global {
+    fn from_iter<T: IntoIterator<Item = Lamport>>(locals: T) -> Self {
+        let mut result = Self::new();
+        for local in locals {
+            result.observe(local);
+        }
+        result
+    }
+}
+
+impl Ord for Lamport {
+    fn cmp(&self, other: &Self) -> Ordering {
+        // Use the replica id to break ties between concurrent events.
+        self.value
+            .cmp(&other.value)
+            .then_with(|| self.replica_id.cmp(&other.replica_id))
+    }
+}
+
+impl PartialOrd for Lamport {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Lamport {
+    pub const MIN: Self = Self {
+        replica_id: ReplicaId::MIN,
+        value: Seq::MIN,
+    };
+
+    pub const MAX: Self = Self {
+        replica_id: ReplicaId::MAX,
+        value: Seq::MAX,
+    };
+
+    pub fn new(replica_id: ReplicaId) -> Self {
+        Self {
+            value: 1,
+            replica_id,
+        }
+    }
+
+    pub fn tick(&mut self) -> Self {
+        let timestamp = *self;
+        self.value += 1;
+        timestamp
+    }
+
+    pub fn observe(&mut self, timestamp: Self) {
+        self.value = cmp::max(self.value, timestamp.value) + 1;
+    }
+}
+
+impl fmt::Debug for Lamport {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Lamport {{{}: {}}}", self.replica_id, self.value)
+    }
+}
+
+impl fmt::Debug for Global {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Global {{")?;
+        for timestamp in self.iter() {
+            if timestamp.replica_id > 0 {
+                write!(f, ", ")?;
+            }
+            write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?;
+        }
+        write!(f, "}}")
+    }
+}
diff --git a/crates/clock/src/system_clock.rs b/crates/clock/src/system_clock.rs
new file mode 100644
index 0000000..a462ffc
--- /dev/null
+++ b/crates/clock/src/system_clock.rs
@@ -0,0 +1,59 @@
+use chrono::{DateTime, Utc};
+
+pub trait SystemClock: Send + Sync {
+    /// Returns the current date and time in UTC.
+    fn utc_now(&self) -> DateTime<Utc>;
+}
+
+pub struct RealSystemClock;
+
+impl SystemClock for RealSystemClock {
+    fn utc_now(&self) -> DateTime<Utc> {
+        Utc::now()
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+pub struct FakeSystemClockState {
+    now: DateTime<Utc>,
+}
+
+#[cfg(any(test, feature = "test-support"))]
+pub struct FakeSystemClock {
+    // Use an unfair lock to ensure tests are deterministic.
+    state: parking_lot::Mutex<FakeSystemClockState>,
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Default for FakeSystemClock {
+    fn default() -> Self {
+        Self::new(Utc::now())
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl FakeSystemClock {
+    pub fn new(now: DateTime<Utc>) -> Self {
+        let state = FakeSystemClockState { now };
+
+        Self {
+            state: parking_lot::Mutex::new(state),
+        }
+    }
+
+    pub fn set_now(&self, now: DateTime<Utc>) {
+        self.state.lock().now = now;
+    }
+
+    /// Advances the [`FakeSystemClock`] by the specified [`Duration`](chrono::Duration).
+ pub fn advance(&self, duration: chrono::Duration) { + self.state.lock().now += duration; + } +} + +#[cfg(any(test, feature = "test-support"))] +impl SystemClock for FakeSystemClock { + fn utc_now(&self) -> DateTime { + self.state.lock().now + } +} diff --git a/crates/collab/.env.toml b/crates/collab/.env.toml new file mode 100644 index 0000000..9bfdf29 --- /dev/null +++ b/crates/collab/.env.toml @@ -0,0 +1,27 @@ +DATABASE_URL = "postgres://postgres@localhost/zed" +# DATABASE_URL = "sqlite:////root/0/zed/db.sqlite3?mode=rwc" +DATABASE_MAX_CONNECTIONS = 5 +HTTP_PORT = 8080 +API_TOKEN = "secret" +INVITE_LINK_PREFIX = "http://localhost:3000/invites/" +ZED_ENVIRONMENT = "development" +LIVE_KIT_SERVER = "http://localhost:7880" +LIVE_KIT_KEY = "devkey" +LIVE_KIT_SECRET = "secret" +BLOB_STORE_ACCESS_KEY = "the-blob-store-access-key" +BLOB_STORE_SECRET_KEY = "the-blob-store-secret-key" +BLOB_STORE_BUCKET = "the-extensions-bucket" +BLOB_STORE_URL = "http://127.0.0.1:9000" +BLOB_STORE_REGION = "the-region" +ZED_CLIENT_CHECKSUM_SEED = "development-checksum-seed" +SEED_PATH = "crates/collab/seed.default.json" + +# CLICKHOUSE_URL = "" +# CLICKHOUSE_USER = "default" +# CLICKHOUSE_PASSWORD = "" +# CLICKHOUSE_DATABASE = "default" + +# SLACK_PANICS_WEBHOOK = "" + +# RUST_LOG=info +# LOG_JSON=true diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml new file mode 100644 index 0000000..005d4a2 --- /dev/null +++ b/crates/collab/Cargo.toml @@ -0,0 +1,110 @@ +[package] +authors = ["Nathan Sobo "] +default-run = "collab" +edition = "2021" +name = "collab" +version = "0.44.0" +publish = false +license = "AGPL-3.0-or-later" + +[lints] +workspace = true + +[[bin]] +name = "collab" + +[features] +sqlite = ["sea-orm/sqlx-sqlite", "sqlx/sqlite"] +test-support = ["sqlite"] + +[dependencies] +anthropic.workspace = true +anyhow.workspace = true +async-tungstenite = "0.16" +aws-config = { version = "1.1.5" } +aws-sdk-s3 = { version = "1.15.0" } +axum = { version = "0.6", features = ["json", "headers", "ws"] } +axum-extra = { version = "0.4", features = ["erased-json"] } +base64.workspace = true +chrono.workspace = true +clock.workspace = true +clickhouse.workspace = true +collections.workspace = true +dashmap = "5.4" +envy = "0.4.2" +futures.workspace = true +google_ai.workspace = true +hex.workspace = true +http.workspace = true +live_kit_server.workspace = true +log.workspace = true +nanoid.workspace = true +open_ai.workspace = true +supermaven_api.workspace = true +parking_lot.workspace = true +prometheus = "0.13" +prost.workspace = true +rand.workspace = true +reqwest = { version = "0.11", features = ["json"] } +rpc.workspace = true +scrypt = "0.7" +sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } +semantic_version.workspace = true +semver.workspace = true +serde.workspace = true +serde_derive.workspace = true +serde_json.workspace = true +sha2.workspace = true +sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] } +subtle.workspace = true +rustc-demangle.workspace = true +telemetry_events.workspace = true +text.workspace = true +time.workspace = true +tokio.workspace = true +toml.workspace = true +tower = "0.4" +tower-http = { workspace = true, features = ["trace"] } +tracing = "0.1.40" +tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927 +util.workspace = true 
+uuid.workspace = true + +[dev-dependencies] +async-trait.workspace = true +audio.workspace = true +call = { workspace = true, features = ["test-support"] } +channel.workspace = true +client = { workspace = true, features = ["test-support"] } +collab_ui = { workspace = true, features = ["test-support"] } +collections = { workspace = true, features = ["test-support"] } +ctor.workspace = true +editor = { workspace = true, features = ["test-support"] } +env_logger.workspace = true +file_finder.workspace = true +fs = { workspace = true, features = ["test-support"] } +git = { workspace = true, features = ["test-support"] } +git_hosting_providers.workspace = true +gpui = { workspace = true, features = ["test-support"] } +indoc.workspace = true +language = { workspace = true, features = ["test-support"] } +live_kit_client = { workspace = true, features = ["test-support"] } +lsp = { workspace = true, features = ["test-support"] } +menu.workspace = true +multi_buffer = { workspace = true, features = ["test-support"] } +node_runtime.workspace = true +notifications = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true +project = { workspace = true, features = ["test-support"] } +release_channel.workspace = true +dev_server_projects.workspace = true +rpc = { workspace = true, features = ["test-support"] } +sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] } +serde_json.workspace = true +settings = { workspace = true, features = ["test-support"] } +sqlx = { version = "0.7", features = ["sqlite"] } +theme.workspace = true +unindent.workspace = true +util.workspace = true +workspace = { workspace = true, features = ["test-support"] } +headless.workspace = true diff --git a/crates/collab/LICENSE-AGPL b/crates/collab/LICENSE-AGPL new file mode 100644 index 0000000..5f5cf25 --- /dev/null +++ b/crates/collab/LICENSE-AGPL @@ -0,0 +1 @@ +../../LICENSE-AGPL \ No newline at end of file diff --git a/crates/collab/README.md b/crates/collab/README.md new file mode 100644 index 0000000..4e73f4b --- /dev/null +++ b/crates/collab/README.md @@ -0,0 +1,73 @@ +# Zed Server + +This crate is what we run at https://collab.zed.dev. + +It contains our back-end logic for collaboration, to which we connect from the Zed client via a websocket after authenticating via https://zed.dev, which is a separate repo running on Vercel. + +# Local Development + +## Database setup + +Before you can run the collab server locally, you'll need to set up a zed Postgres database. + +``` +script/bootstrap +``` + +This script will set up the `zed` Postgres database, and populate it with some users. It requires internet access, because it fetches some users from the GitHub API. + +The script will create several _admin_ users, who you'll sign in as by default when developing locally. The GitHub logins for the default users are specified in the `seed.default.json` file. + +To use a different set of admin users, create `crates/collab/seed.json`. + +```json +{ + "admins": ["yourgithubhere"], + "channels": ["zed"], + "number_of_users": 20 +} +``` + +## Testing collaborative features locally + +In one terminal, run Zed's collaboration server and the livekit dev server: + +``` +foreman start +``` + +In a second terminal, run two or more instances of Zed. + +``` +script/zed-local -2 +``` + +This script starts one to four instances of Zed, depending on the `-2`, `-3` or `-4` flags. Each instance will be connected to the local `collab` server, signed in as a different user from `seed.json` or `seed.default.json`. 
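As a quick sanity check that `script/bootstrap` actually created and seeded the database, a query along these lines should work, assuming `psql` is installed and the default `DATABASE_URL` from `crates/collab/.env.toml`; the `users` table and its columns come from the migrations included later in this patch:

```
psql "postgres://postgres@localhost/zed" -c "SELECT id, github_login, admin FROM users LIMIT 5;"
```

If the query fails or returns no rows, re-running `script/bootstrap` is a reasonable first step.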
+ +# Deployment + +We run two instances of collab: + +- Staging (https://staging-collab.zed.dev) +- Production (https://collab.zed.dev) + +Both of these run on the Kubernetes cluster hosted in Digital Ocean. + +Deployment is triggered by pushing to the `collab-staging` (or `collab-production`) tag in Github. The best way to do this is: + +- `./script/deploy-collab staging` +- `./script/deploy-collab production` + +You can tell what is currently deployed with `./script/what-is-deployed`. + +# Database Migrations + +To create a new migration: + +``` +./script/create-migration +``` + +Migrations are run automatically on service start, so run `foreman start` again. The service will crash if the migrations fail. + +When you create a new migration, you also need to update the [SQLite schema](./migrations.sqlite/20221109000000_test_schema.sql) that is used for testing. diff --git a/crates/collab/admin_api.conf b/crates/collab/admin_api.conf new file mode 100644 index 0000000..5d3b0e6 --- /dev/null +++ b/crates/collab/admin_api.conf @@ -0,0 +1,4 @@ +db-uri = "postgres://postgres@localhost/zed" +server-port = 8081 +jwt-secret = "the-postgrest-jwt-secret-for-authorization" +log-level = "info" diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml new file mode 100644 index 0000000..271b146 --- /dev/null +++ b/crates/collab/k8s/collab.template.yml @@ -0,0 +1,197 @@ +--- +apiVersion: v1 +kind: Namespace +metadata: + name: ${ZED_KUBE_NAMESPACE} + +--- +kind: Service +apiVersion: v1 +metadata: + namespace: ${ZED_KUBE_NAMESPACE} + name: ${ZED_SERVICE_NAME} + annotations: + service.beta.kubernetes.io/do-loadbalancer-name: "${ZED_SERVICE_NAME}-${ZED_KUBE_NAMESPACE}" + service.beta.kubernetes.io/do-loadbalancer-size-unit: "${ZED_LOAD_BALANCER_SIZE_UNIT}" + service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443" + service.beta.kubernetes.io/do-loadbalancer-certificate-id: ${ZED_DO_CERTIFICATE_ID} + service.beta.kubernetes.io/do-loadbalancer-disable-lets-encrypt-dns-records: "true" +spec: + type: LoadBalancer + selector: + app: ${ZED_SERVICE_NAME} + ports: + - name: web + protocol: TCP + port: 443 + targetPort: 8080 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: ${ZED_KUBE_NAMESPACE} + name: ${ZED_SERVICE_NAME} + +spec: + replicas: 1 + strategy: + type: RollingUpdate + rollingUpdate: + maxSurge: 1 + maxUnavailable: 0 + selector: + matchLabels: + app: ${ZED_SERVICE_NAME} + template: + metadata: + labels: + app: ${ZED_SERVICE_NAME} + spec: + containers: + - name: ${ZED_SERVICE_NAME} + image: "${ZED_IMAGE_ID}" + args: + - serve + - ${ZED_SERVICE_NAME} + ports: + - containerPort: 8080 + protocol: TCP + livenessProbe: + httpGet: + path: /healthz + port: 8080 + initialDelaySeconds: 5 + periodSeconds: 5 + timeoutSeconds: 5 + readinessProbe: + httpGet: + path: / + port: 8080 + initialDelaySeconds: 1 + periodSeconds: 1 + startupProbe: + httpGet: + path: / + port: 8080 + initialDelaySeconds: 1 + periodSeconds: 1 + failureThreshold: 15 + env: + - name: HTTP_PORT + value: "8080" + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: database + key: url + - name: DATABASE_MAX_CONNECTIONS + value: "${DATABASE_MAX_CONNECTIONS}" + - name: API_TOKEN + valueFrom: + secretKeyRef: + name: api + key: token + - name: ZED_CLIENT_CHECKSUM_SEED + valueFrom: + secretKeyRef: + name: zed-client + key: checksum-seed + - name: LIVE_KIT_SERVER + valueFrom: + secretKeyRef: + name: livekit + key: server + - name: LIVE_KIT_KEY + valueFrom: + secretKeyRef: + name: livekit + 
key: key + - name: LIVE_KIT_SECRET + valueFrom: + secretKeyRef: + name: livekit + key: secret + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: openai + key: api_key + - name: ANTHROPIC_API_KEY + valueFrom: + secretKeyRef: + name: anthropic + key: api_key + - name: BLOB_STORE_ACCESS_KEY + valueFrom: + secretKeyRef: + name: blob-store + key: access_key + - name: BLOB_STORE_SECRET_KEY + valueFrom: + secretKeyRef: + name: blob-store + key: secret_key + - name: BLOB_STORE_URL + valueFrom: + secretKeyRef: + name: blob-store + key: url + - name: BLOB_STORE_REGION + valueFrom: + secretKeyRef: + name: blob-store + key: region + - name: BLOB_STORE_BUCKET + valueFrom: + secretKeyRef: + name: blob-store + key: bucket + - name: CLICKHOUSE_URL + valueFrom: + secretKeyRef: + name: clickhouse + key: url + - name: CLICKHOUSE_USER + valueFrom: + secretKeyRef: + name: clickhouse + key: user + - name: CLICKHOUSE_PASSWORD + valueFrom: + secretKeyRef: + name: clickhouse + key: password + - name: CLICKHOUSE_DATABASE + valueFrom: + secretKeyRef: + name: clickhouse + key: database + - name: SLACK_PANICS_WEBHOOK + valueFrom: + secretKeyRef: + name: slack + key: panics_webhook + - name: SUPERMAVEN_ADMIN_API_KEY + valueFrom: + secretKeyRef: + name: supermaven + key: api_key + - name: INVITE_LINK_PREFIX + value: ${INVITE_LINK_PREFIX} + - name: RUST_BACKTRACE + value: "1" + - name: RUST_LOG + value: ${RUST_LOG} + - name: LOG_JSON + value: "true" + - name: ZED_ENVIRONMENT + value: ${ZED_ENVIRONMENT} + - name: AUTO_JOIN_CHANNEL_ID + value: "${AUTO_JOIN_CHANNEL_ID}" + securityContext: + capabilities: + # FIXME - Switch to the more restrictive `PERFMON` capability. + # This capability isn't yet available in a stable version of Debian. + add: ["SYS_ADMIN"] + terminationGracePeriodSeconds: 10 diff --git a/crates/collab/k8s/environments/production.sh b/crates/collab/k8s/environments/production.sh new file mode 100644 index 0000000..8b83fd3 --- /dev/null +++ b/crates/collab/k8s/environments/production.sh @@ -0,0 +1,5 @@ +ZED_ENVIRONMENT=production +RUST_LOG=info +INVITE_LINK_PREFIX=https://zed.dev/invites/ +AUTO_JOIN_CHANNEL_ID=283 +DATABASE_MAX_CONNECTIONS=85 diff --git a/crates/collab/k8s/environments/staging.sh b/crates/collab/k8s/environments/staging.sh new file mode 100644 index 0000000..a0d6f52 --- /dev/null +++ b/crates/collab/k8s/environments/staging.sh @@ -0,0 +1,5 @@ +ZED_ENVIRONMENT=staging +RUST_LOG=info +INVITE_LINK_PREFIX=https://staging.zed.dev/invites/ +DATABASE_MAX_CONNECTIONS=5 +AUTO_JOIN_CHANNEL_ID=8 diff --git a/crates/collab/k8s/migrate.template.yml b/crates/collab/k8s/migrate.template.yml new file mode 100644 index 0000000..c890d7b --- /dev/null +++ b/crates/collab/k8s/migrate.template.yml @@ -0,0 +1,21 @@ +apiVersion: batch/v1 +kind: Job +metadata: + namespace: ${ZED_KUBE_NAMESPACE} + name: ${ZED_MIGRATE_JOB_NAME} +spec: + template: + spec: + restartPolicy: Never + containers: + - name: migrator + imagePullPolicy: Always + image: ${ZED_IMAGE_ID} + args: + - migrate + env: + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: database + key: url diff --git a/crates/collab/k8s/postgrest.template.yml b/crates/collab/k8s/postgrest.template.yml new file mode 100644 index 0000000..ff83880 --- /dev/null +++ b/crates/collab/k8s/postgrest.template.yml @@ -0,0 +1,57 @@ +--- +kind: Service +apiVersion: v1 +metadata: + namespace: ${ZED_KUBE_NAMESPACE} + name: postgrest + annotations: + service.beta.kubernetes.io/do-loadbalancer-name: "postgrest-${ZED_KUBE_NAMESPACE}" + 
service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443" + service.beta.kubernetes.io/do-loadbalancer-certificate-id: ${ZED_DO_CERTIFICATE_ID} + service.beta.kubernetes.io/do-loadbalancer-disable-lets-encrypt-dns-records: "true" +spec: + type: LoadBalancer + selector: + app: postgrest + ports: + - name: web + protocol: TCP + port: 443 + targetPort: 8080 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + namespace: ${ZED_KUBE_NAMESPACE} + name: postgrest + +spec: + replicas: 1 + selector: + matchLabels: + app: postgrest + template: + metadata: + labels: + app: postgrest + spec: + containers: + - name: postgrest + image: "postgrest/postgrest" + ports: + - containerPort: 8080 + protocol: TCP + env: + - name: PGRST_SERVER_PORT + value: "8080" + - name: PGRST_DB_URI + valueFrom: + secretKeyRef: + name: database + key: url + - name: PGRST_JWT_SECRET + valueFrom: + secretKeyRef: + name: postgrest + key: jwt_secret diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql new file mode 100644 index 0000000..45c424e --- /dev/null +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -0,0 +1,418 @@ +CREATE TABLE "users" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "github_login" VARCHAR, + "admin" BOOLEAN, + "email_address" VARCHAR(255) DEFAULT NULL, + "invite_code" VARCHAR(64), + "invite_count" INTEGER NOT NULL DEFAULT 0, + "inviter_id" INTEGER REFERENCES users (id), + "connected_once" BOOLEAN NOT NULL DEFAULT false, + "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "metrics_id" TEXT, + "github_user_id" INTEGER +); +CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login"); +CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code"); +CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); +CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id"); + +CREATE TABLE "access_tokens" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "user_id" INTEGER REFERENCES users (id), + "impersonated_user_id" INTEGER REFERENCES users (id), + "hash" VARCHAR(128) +); +CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id"); + +CREATE TABLE "contacts" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "user_id_a" INTEGER REFERENCES users (id) NOT NULL, + "user_id_b" INTEGER REFERENCES users (id) NOT NULL, + "a_to_b" BOOLEAN NOT NULL, + "should_notify" BOOLEAN NOT NULL, + "accepted" BOOLEAN NOT NULL +); +CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b"); +CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b"); + +CREATE TABLE "rooms" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "live_kit_room" VARCHAR NOT NULL, + "environment" VARCHAR, + "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE +); +CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id"); + +CREATE TABLE "projects" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE, + "host_user_id" INTEGER REFERENCES users (id), + "host_connection_id" INTEGER, + "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, + "unregistered" BOOLEAN NOT NULL DEFAULT FALSE, + "hosted_project_id" INTEGER REFERENCES hosted_projects (id), + "dev_server_project_id" INTEGER REFERENCES dev_server_projects(id) +); +CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); +CREATE INDEX 
"index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id"); + +CREATE TABLE "worktrees" ( + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "id" INTEGER NOT NULL, + "root_name" VARCHAR NOT NULL, + "abs_path" VARCHAR NOT NULL, + "visible" BOOL NOT NULL, + "scan_id" INTEGER NOT NULL, + "is_complete" BOOL NOT NULL DEFAULT FALSE, + "completed_scan_id" INTEGER NOT NULL, + PRIMARY KEY(project_id, id) +); +CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id"); + +CREATE TABLE "worktree_entries" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INTEGER NOT NULL, + "scan_id" INTEGER NOT NULL, + "id" INTEGER NOT NULL, + "is_dir" BOOL NOT NULL, + "path" VARCHAR NOT NULL, + "inode" INTEGER NOT NULL, + "mtime_seconds" INTEGER NOT NULL, + "mtime_nanos" INTEGER NOT NULL, + "is_symlink" BOOL NOT NULL, + "is_external" BOOL NOT NULL, + "is_ignored" BOOL NOT NULL, + "is_deleted" BOOL NOT NULL, + "git_status" INTEGER, + PRIMARY KEY(project_id, worktree_id, id), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id"); +CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id"); + +CREATE TABLE "worktree_repositories" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INTEGER NOT NULL, + "work_directory_id" INTEGER NOT NULL, + "branch" VARCHAR, + "scan_id" INTEGER NOT NULL, + "is_deleted" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, work_directory_id), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, + FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id"); +CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id"); + +CREATE TABLE "worktree_settings_files" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INTEGER NOT NULL, + "path" VARCHAR NOT NULL, + "content" TEXT, + PRIMARY KEY(project_id, worktree_id, path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_settings_files_on_project_id" ON "worktree_settings_files" ("project_id"); +CREATE INDEX "index_worktree_settings_files_on_project_id_and_worktree_id" ON "worktree_settings_files" ("project_id", "worktree_id"); + +CREATE TABLE "worktree_diagnostic_summaries" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INTEGER NOT NULL, + "path" VARCHAR NOT NULL, + "language_server_id" INTEGER NOT NULL, + "error_count" INTEGER NOT NULL, + "warning_count" INTEGER NOT NULL, + PRIMARY KEY(project_id, worktree_id, path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id"); +CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id"); + +CREATE TABLE "language_servers" ( + "id" INTEGER NOT NULL, + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "name" VARCHAR NOT NULL, + PRIMARY KEY(project_id, id) +); 
+CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id"); + +CREATE TABLE "project_collaborators" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "user_id" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + "is_host" BOOLEAN NOT NULL +); +CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id"); +CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id"); +CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id"); +CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id"); +CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id"); + +CREATE TABLE "room_participants" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "room_id" INTEGER NOT NULL REFERENCES rooms (id), + "user_id" INTEGER NOT NULL REFERENCES users (id), + "answering_connection_id" INTEGER, + "answering_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, + "answering_connection_lost" BOOLEAN NOT NULL, + "location_kind" INTEGER, + "location_project_id" INTEGER, + "initial_project_id" INTEGER, + "calling_user_id" INTEGER NOT NULL REFERENCES users (id), + "calling_connection_id" INTEGER NOT NULL, + "calling_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE SET NULL, + "participant_index" INTEGER, + "role" TEXT, + "in_call" BOOLEAN NOT NULL DEFAULT FALSE +); +CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id"); +CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id"); +CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id"); +CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id"); +CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id"); +CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id"); + +CREATE TABLE "servers" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "environment" VARCHAR NOT NULL +); + +CREATE TABLE "followers" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "room_id" INTEGER NOT NULL REFERENCES rooms (id) ON DELETE CASCADE, + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "leader_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "leader_connection_id" INTEGER NOT NULL, + "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "follower_connection_id" INTEGER NOT NULL +); +CREATE UNIQUE INDEX + "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id" +ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id"); +CREATE INDEX 
"index_followers_on_room_id" ON "followers" ("room_id"); + +CREATE TABLE "channels" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "name" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "visibility" VARCHAR NOT NULL, + "parent_path" TEXT NOT NULL, + "requires_zed_cla" BOOLEAN NOT NULL DEFAULT FALSE +); + +CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path"); + +CREATE TABLE IF NOT EXISTS "channel_chat_participants" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "user_id" INTEGER NOT NULL REFERENCES users (id), + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE +); +CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id"); + +CREATE TABLE IF NOT EXISTS "channel_messages" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "sender_id" INTEGER NOT NULL REFERENCES users (id), + "body" TEXT NOT NULL, + "sent_at" TIMESTAMP, + "edited_at" TIMESTAMP, + "nonce" BLOB NOT NULL, + "reply_to_message_id" INTEGER DEFAULT NULL +); +CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id"); +CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce"); + +CREATE TABLE "channel_message_mentions" ( + "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE, + "start_offset" INTEGER NOT NULL, + "end_offset" INTEGER NOT NULL, + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + PRIMARY KEY(message_id, start_offset) +); + +CREATE TABLE "channel_members" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "role" VARCHAR NOT NULL, + "accepted" BOOLEAN NOT NULL DEFAULT false, + "updated_at" TIMESTAMP NOT NULL DEFAULT now +); + +CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id"); + +CREATE TABLE "buffers" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL DEFAULT 0, + "latest_operation_epoch" INTEGER, + "latest_operation_replica_id" INTEGER, + "latest_operation_lamport_timestamp" INTEGER +); + +CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id"); + +CREATE TABLE "buffer_operations" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + "lamport_timestamp" INTEGER NOT NULL, + "value" BLOB NOT NULL, + PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) +); + +CREATE TABLE "buffer_snapshots" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "text" TEXT NOT NULL, + "operation_serialization_version" INTEGER NOT NULL, + PRIMARY KEY(buffer_id, epoch) +); + +CREATE TABLE "channel_buffer_collaborators" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "connection_lost" BOOLEAN NOT NULL DEFAULT false, + "user_id" INTEGER NOT NULL REFERENCES 
users (id) ON DELETE CASCADE, + "replica_id" INTEGER NOT NULL +); + +CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id"); + + +CREATE TABLE "feature_flags" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "flag" TEXT NOT NULL UNIQUE +); + +CREATE INDEX "index_feature_flags" ON "feature_flags" ("id"); + + +CREATE TABLE "user_features" ( + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE, + PRIMARY KEY (user_id, feature_id) +); + +CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id"); +CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id"); +CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id"); + + +CREATE TABLE "observed_buffer_edits" ( + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "lamport_timestamp" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + PRIMARY KEY (user_id, buffer_id) +); + +CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id"); + +CREATE TABLE IF NOT EXISTS "observed_channel_messages" ( + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "channel_message_id" INTEGER NOT NULL, + PRIMARY KEY (user_id, channel_id) +); + +CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id"); + +CREATE TABLE "notification_kinds" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "name" VARCHAR NOT NULL +); + +CREATE UNIQUE INDEX "index_notification_kinds_on_name" ON "notification_kinds" ("name"); + +CREATE TABLE "notifications" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "created_at" TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + "recipient_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "kind" INTEGER NOT NULL REFERENCES notification_kinds (id), + "entity_id" INTEGER, + "content" TEXT, + "is_read" BOOLEAN NOT NULL DEFAULT FALSE, + "response" BOOLEAN +); + +CREATE INDEX + "index_notifications_on_recipient_id_is_read_kind_entity_id" + ON "notifications" + ("recipient_id", "is_read", "kind", "entity_id"); + +CREATE TABLE contributors ( + user_id INTEGER REFERENCES users(id), + signed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (user_id) +); + +CREATE TABLE extensions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + external_id TEXT NOT NULL, + name TEXT NOT NULL, + latest_version TEXT NOT NULL, + total_download_count INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE extension_versions ( + extension_id INTEGER REFERENCES extensions(id), + version TEXT NOT NULL, + 
published_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + authors TEXT NOT NULL, + repository TEXT NOT NULL, + description TEXT NOT NULL, + schema_version INTEGER NOT NULL DEFAULT 0, + wasm_api_version TEXT, + download_count INTEGER NOT NULL DEFAULT 0, + PRIMARY KEY (extension_id, version) +); + +CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id"); +CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count"); + +CREATE TABLE rate_buckets ( + user_id INT NOT NULL, + rate_limit_name VARCHAR(255) NOT NULL, + token_count INT NOT NULL, + last_refill TIMESTAMP WITHOUT TIME ZONE NOT NULL, + PRIMARY KEY (user_id, rate_limit_name), + FOREIGN KEY (user_id) REFERENCES users(id) +); +CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); + +CREATE TABLE hosted_projects ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + channel_id INTEGER NOT NULL REFERENCES channels(id), + name TEXT NOT NULL, + visibility TEXT NOT NULL, + deleted_at TIMESTAMP NULL +); +CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id); +CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL); + +CREATE TABLE dev_servers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL REFERENCES users(id), + name TEXT NOT NULL, + ssh_connection_string TEXT, + hashed_token TEXT NOT NULL +); + +CREATE TABLE dev_server_projects ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id), + path TEXT NOT NULL +); diff --git a/crates/collab/migrations/20210527024318_initial_schema.sql b/crates/collab/migrations/20210527024318_initial_schema.sql new file mode 100644 index 0000000..4b06531 --- /dev/null +++ b/crates/collab/migrations/20210527024318_initial_schema.sql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS "sessions" ( + "id" VARCHAR NOT NULL PRIMARY KEY, + "expires" TIMESTAMP WITH TIME ZONE NULL, + "session" TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS "users" ( + "id" SERIAL PRIMARY KEY, + "github_login" VARCHAR, + "admin" BOOLEAN +); + +CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login"); + +CREATE TABLE IF NOT EXISTS "signups" ( + "id" SERIAL PRIMARY KEY, + "github_login" VARCHAR, + "email_address" VARCHAR, + "about" TEXT +); diff --git a/crates/collab/migrations/20210607190313_create_access_tokens.sql b/crates/collab/migrations/20210607190313_create_access_tokens.sql new file mode 100644 index 0000000..60745a9 --- /dev/null +++ b/crates/collab/migrations/20210607190313_create_access_tokens.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS "access_tokens" ( + "id" SERIAL PRIMARY KEY, + "user_id" INTEGER REFERENCES users (id), + "hash" VARCHAR(128) +); + +CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id"); diff --git a/crates/collab/migrations/20210805175147_create_chat_tables.sql b/crates/collab/migrations/20210805175147_create_chat_tables.sql new file mode 100644 index 0000000..5bba468 --- /dev/null +++ b/crates/collab/migrations/20210805175147_create_chat_tables.sql @@ -0,0 +1,46 @@ +CREATE TABLE IF NOT EXISTS "orgs" ( + "id" SERIAL PRIMARY KEY, + "name" VARCHAR NOT NULL, + "slug" VARCHAR NOT NULL +); + +CREATE UNIQUE INDEX "index_orgs_slug" ON "orgs" ("slug"); + +CREATE TABLE IF NOT EXISTS "org_memberships" ( + "id" SERIAL PRIMARY KEY, + "org_id" INTEGER REFERENCES orgs (id) NOT NULL, + "user_id" INTEGER REFERENCES users (id) NOT NULL, + "admin" BOOLEAN 
NOT NULL +); + +CREATE INDEX "index_org_memberships_user_id" ON "org_memberships" ("user_id"); +CREATE UNIQUE INDEX "index_org_memberships_org_id_and_user_id" ON "org_memberships" ("org_id", "user_id"); + +CREATE TABLE IF NOT EXISTS "channels" ( + "id" SERIAL PRIMARY KEY, + "owner_id" INTEGER NOT NULL, + "owner_is_user" BOOLEAN NOT NULL, + "name" VARCHAR NOT NULL +); + +CREATE UNIQUE INDEX "index_channels_owner_and_name" ON "channels" ("owner_is_user", "owner_id", "name"); + +CREATE TABLE IF NOT EXISTS "channel_memberships" ( + "id" SERIAL PRIMARY KEY, + "channel_id" INTEGER REFERENCES channels (id) NOT NULL, + "user_id" INTEGER REFERENCES users (id) NOT NULL, + "admin" BOOLEAN NOT NULL +); + +CREATE INDEX "index_channel_memberships_user_id" ON "channel_memberships" ("user_id"); +CREATE UNIQUE INDEX "index_channel_memberships_channel_id_and_user_id" ON "channel_memberships" ("channel_id", "user_id"); + +CREATE TABLE IF NOT EXISTS "channel_messages" ( + "id" SERIAL PRIMARY KEY, + "channel_id" INTEGER REFERENCES channels (id) NOT NULL, + "sender_id" INTEGER REFERENCES users (id) NOT NULL, + "body" TEXT NOT NULL, + "sent_at" TIMESTAMP +); + +CREATE INDEX "index_channel_messages_channel_id" ON "channel_messages" ("channel_id"); diff --git a/crates/collab/migrations/20210916123647_add_nonce_to_channel_messages.sql b/crates/collab/migrations/20210916123647_add_nonce_to_channel_messages.sql new file mode 100644 index 0000000..ee4d4aa --- /dev/null +++ b/crates/collab/migrations/20210916123647_add_nonce_to_channel_messages.sql @@ -0,0 +1,4 @@ +ALTER TABLE "channel_messages" +ADD "nonce" UUID NOT NULL DEFAULT gen_random_uuid(); + +CREATE UNIQUE INDEX "index_channel_messages_nonce" ON "channel_messages" ("nonce"); diff --git a/crates/collab/migrations/20210920192001_add_interests_to_signups.sql b/crates/collab/migrations/20210920192001_add_interests_to_signups.sql new file mode 100644 index 0000000..2457abf --- /dev/null +++ b/crates/collab/migrations/20210920192001_add_interests_to_signups.sql @@ -0,0 +1,4 @@ +ALTER TABLE "signups" + ADD "wants_releases" BOOLEAN, + ADD "wants_updates" BOOLEAN, + ADD "wants_community" BOOLEAN; \ No newline at end of file diff --git a/crates/collab/migrations/20220421165757_drop_signups.sql b/crates/collab/migrations/20220421165757_drop_signups.sql new file mode 100644 index 0000000..d7cd6e2 --- /dev/null +++ b/crates/collab/migrations/20220421165757_drop_signups.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS "signups"; diff --git a/crates/collab/migrations/20220505144506_add_trigram_index_to_users.sql b/crates/collab/migrations/20220505144506_add_trigram_index_to_users.sql new file mode 100644 index 0000000..3d6fd31 --- /dev/null +++ b/crates/collab/migrations/20220505144506_add_trigram_index_to_users.sql @@ -0,0 +1,2 @@ +CREATE EXTENSION IF NOT EXISTS pg_trgm; +CREATE INDEX trigram_index_users_on_github_login ON users USING GIN(github_login gin_trgm_ops); diff --git a/crates/collab/migrations/20220506130724_create_contacts.sql b/crates/collab/migrations/20220506130724_create_contacts.sql new file mode 100644 index 0000000..56beb70 --- /dev/null +++ b/crates/collab/migrations/20220506130724_create_contacts.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS "contacts" ( + "id" SERIAL PRIMARY KEY, + "user_id_a" INTEGER REFERENCES users (id) NOT NULL, + "user_id_b" INTEGER REFERENCES users (id) NOT NULL, + "a_to_b" BOOLEAN NOT NULL, + "should_notify" BOOLEAN NOT NULL, + "accepted" BOOLEAN NOT NULL +); + +CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", 
"user_id_b"); +CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b"); diff --git a/crates/collab/migrations/20220518151305_add_invites_to_users.sql b/crates/collab/migrations/20220518151305_add_invites_to_users.sql new file mode 100644 index 0000000..2ac89b6 --- /dev/null +++ b/crates/collab/migrations/20220518151305_add_invites_to_users.sql @@ -0,0 +1,9 @@ +ALTER TABLE users +ADD email_address VARCHAR(255) DEFAULT NULL, +ADD invite_code VARCHAR(64), +ADD invite_count INTEGER NOT NULL DEFAULT 0, +ADD inviter_id INTEGER REFERENCES users (id), +ADD connected_once BOOLEAN NOT NULL DEFAULT false, +ADD created_at TIMESTAMP NOT NULL DEFAULT NOW(); + +CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code"); diff --git a/crates/collab/migrations/20220523232954_allow_user_deletes.sql b/crates/collab/migrations/20220523232954_allow_user_deletes.sql new file mode 100644 index 0000000..ddf3f6f --- /dev/null +++ b/crates/collab/migrations/20220523232954_allow_user_deletes.sql @@ -0,0 +1,6 @@ +ALTER TABLE contacts DROP CONSTRAINT contacts_user_id_a_fkey; +ALTER TABLE contacts DROP CONSTRAINT contacts_user_id_b_fkey; +ALTER TABLE contacts ADD CONSTRAINT contacts_user_id_a_fkey FOREIGN KEY (user_id_a) REFERENCES users(id) ON DELETE CASCADE; +ALTER TABLE contacts ADD CONSTRAINT contacts_user_id_b_fkey FOREIGN KEY (user_id_b) REFERENCES users(id) ON DELETE CASCADE; +ALTER TABLE users DROP CONSTRAINT users_inviter_id_fkey; +ALTER TABLE users ADD CONSTRAINT users_inviter_id_fkey FOREIGN KEY (inviter_id) REFERENCES users(id) ON DELETE SET NULL; diff --git a/crates/collab/migrations/20220620211403_create_projects.sql b/crates/collab/migrations/20220620211403_create_projects.sql new file mode 100644 index 0000000..d813c9f --- /dev/null +++ b/crates/collab/migrations/20220620211403_create_projects.sql @@ -0,0 +1,24 @@ +CREATE TABLE IF NOT EXISTS "projects" ( + "id" SERIAL PRIMARY KEY, + "host_user_id" INTEGER REFERENCES users (id) NOT NULL, + "unregistered" BOOLEAN NOT NULL DEFAULT false +); + +CREATE TABLE IF NOT EXISTS "worktree_extensions" ( + "id" SERIAL PRIMARY KEY, + "project_id" INTEGER REFERENCES projects (id) NOT NULL, + "worktree_id" INTEGER NOT NULL, + "extension" VARCHAR(255), + "count" INTEGER NOT NULL +); + +CREATE TABLE IF NOT EXISTS "project_activity_periods" ( + "id" SERIAL PRIMARY KEY, + "duration_millis" INTEGER NOT NULL, + "ended_at" TIMESTAMP NOT NULL, + "user_id" INTEGER REFERENCES users (id) NOT NULL, + "project_id" INTEGER REFERENCES projects (id) NOT NULL +); + +CREATE INDEX "index_project_activity_periods_on_ended_at" ON "project_activity_periods" ("ended_at"); +CREATE UNIQUE INDEX "index_worktree_extensions_on_project_id_and_worktree_id_and_extension" ON "worktree_extensions" ("project_id", "worktree_id", "extension"); \ No newline at end of file diff --git a/crates/collab/migrations/20220913211150_create_signups.sql b/crates/collab/migrations/20220913211150_create_signups.sql new file mode 100644 index 0000000..19559b7 --- /dev/null +++ b/crates/collab/migrations/20220913211150_create_signups.sql @@ -0,0 +1,27 @@ +CREATE TABLE IF NOT EXISTS "signups" ( + "id" SERIAL PRIMARY KEY, + "email_address" VARCHAR NOT NULL, + "email_confirmation_code" VARCHAR(64) NOT NULL, + "email_confirmation_sent" BOOLEAN NOT NULL, + "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + "device_id" VARCHAR, + "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE, + "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL, + + "platform_mac" BOOLEAN NOT 
NULL, + "platform_linux" BOOLEAN NOT NULL, + "platform_windows" BOOLEAN NOT NULL, + "platform_unknown" BOOLEAN NOT NULL, + + "editor_features" VARCHAR[], + "programming_languages" VARCHAR[] +); + +CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address"); +CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent"); + +ALTER TABLE "users" + ADD "github_user_id" INTEGER; + +CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); +CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id"); diff --git a/crates/collab/migrations/20220929182110_add_metrics_id.sql b/crates/collab/migrations/20220929182110_add_metrics_id.sql new file mode 100644 index 0000000..665d632 --- /dev/null +++ b/crates/collab/migrations/20220929182110_add_metrics_id.sql @@ -0,0 +1,2 @@ +ALTER TABLE "users" + ADD "metrics_id" uuid NOT NULL DEFAULT gen_random_uuid(); diff --git a/crates/collab/migrations/20221111092550_reconnection_support.sql b/crates/collab/migrations/20221111092550_reconnection_support.sql new file mode 100644 index 0000000..3289f6b --- /dev/null +++ b/crates/collab/migrations/20221111092550_reconnection_support.sql @@ -0,0 +1,90 @@ +CREATE TABLE IF NOT EXISTS "rooms" ( + "id" SERIAL PRIMARY KEY, + "live_kit_room" VARCHAR NOT NULL +); + +ALTER TABLE "projects" + ADD "room_id" INTEGER REFERENCES rooms (id), + ADD "host_connection_id" INTEGER, + ADD "host_connection_epoch" UUID; +CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch"); + +CREATE TABLE "worktrees" ( + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "id" INT8 NOT NULL, + "root_name" VARCHAR NOT NULL, + "abs_path" VARCHAR NOT NULL, + "visible" BOOL NOT NULL, + "scan_id" INT8 NOT NULL, + "is_complete" BOOL NOT NULL, + PRIMARY KEY(project_id, id) +); +CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id"); + +CREATE TABLE "worktree_entries" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "id" INT8 NOT NULL, + "is_dir" BOOL NOT NULL, + "path" VARCHAR NOT NULL, + "inode" INT8 NOT NULL, + "mtime_seconds" INT8 NOT NULL, + "mtime_nanos" INTEGER NOT NULL, + "is_symlink" BOOL NOT NULL, + "is_ignored" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, id), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id"); +CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id"); + +CREATE TABLE "worktree_diagnostic_summaries" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "path" VARCHAR NOT NULL, + "language_server_id" INT8 NOT NULL, + "error_count" INTEGER NOT NULL, + "warning_count" INTEGER NOT NULL, + PRIMARY KEY(project_id, worktree_id, path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id"); +CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id"); + +CREATE TABLE "language_servers" ( + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "id" INT8 NOT NULL, + "name" VARCHAR NOT NULL, + PRIMARY KEY(project_id, id) +); +CREATE INDEX 
"index_language_servers_on_project_id" ON "language_servers" ("project_id"); + +CREATE TABLE "project_collaborators" ( + "id" SERIAL PRIMARY KEY, + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_epoch" UUID NOT NULL, + "user_id" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + "is_host" BOOLEAN NOT NULL +); +CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id"); +CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id"); +CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch"); + +CREATE TABLE "room_participants" ( + "id" SERIAL PRIMARY KEY, + "room_id" INTEGER NOT NULL REFERENCES rooms (id), + "user_id" INTEGER NOT NULL REFERENCES users (id), + "answering_connection_id" INTEGER, + "answering_connection_epoch" UUID, + "location_kind" INTEGER, + "location_project_id" INTEGER, + "initial_project_id" INTEGER, + "calling_user_id" INTEGER NOT NULL REFERENCES users (id), + "calling_connection_id" INTEGER NOT NULL, + "calling_connection_epoch" UUID NOT NULL +); +CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id"); +CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch"); +CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch"); diff --git a/crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql b/crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql new file mode 100644 index 0000000..b154396 --- /dev/null +++ b/crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql @@ -0,0 +1,2 @@ +ALTER TABLE "signups" + ADD "added_to_mailing_list" BOOLEAN NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql b/crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql new file mode 100644 index 0000000..ed0cf97 --- /dev/null +++ b/crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql @@ -0,0 +1,7 @@ +ALTER TABLE "room_participants" + ADD "answering_connection_lost" BOOLEAN NOT NULL DEFAULT FALSE; + +CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id"); +CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch"); +CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id"); +CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch"); diff --git a/crates/collab/migrations/20221213125710_index_room_participants_on_room_id.sql b/crates/collab/migrations/20221213125710_index_room_participants_on_room_id.sql new file mode 100644 index 0000000..f40ca81 --- /dev/null +++ b/crates/collab/migrations/20221213125710_index_room_participants_on_room_id.sql @@ -0,0 +1 @@ +CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id"); diff --git 
a/crates/collab/migrations/20221214144346_change_epoch_from_uuid_to_integer.sql b/crates/collab/migrations/20221214144346_change_epoch_from_uuid_to_integer.sql new file mode 100644 index 0000000..5e02f76 --- /dev/null +++ b/crates/collab/migrations/20221214144346_change_epoch_from_uuid_to_integer.sql @@ -0,0 +1,30 @@ +CREATE TABLE servers ( + id SERIAL PRIMARY KEY, + environment VARCHAR NOT NULL +); + +DROP TABLE worktree_extensions; +DROP TABLE project_activity_periods; +DELETE from projects; +ALTER TABLE projects + DROP COLUMN host_connection_epoch, + ADD COLUMN host_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE; +CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); +CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id"); + +DELETE FROM project_collaborators; +ALTER TABLE project_collaborators + DROP COLUMN connection_epoch, + ADD COLUMN connection_server_id INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE; +CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id"); +CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id"); + +DELETE FROM room_participants; +ALTER TABLE room_participants + DROP COLUMN answering_connection_epoch, + DROP COLUMN calling_connection_epoch, + ADD COLUMN answering_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE, + ADD COLUMN calling_connection_server_id INTEGER REFERENCES servers (id) ON DELETE SET NULL; +CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id"); +CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id"); +CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id"); diff --git a/crates/collab/migrations/20221219181850_project_reconnection_support.sql b/crates/collab/migrations/20221219181850_project_reconnection_support.sql new file mode 100644 index 0000000..6efef55 --- /dev/null +++ b/crates/collab/migrations/20221219181850_project_reconnection_support.sql @@ -0,0 +1,3 @@ +ALTER TABLE "worktree_entries" + ADD COLUMN "scan_id" INT8, + ADD COLUMN "is_deleted" BOOL; diff --git a/crates/collab/migrations/20230103200902_replace_is_completed_with_completed_scan_id.sql b/crates/collab/migrations/20230103200902_replace_is_completed_with_completed_scan_id.sql new file mode 100644 index 0000000..1894d88 --- /dev/null +++ b/crates/collab/migrations/20230103200902_replace_is_completed_with_completed_scan_id.sql @@ -0,0 +1,3 @@ +ALTER TABLE worktrees + ALTER COLUMN is_complete SET DEFAULT FALSE, + ADD COLUMN completed_scan_id INT8; diff --git a/crates/collab/migrations/20230202155735_followers.sql b/crates/collab/migrations/20230202155735_followers.sql new file mode 100644 index 0000000..c82d6ba --- /dev/null +++ b/crates/collab/migrations/20230202155735_followers.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS "followers" ( + "id" SERIAL PRIMARY KEY, + "room_id" INTEGER NOT NULL REFERENCES rooms (id) ON DELETE CASCADE, + "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, + 
"leader_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "leader_connection_id" INTEGER NOT NULL, + "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "follower_connection_id" INTEGER NOT NULL +); + +CREATE UNIQUE INDEX + "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id" +ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id"); + +CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id"); diff --git a/crates/collab/migrations/20230508211523_add-repository-entries.sql b/crates/collab/migrations/20230508211523_add-repository-entries.sql new file mode 100644 index 0000000..1e59347 --- /dev/null +++ b/crates/collab/migrations/20230508211523_add-repository-entries.sql @@ -0,0 +1,13 @@ +CREATE TABLE "worktree_repositories" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "work_directory_id" INT8 NOT NULL, + "scan_id" INT8 NOT NULL, + "branch" VARCHAR, + "is_deleted" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, work_directory_id), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, + FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id"); +CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id"); diff --git a/crates/collab/migrations/20230511004019_add_repository_statuses.sql b/crates/collab/migrations/20230511004019_add_repository_statuses.sql new file mode 100644 index 0000000..862561c --- /dev/null +++ b/crates/collab/migrations/20230511004019_add_repository_statuses.sql @@ -0,0 +1,15 @@ +CREATE TABLE "worktree_repository_statuses" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "work_directory_id" INT8 NOT NULL, + "repo_path" VARCHAR NOT NULL, + "status" INT8 NOT NULL, + "scan_id" INT8 NOT NULL, + "is_deleted" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, + FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id"); +CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id"); +CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id"); diff --git a/crates/collab/migrations/20230529164700_add_worktree_settings_files.sql b/crates/collab/migrations/20230529164700_add_worktree_settings_files.sql new file mode 100644 index 0000000..973a40a --- /dev/null +++ b/crates/collab/migrations/20230529164700_add_worktree_settings_files.sql @@ -0,0 +1,10 @@ +CREATE TABLE "worktree_settings_files" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "path" VARCHAR NOT NULL, + "content" TEXT NOT NULL, + PRIMARY KEY(project_id, worktree_id, path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees 
(project_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_settings_files_on_project_id" ON "worktree_settings_files" ("project_id"); +CREATE INDEX "index_settings_files_on_project_id_and_wt_id" ON "worktree_settings_files" ("project_id", "worktree_id"); diff --git a/crates/collab/migrations/20230605191135_remove_repository_statuses.sql b/crates/collab/migrations/20230605191135_remove_repository_statuses.sql new file mode 100644 index 0000000..3e5f907 --- /dev/null +++ b/crates/collab/migrations/20230605191135_remove_repository_statuses.sql @@ -0,0 +1,2 @@ +ALTER TABLE "worktree_entries" +ADD "git_status" INT8; diff --git a/crates/collab/migrations/20230616134535_add_is_external_to_worktree_entries.sql b/crates/collab/migrations/20230616134535_add_is_external_to_worktree_entries.sql new file mode 100644 index 0000000..e4348af --- /dev/null +++ b/crates/collab/migrations/20230616134535_add_is_external_to_worktree_entries.sql @@ -0,0 +1,2 @@ +ALTER TABLE "worktree_entries" +ADD "is_external" BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20230727150500_add_channels.sql b/crates/collab/migrations/20230727150500_add_channels.sql new file mode 100644 index 0000000..df98183 --- /dev/null +++ b/crates/collab/migrations/20230727150500_add_channels.sql @@ -0,0 +1,30 @@ +DROP TABLE "channel_messages"; +DROP TABLE "channel_memberships"; +DROP TABLE "org_memberships"; +DROP TABLE "orgs"; +DROP TABLE "channels"; + +CREATE TABLE "channels" ( + "id" SERIAL PRIMARY KEY, + "name" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL DEFAULT now() +); + +CREATE TABLE "channel_paths" ( + "id_path" VARCHAR NOT NULL PRIMARY KEY, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE +); +CREATE INDEX "index_channel_paths_on_channel_id" ON "channel_paths" ("channel_id"); + +CREATE TABLE "channel_members" ( + "id" SERIAL PRIMARY KEY, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "admin" BOOLEAN NOT NULL DEFAULT false, + "accepted" BOOLEAN NOT NULL DEFAULT false, + "updated_at" TIMESTAMP NOT NULL DEFAULT now() +); + +CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id"); + +ALTER TABLE rooms ADD COLUMN "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE; diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql new file mode 100644 index 0000000..5e6e7ce --- /dev/null +++ b/crates/collab/migrations/20230819154600_add_channel_buffers.sql @@ -0,0 +1,40 @@ +CREATE TABLE "buffers" ( + "id" SERIAL PRIMARY KEY, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL DEFAULT 0 +); + +CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id"); + +CREATE TABLE "buffer_operations" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + "lamport_timestamp" INTEGER NOT NULL, + "value" BYTEA NOT NULL, + PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) +); + +CREATE TABLE "buffer_snapshots" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "text" TEXT NOT NULL, + "operation_serialization_version" INTEGER NOT NULL, + PRIMARY KEY(buffer_id, epoch) +); + +CREATE TABLE "channel_buffer_collaborators" ( + "id" SERIAL PRIMARY KEY, + 
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "connection_lost" BOOLEAN NOT NULL DEFAULT FALSE, + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "replica_id" INTEGER NOT NULL +); + +CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id"); diff --git a/crates/collab/migrations/20230825190322_add_server_feature_flags.sql b/crates/collab/migrations/20230825190322_add_server_feature_flags.sql new file mode 100644 index 0000000..fffde54 --- /dev/null +++ b/crates/collab/migrations/20230825190322_add_server_feature_flags.sql @@ -0,0 +1,16 @@ +CREATE TABLE "feature_flags" ( + "id" SERIAL PRIMARY KEY, + "flag" VARCHAR(255) NOT NULL UNIQUE +); + +CREATE UNIQUE INDEX "index_feature_flags" ON "feature_flags" ("id"); + +CREATE TABLE "user_features" ( + "user_id" INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "feature_id" INTEGER NOT NULL REFERENCES feature_flags(id) ON DELETE CASCADE, + PRIMARY KEY (user_id, feature_id) +); + +CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id"); +CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id"); +CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id"); diff --git a/crates/collab/migrations/20230907114200_add_channel_messages.sql b/crates/collab/migrations/20230907114200_add_channel_messages.sql new file mode 100644 index 0000000..abe7753 --- /dev/null +++ b/crates/collab/migrations/20230907114200_add_channel_messages.sql @@ -0,0 +1,19 @@ +CREATE TABLE IF NOT EXISTS "channel_messages" ( + "id" SERIAL PRIMARY KEY, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "sender_id" INTEGER NOT NULL REFERENCES users (id), + "body" TEXT NOT NULL, + "sent_at" TIMESTAMP, + "nonce" UUID NOT NULL +); +CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id"); +CREATE UNIQUE INDEX "index_channel_messages_on_nonce" ON "channel_messages" ("nonce"); + +CREATE TABLE IF NOT EXISTS "channel_chat_participants" ( + "id" SERIAL PRIMARY KEY, + "user_id" INTEGER NOT NULL REFERENCES users (id), + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE +); +CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id"); diff --git a/crates/collab/migrations/20230925210437_add_channel_changes.sql b/crates/collab/migrations/20230925210437_add_channel_changes.sql new file mode 100644 index 0000000..250a9ac --- /dev/null +++ b/crates/collab/migrations/20230925210437_add_channel_changes.sql @@ -0,0 +1,19 @@ +CREATE TABLE IF NOT 
EXISTS "observed_buffer_edits" ( + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "lamport_timestamp" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + PRIMARY KEY (user_id, buffer_id) +); + +CREATE UNIQUE INDEX "index_observed_buffer_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id"); + +CREATE TABLE IF NOT EXISTS "observed_channel_messages" ( + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, + "channel_message_id" INTEGER NOT NULL, + PRIMARY KEY (user_id, channel_id) +); + +CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id"); diff --git a/crates/collab/migrations/20230926102500_add_participant_index_to_room_participants.sql b/crates/collab/migrations/20230926102500_add_participant_index_to_room_participants.sql new file mode 100644 index 0000000..1493119 --- /dev/null +++ b/crates/collab/migrations/20230926102500_add_participant_index_to_room_participants.sql @@ -0,0 +1 @@ +ALTER TABLE room_participants ADD COLUMN participant_index INTEGER; diff --git a/crates/collab/migrations/20231004130100_create_notifications.sql b/crates/collab/migrations/20231004130100_create_notifications.sql new file mode 100644 index 0000000..93c282c --- /dev/null +++ b/crates/collab/migrations/20231004130100_create_notifications.sql @@ -0,0 +1,22 @@ +CREATE TABLE "notification_kinds" ( + "id" SERIAL PRIMARY KEY, + "name" VARCHAR NOT NULL +); + +CREATE UNIQUE INDEX "index_notification_kinds_on_name" ON "notification_kinds" ("name"); + +CREATE TABLE notifications ( + "id" SERIAL PRIMARY KEY, + "created_at" TIMESTAMP NOT NULL DEFAULT now(), + "recipient_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "kind" INTEGER NOT NULL REFERENCES notification_kinds (id), + "entity_id" INTEGER, + "content" TEXT, + "is_read" BOOLEAN NOT NULL DEFAULT FALSE, + "response" BOOLEAN +); + +CREATE INDEX + "index_notifications_on_recipient_id_is_read_kind_entity_id" + ON "notifications" + ("recipient_id", "is_read", "kind", "entity_id"); diff --git a/crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql b/crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql new file mode 100644 index 0000000..8f3a704 --- /dev/null +++ b/crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql @@ -0,0 +1 @@ +ALTER TABLE rooms ADD COLUMN enviroment TEXT; diff --git a/crates/collab/migrations/20231010114600_add_unique_index_on_rooms_channel_id.sql b/crates/collab/migrations/20231010114600_add_unique_index_on_rooms_channel_id.sql new file mode 100644 index 0000000..21ec4cf --- /dev/null +++ b/crates/collab/migrations/20231010114600_add_unique_index_on_rooms_channel_id.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id"); diff --git a/crates/collab/migrations/20231011214412_add_guest_role.sql b/crates/collab/migrations/20231011214412_add_guest_role.sql new file mode 100644 index 0000000..1713547 --- /dev/null +++ b/crates/collab/migrations/20231011214412_add_guest_role.sql @@ -0,0 +1,4 @@ +ALTER TABLE channel_members ADD COLUMN role TEXT; +UPDATE channel_members SET role = CASE WHEN admin THEN 'admin' ELSE 'member' END; + +ALTER TABLE channels ADD COLUMN visibility TEXT NOT NULL DEFAULT 'members'; diff --git 
a/crates/collab/migrations/20231017185833_projects_room_id_fkey_on_delete_cascade.sql b/crates/collab/migrations/20231017185833_projects_room_id_fkey_on_delete_cascade.sql new file mode 100644 index 0000000..be535ff --- /dev/null +++ b/crates/collab/migrations/20231017185833_projects_room_id_fkey_on_delete_cascade.sql @@ -0,0 +1,8 @@ +-- Add migration script here + +ALTER TABLE projects + DROP CONSTRAINT projects_room_id_fkey, + ADD CONSTRAINT projects_room_id_fkey + FOREIGN KEY (room_id) + REFERENCES rooms (id) + ON DELETE CASCADE; diff --git a/crates/collab/migrations/20231018102700_create_mentions.sql b/crates/collab/migrations/20231018102700_create_mentions.sql new file mode 100644 index 0000000..221a174 --- /dev/null +++ b/crates/collab/migrations/20231018102700_create_mentions.sql @@ -0,0 +1,11 @@ +CREATE TABLE "channel_message_mentions" ( + "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE, + "start_offset" INTEGER NOT NULL, + "end_offset" INTEGER NOT NULL, + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + PRIMARY KEY(message_id, start_offset) +); + +-- We use 'on conflict update' with this index, so it should be per-user. +CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce"); +DROP INDEX "index_channel_messages_on_nonce"; diff --git a/crates/collab/migrations/20231024085546_move_channel_paths_to_channels_table.sql b/crates/collab/migrations/20231024085546_move_channel_paths_to_channels_table.sql new file mode 100644 index 0000000..d9fc6c8 --- /dev/null +++ b/crates/collab/migrations/20231024085546_move_channel_paths_to_channels_table.sql @@ -0,0 +1,12 @@ +ALTER TABLE channels ADD COLUMN parent_path TEXT; + +UPDATE channels +SET parent_path = substr( + channel_paths.id_path, + 2, + length(channel_paths.id_path) - length('/' || channel_paths.channel_id::text || '/') +) +FROM channel_paths +WHERE channel_paths.channel_id = channels.id; + +CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path"); diff --git a/crates/collab/migrations/20240103025509_add_role_to_room_participants.sql b/crates/collab/migrations/20240103025509_add_role_to_room_participants.sql new file mode 100644 index 0000000..2748e00 --- /dev/null +++ b/crates/collab/migrations/20240103025509_add_role_to_room_participants.sql @@ -0,0 +1 @@ +ALTER TABLE room_participants ADD COLUMN role TEXT; diff --git a/crates/collab/migrations/20240111085546_fix_column_name.sql b/crates/collab/migrations/20240111085546_fix_column_name.sql new file mode 100644 index 0000000..3f32ee3 --- /dev/null +++ b/crates/collab/migrations/20240111085546_fix_column_name.sql @@ -0,0 +1 @@ +ALTER TABLE rooms ADD COLUMN environment TEXT; diff --git a/crates/collab/migrations/20240117150300_add_impersonator_to_access_tokens.sql b/crates/collab/migrations/20240117150300_add_impersonator_to_access_tokens.sql new file mode 100644 index 0000000..8c79640 --- /dev/null +++ b/crates/collab/migrations/20240117150300_add_impersonator_to_access_tokens.sql @@ -0,0 +1 @@ +ALTER TABLE access_tokens ADD COLUMN impersonated_user_id integer; diff --git a/crates/collab/migrations/20240122174606_add_contributors.sql b/crates/collab/migrations/20240122174606_add_contributors.sql new file mode 100644 index 0000000..16bec82 --- /dev/null +++ b/crates/collab/migrations/20240122174606_add_contributors.sql @@ -0,0 +1,5 @@ +CREATE TABLE contributors ( + user_id INTEGER REFERENCES users(id), + signed_at TIMESTAMP NOT NULL DEFAULT NOW(), + PRIMARY KEY 
(user_id) +); diff --git a/crates/collab/migrations/20240122224506_add_requires_zed_cla_column_to_channels.sql b/crates/collab/migrations/20240122224506_add_requires_zed_cla_column_to_channels.sql new file mode 100644 index 0000000..a9248d2 --- /dev/null +++ b/crates/collab/migrations/20240122224506_add_requires_zed_cla_column_to_channels.sql @@ -0,0 +1 @@ +ALTER TABLE "channels" ADD COLUMN "requires_zed_cla" BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20240129193601_fix_parent_path_index.sql b/crates/collab/migrations/20240129193601_fix_parent_path_index.sql new file mode 100644 index 0000000..73dd6e3 --- /dev/null +++ b/crates/collab/migrations/20240129193601_fix_parent_path_index.sql @@ -0,0 +1,4 @@ +-- Add migration script here + +DROP INDEX index_channels_on_parent_path; +CREATE INDEX index_channels_on_parent_path ON channels (parent_path text_pattern_ops); diff --git a/crates/collab/migrations/20240203113741_add_reply_to_message.sql b/crates/collab/migrations/20240203113741_add_reply_to_message.sql new file mode 100644 index 0000000..6f40b62 --- /dev/null +++ b/crates/collab/migrations/20240203113741_add_reply_to_message.sql @@ -0,0 +1 @@ +ALTER TABLE channel_messages ADD reply_to_message_id INTEGER DEFAULT NULL diff --git a/crates/collab/migrations/20240207041417_add_in_call_column_to_room_participants.sql b/crates/collab/migrations/20240207041417_add_in_call_column_to_room_participants.sql new file mode 100644 index 0000000..09463c6 --- /dev/null +++ b/crates/collab/migrations/20240207041417_add_in_call_column_to_room_participants.sql @@ -0,0 +1,3 @@ +-- Add migration script here + +ALTER TABLE room_participants ADD COLUMN in_call BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20240213200201_remove_unused_room_columns.sql b/crates/collab/migrations/20240213200201_remove_unused_room_columns.sql new file mode 100644 index 0000000..dc4897a --- /dev/null +++ b/crates/collab/migrations/20240213200201_remove_unused_room_columns.sql @@ -0,0 +1,4 @@ +-- Add migration script here +ALTER TABLE rooms DROP COLUMN enviroment; +ALTER TABLE rooms DROP COLUMN environment; +ALTER TABLE room_participants DROP COLUMN in_call; diff --git a/crates/collab/migrations/20240214102900_add_extensions.sql b/crates/collab/migrations/20240214102900_add_extensions.sql new file mode 100644 index 0000000..b320940 --- /dev/null +++ b/crates/collab/migrations/20240214102900_add_extensions.sql @@ -0,0 +1,22 @@ +CREATE TABLE IF NOT EXISTS extensions ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + external_id TEXT NOT NULL, + latest_version TEXT NOT NULL, + total_download_count BIGINT NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS extension_versions ( + extension_id INTEGER REFERENCES extensions(id), + version TEXT NOT NULL, + published_at TIMESTAMP NOT NULL DEFAULT now(), + authors TEXT NOT NULL, + repository TEXT NOT NULL, + description TEXT NOT NULL, + download_count BIGINT NOT NULL DEFAULT 0, + PRIMARY KEY(extension_id, version) +); + +CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id"); +CREATE INDEX "trigram_index_extensions_name" ON "extensions" USING GIN(name gin_trgm_ops); +CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count"); diff --git a/crates/collab/migrations/20240220234826_add_rate_buckets.sql b/crates/collab/migrations/20240220234826_add_rate_buckets.sql new file mode 100644 index 0000000..864a437 --- /dev/null +++ b/crates/collab/migrations/20240220234826_add_rate_buckets.sql 
@@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS rate_buckets ( + user_id INT NOT NULL, + rate_limit_name VARCHAR(255) NOT NULL, + token_count INT NOT NULL, + last_refill TIMESTAMP WITHOUT TIME ZONE NOT NULL, + PRIMARY KEY (user_id, rate_limit_name), + CONSTRAINT fk_user + FOREIGN KEY (user_id) REFERENCES users(id) +); + +CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); diff --git a/crates/collab/migrations/20240221151017_add_edited_at_field_to_channel_message.sql b/crates/collab/migrations/20240221151017_add_edited_at_field_to_channel_message.sql new file mode 100644 index 0000000..1d07b07 --- /dev/null +++ b/crates/collab/migrations/20240221151017_add_edited_at_field_to_channel_message.sql @@ -0,0 +1 @@ +ALTER TABLE channel_messages ADD edited_at TIMESTAMP DEFAULT NULL; diff --git a/crates/collab/migrations/20240226163408_hosted_projects.sql b/crates/collab/migrations/20240226163408_hosted_projects.sql new file mode 100644 index 0000000..c6ade71 --- /dev/null +++ b/crates/collab/migrations/20240226163408_hosted_projects.sql @@ -0,0 +1,11 @@ +-- Add migration script here + +CREATE TABLE hosted_projects ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + channel_id INT NOT NULL REFERENCES channels(id), + name TEXT NOT NULL, + visibility TEXT NOT NULL, + deleted_at TIMESTAMP NULL +); +CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id); +CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL); diff --git a/crates/collab/migrations/20240226164505_unique_channel_names.sql b/crates/collab/migrations/20240226164505_unique_channel_names.sql new file mode 100644 index 0000000..c9d9f0a --- /dev/null +++ b/crates/collab/migrations/20240226164505_unique_channel_names.sql @@ -0,0 +1,3 @@ +-- Add migration script here + +CREATE UNIQUE INDEX uix_channels_parent_path_name ON channels(parent_path, name) WHERE (parent_path IS NOT NULL AND parent_path != ''); diff --git a/crates/collab/migrations/20240227215556_hosted_projects_in_projects.sql b/crates/collab/migrations/20240227215556_hosted_projects_in_projects.sql new file mode 100644 index 0000000..69905d1 --- /dev/null +++ b/crates/collab/migrations/20240227215556_hosted_projects_in_projects.sql @@ -0,0 +1,3 @@ +-- Add migration script here +ALTER TABLE projects ALTER COLUMN host_user_id DROP NOT NULL; +ALTER TABLE projects ADD COLUMN hosted_project_id INTEGER REFERENCES hosted_projects(id) UNIQUE NULL; diff --git a/crates/collab/migrations/20240307163119_denormalize_buffer_ops.sql b/crates/collab/migrations/20240307163119_denormalize_buffer_ops.sql new file mode 100644 index 0000000..a332a20 --- /dev/null +++ b/crates/collab/migrations/20240307163119_denormalize_buffer_ops.sql @@ -0,0 +1,17 @@ +-- Add migration script here + +ALTER TABLE buffers ADD COLUMN latest_operation_epoch INTEGER; +ALTER TABLE buffers ADD COLUMN latest_operation_lamport_timestamp INTEGER; +ALTER TABLE buffers ADD COLUMN latest_operation_replica_id INTEGER; + +WITH ops AS ( + SELECT DISTINCT ON (buffer_id) buffer_id, epoch, lamport_timestamp, replica_id + FROM buffer_operations + ORDER BY buffer_id, epoch DESC, lamport_timestamp DESC, replica_id DESC +) +UPDATE buffers +SET latest_operation_epoch = ops.epoch, + latest_operation_lamport_timestamp = ops.lamport_timestamp, + latest_operation_replica_id = ops.replica_id +FROM ops +WHERE buffers.id = ops.buffer_id; diff --git a/crates/collab/migrations/20240315182903_non_null_channel_role.sql 
b/crates/collab/migrations/20240315182903_non_null_channel_role.sql new file mode 100644 index 0000000..2d359f8 --- /dev/null +++ b/crates/collab/migrations/20240315182903_non_null_channel_role.sql @@ -0,0 +1,4 @@ +-- Add migration script here + +ALTER TABLE channel_members ALTER role SET NOT NULL; +ALTER TABLE channel_members DROP COLUMN admin; diff --git a/crates/collab/migrations/20240315183903_channel_parent_path_not_null.sql b/crates/collab/migrations/20240315183903_channel_parent_path_not_null.sql new file mode 100644 index 0000000..5703578 --- /dev/null +++ b/crates/collab/migrations/20240315183903_channel_parent_path_not_null.sql @@ -0,0 +1,2 @@ +-- Add migration script here +ALTER TABLE channels ALTER parent_path SET NOT NULL; diff --git a/crates/collab/migrations/20240320124800_add_extension_schema_version.sql b/crates/collab/migrations/20240320124800_add_extension_schema_version.sql new file mode 100644 index 0000000..75fd0f4 --- /dev/null +++ b/crates/collab/migrations/20240320124800_add_extension_schema_version.sql @@ -0,0 +1,2 @@ +-- Add migration script here +ALTER TABLE extension_versions ADD COLUMN schema_version INTEGER NOT NULL DEFAULT 0; diff --git a/crates/collab/migrations/20240321162658_add_devservers.sql b/crates/collab/migrations/20240321162658_add_devservers.sql new file mode 100644 index 0000000..cb1ff4d --- /dev/null +++ b/crates/collab/migrations/20240321162658_add_devservers.sql @@ -0,0 +1,7 @@ +CREATE TABLE dev_servers ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + channel_id INT NOT NULL REFERENCES channels(id), + name TEXT NOT NULL, + hashed_token TEXT NOT NULL +); +CREATE INDEX idx_dev_servers_on_channel_id ON dev_servers (channel_id); diff --git a/crates/collab/migrations/20240335123500_add_extension_wasm_api_version.sql b/crates/collab/migrations/20240335123500_add_extension_wasm_api_version.sql new file mode 100644 index 0000000..3b95323 --- /dev/null +++ b/crates/collab/migrations/20240335123500_add_extension_wasm_api_version.sql @@ -0,0 +1 @@ +ALTER TABLE extension_versions ADD COLUMN wasm_api_version TEXT; diff --git a/crates/collab/migrations/20240402155003_add_dev_server_projects.sql b/crates/collab/migrations/20240402155003_add_dev_server_projects.sql new file mode 100644 index 0000000..003c43f --- /dev/null +++ b/crates/collab/migrations/20240402155003_add_dev_server_projects.sql @@ -0,0 +1,9 @@ +CREATE TABLE remote_projects ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + channel_id INT NOT NULL REFERENCES channels(id), + dev_server_id INT NOT NULL REFERENCES dev_servers(id), + name TEXT NOT NULL, + path TEXT NOT NULL +); + +ALTER TABLE projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id); diff --git a/crates/collab/migrations/20240409082755_create_embeddings.sql b/crates/collab/migrations/20240409082755_create_embeddings.sql new file mode 100644 index 0000000..ae4b4bc --- /dev/null +++ b/crates/collab/migrations/20240409082755_create_embeddings.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS "embeddings" ( + "model" TEXT, + "digest" BYTEA, + "dimensions" FLOAT4[1536], + "retrieved_at" TIMESTAMP NOT NULL DEFAULT now(), + PRIMARY KEY ("model", "digest") +); + +CREATE INDEX IF NOT EXISTS "idx_retrieved_at_on_embeddings" ON "embeddings" ("retrieved_at"); diff --git a/crates/collab/migrations/20240412165156_dev_servers_per_user.sql b/crates/collab/migrations/20240412165156_dev_servers_per_user.sql new file mode 100644 index 0000000..7ef9e2f --- /dev/null +++ 
b/crates/collab/migrations/20240412165156_dev_servers_per_user.sql @@ -0,0 +1,7 @@ +DELETE FROM remote_projects; +DELETE FROM dev_servers; + +ALTER TABLE dev_servers DROP COLUMN channel_id; +ALTER TABLE dev_servers ADD COLUMN user_id INT NOT NULL REFERENCES users(id); + +ALTER TABLE remote_projects DROP COLUMN channel_id; diff --git a/crates/collab/migrations/20240417192746_unique_remote_projects_by_paths.sql b/crates/collab/migrations/20240417192746_unique_remote_projects_by_paths.sql new file mode 100644 index 0000000..923b948 --- /dev/null +++ b/crates/collab/migrations/20240417192746_unique_remote_projects_by_paths.sql @@ -0,0 +1,3 @@ +ALTER TABLE remote_projects DROP COLUMN name; +ALTER TABLE remote_projects +ADD CONSTRAINT unique_path_constraint UNIQUE(dev_server_id, path); diff --git a/crates/collab/migrations/20240502150229_rename_to_dev_server_projects.sql b/crates/collab/migrations/20240502150229_rename_to_dev_server_projects.sql new file mode 100644 index 0000000..0d8e9de --- /dev/null +++ b/crates/collab/migrations/20240502150229_rename_to_dev_server_projects.sql @@ -0,0 +1,11 @@ +CREATE TABLE dev_server_projects ( + id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY (START WITH 100), + dev_server_id INT NOT NULL REFERENCES dev_servers(id) ON DELETE CASCADE, + path TEXT NOT NULL +); +INSERT INTO dev_server_projects OVERRIDING SYSTEM VALUE SELECT * FROM remote_projects; + +ALTER TABLE dev_server_projects ADD CONSTRAINT uix_dev_server_projects_dev_server_id_path UNIQUE(dev_server_id, path); + +ALTER TABLE projects ADD COLUMN dev_server_project_id INTEGER REFERENCES dev_server_projects(id); +UPDATE projects SET dev_server_project_id = remote_project_id; diff --git a/crates/collab/migrations/20240502180204_remove_old_remote_projects.sql b/crates/collab/migrations/20240502180204_remove_old_remote_projects.sql new file mode 100644 index 0000000..01ace43 --- /dev/null +++ b/crates/collab/migrations/20240502180204_remove_old_remote_projects.sql @@ -0,0 +1,2 @@ +ALTER TABLE projects DROP COLUMN remote_project_id; +DROP TABLE remote_projects; diff --git a/crates/collab/migrations/20240514164510_store_ssh_connect_string.sql b/crates/collab/migrations/20240514164510_store_ssh_connect_string.sql new file mode 100644 index 0000000..5085ca2 --- /dev/null +++ b/crates/collab/migrations/20240514164510_store_ssh_connect_string.sql @@ -0,0 +1 @@ +ALTER TABLE dev_servers ADD COLUMN ssh_connection_string TEXT; diff --git a/crates/collab/seed.default.json b/crates/collab/seed.default.json new file mode 100644 index 0000000..1abec64 --- /dev/null +++ b/crates/collab/seed.default.json @@ -0,0 +1,13 @@ +{ + "admins": [ + "nathansobo", + "as-cii", + "maxbrunsfeld", + "iamnbutler", + "mikayla-maki", + "JosephTLyons", + "rgbkrk" + ], + "channels": ["zed"], + "number_of_users": 100 +} diff --git a/crates/collab/src/ai.rs b/crates/collab/src/ai.rs new file mode 100644 index 0000000..06c6e77 --- /dev/null +++ b/crates/collab/src/ai.rs @@ -0,0 +1,138 @@ +use anyhow::{anyhow, Context as _, Result}; +use rpc::proto; +use util::ResultExt as _; + +pub fn language_model_request_to_open_ai( + request: proto::CompleteWithLanguageModel, +) -> Result { + Ok(open_ai::Request { + model: open_ai::Model::from_id(&request.model).unwrap_or(open_ai::Model::FourTurbo), + messages: request + .messages + .into_iter() + .map(|message: proto::LanguageModelRequestMessage| { + let role = proto::LanguageModelRole::from_i32(message.role) + .ok_or_else(|| anyhow!("invalid role {}", message.role))?; + + let openai_message = match role { + 
proto::LanguageModelRole::LanguageModelUser => open_ai::RequestMessage::User { + content: message.content, + }, + proto::LanguageModelRole::LanguageModelAssistant => { + open_ai::RequestMessage::Assistant { + content: Some(message.content), + tool_calls: message + .tool_calls + .into_iter() + .filter_map(|call| { + Some(open_ai::ToolCall { + id: call.id, + content: match call.variant? { + proto::tool_call::Variant::Function(f) => { + open_ai::ToolCallContent::Function { + function: open_ai::FunctionContent { + name: f.name, + arguments: f.arguments, + }, + } + } + }, + }) + }) + .collect(), + } + } + proto::LanguageModelRole::LanguageModelSystem => { + open_ai::RequestMessage::System { + content: message.content, + } + } + proto::LanguageModelRole::LanguageModelTool => open_ai::RequestMessage::Tool { + tool_call_id: message + .tool_call_id + .ok_or_else(|| anyhow!("tool message is missing tool call id"))?, + content: message.content, + }, + }; + + Ok(openai_message) + }) + .collect::<Result<Vec<open_ai::RequestMessage>>>()?, + stream: true, + stop: request.stop, + temperature: request.temperature, + tools: request + .tools + .into_iter() + .filter_map(|tool| { + Some(match tool.variant? { + proto::chat_completion_tool::Variant::Function(f) => { + open_ai::ToolDefinition::Function { + function: open_ai::FunctionDefinition { + name: f.name, + description: f.description, + parameters: if let Some(params) = &f.parameters { + Some( + serde_json::from_str(params) + .context("failed to deserialize tool parameters") + .log_err()?, + ) + } else { + None + }, + }, + } + } + }) + }) + .collect(), + tool_choice: request.tool_choice, + }) +} + +pub fn language_model_request_to_google_ai( + request: proto::CompleteWithLanguageModel, +) -> Result<google_ai::GenerateContentRequest> { + Ok(google_ai::GenerateContentRequest { + contents: request + .messages + .into_iter() + .map(language_model_request_message_to_google_ai) + .collect::<Result<Vec<google_ai::Content>>>()?, + generation_config: None, + safety_settings: None, + }) +} + +pub fn language_model_request_message_to_google_ai( + message: proto::LanguageModelRequestMessage, +) -> Result<google_ai::Content> { + let role = proto::LanguageModelRole::from_i32(message.role) + .ok_or_else(|| anyhow!("invalid role {}", message.role))?; + + Ok(google_ai::Content { + parts: vec![google_ai::Part::TextPart(google_ai::TextPart { + text: message.content, + })], + role: match role { + proto::LanguageModelRole::LanguageModelUser => google_ai::Role::User, + proto::LanguageModelRole::LanguageModelAssistant => google_ai::Role::Model, + proto::LanguageModelRole::LanguageModelSystem => google_ai::Role::User, + proto::LanguageModelRole::LanguageModelTool => { + Err(anyhow!("we don't handle tool calls with google ai yet"))?
+ } + }, + }) +} + +pub fn count_tokens_request_to_google_ai( + request: proto::CountTokensWithLanguageModel, +) -> Result<google_ai::CountTokensRequest> { + Ok(google_ai::CountTokensRequest { + contents: request + .messages + .into_iter() + .map(language_model_request_message_to_google_ai) + .collect::<Result<Vec<google_ai::Content>>>()?, + }) +} diff --git a/crates/collab/src/api.rs b/crates/collab/src/api.rs new file mode 100644 index 0000000..f5663db --- /dev/null +++ b/crates/collab/src/api.rs @@ -0,0 +1,240 @@ +pub mod events; +pub mod extensions; +pub mod ips_file; +pub mod slack; + +use crate::{ + auth, + db::{ContributorSelector, User, UserId}, + rpc, AppState, Error, Result, +}; +use anyhow::anyhow; +use axum::{ + body::Body, + extract::{self, Path, Query}, + http::{self, Request, StatusCode}, + middleware::{self, Next}, + response::IntoResponse, + routing::{get, post}, + Extension, Json, Router, +}; +use axum_extra::response::ErasedJson; +use chrono::SecondsFormat; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tower::ServiceBuilder; + +pub use extensions::fetch_extensions_from_blob_store_periodically; + +pub fn routes(rpc_server: Option<Arc<rpc::Server>>, state: Arc<AppState>) -> Router<(), Body> { + Router::new() + .route("/user", get(get_authenticated_user)) + .route("/users/:id/access_tokens", post(create_access_token)) + .route("/rpc_server_snapshot", get(get_rpc_server_snapshot)) + .route("/contributors", get(get_contributors).post(add_contributor)) + .route("/contributor", get(check_is_contributor)) + .layer( + ServiceBuilder::new() + .layer(Extension(state)) + .layer(Extension(rpc_server)) + .layer(middleware::from_fn(validate_api_token)), + ) +} + +pub async fn validate_api_token<B>(req: Request<B>, next: Next<B>) -> impl IntoResponse { + let token = req + .headers() + .get(http::header::AUTHORIZATION) + .and_then(|header| header.to_str().ok()) + .ok_or_else(|| { + Error::Http( + StatusCode::BAD_REQUEST, + "missing authorization header".to_string(), + ) + })? + .strip_prefix("token ") + .ok_or_else(|| { + Error::Http( + StatusCode::BAD_REQUEST, + "invalid authorization header".to_string(), + ) + })?; + + let state = req.extensions().get::<Arc<AppState>>().unwrap(); + + if token != state.config.api_token { + Err(Error::Http( + StatusCode::UNAUTHORIZED, + "invalid authorization token".to_string(), + ))?
+ } + + Ok::<_, Error>(next.run(req).await) +} + +#[derive(Debug, Deserialize)] +struct AuthenticatedUserParams { + github_user_id: Option<i32>, + github_login: String, + github_email: Option<String>, +} + +#[derive(Debug, Serialize)] +struct AuthenticatedUserResponse { + user: User, + metrics_id: String, +} + +async fn get_authenticated_user( + Query(params): Query<AuthenticatedUserParams>, + Extension(app): Extension<Arc<AppState>>, +) -> Result<Json<AuthenticatedUserResponse>> { + let initial_channel_id = app.config.auto_join_channel_id; + + let user = app + .db + .get_or_create_user_by_github_account( + &params.github_login, + params.github_user_id, + params.github_email.as_deref(), + initial_channel_id, + ) + .await?; + let metrics_id = app.db.get_user_metrics_id(user.id).await?; + return Ok(Json(AuthenticatedUserResponse { user, metrics_id })); +} + +#[derive(Deserialize, Debug)] +struct CreateUserParams { + github_user_id: i32, + github_login: String, + email_address: String, + email_confirmation_code: Option<String>, + #[serde(default)] + admin: bool, + #[serde(default)] + invite_count: i32, +} + +async fn get_rpc_server_snapshot( + Extension(rpc_server): Extension<Option<Arc<rpc::Server>>>, +) -> Result<ErasedJson> { + let Some(rpc_server) = rpc_server else { + return Err(Error::Internal(anyhow!("rpc server is not available"))); + }; + + Ok(ErasedJson::pretty(rpc_server.snapshot().await)) +} + +async fn get_contributors(Extension(app): Extension<Arc<AppState>>) -> Result<Json<Vec<String>>> { + Ok(Json(app.db.get_contributors().await?)) +} + +#[derive(Debug, Deserialize)] +struct CheckIsContributorParams { + github_user_id: Option<i32>, + github_login: Option<String>, +} + +impl CheckIsContributorParams { + fn as_contributor_selector(self) -> Result<ContributorSelector> { + if let Some(github_user_id) = self.github_user_id { + return Ok(ContributorSelector::GitHubUserId { github_user_id }); + } + + if let Some(github_login) = self.github_login { + return Ok(ContributorSelector::GitHubLogin { github_login }); + } + + Err(anyhow!( + "must be one of `github_user_id` or `github_login`." + ))? + } +} + +#[derive(Debug, Serialize)] +struct CheckIsContributorResponse { + signed_at: Option<String>, +} + +async fn check_is_contributor( + Extension(app): Extension<Arc<AppState>>, + Query(params): Query<CheckIsContributorParams>, +) -> Result<Json<CheckIsContributorResponse>> { + let params = params.as_contributor_selector()?; + Ok(Json(CheckIsContributorResponse { + signed_at: app + .db + .get_contributor_sign_timestamp(&params) + .await? + .map(|ts| ts.and_utc().to_rfc3339_opts(SecondsFormat::Millis, true)), + })) +} + +async fn add_contributor( + Extension(app): Extension<Arc<AppState>>, + extract::Json(params): extract::Json<AuthenticatedUserParams>, +) -> Result<()> { + let initial_channel_id = app.config.auto_join_channel_id; + app.db + .add_contributor( + &params.github_login, + params.github_user_id, + params.github_email.as_deref(), + initial_channel_id, + ) + .await +} + +#[derive(Deserialize)] +struct CreateAccessTokenQueryParams { + public_key: String, + impersonate: Option<String>, +} + +#[derive(Serialize)] +struct CreateAccessTokenResponse { + user_id: UserId, + encrypted_access_token: String, +} + +async fn create_access_token( + Path(user_id): Path<UserId>, + Query(params): Query<CreateAccessTokenQueryParams>, + Extension(app): Extension<Arc<AppState>>, +) -> Result<Json<CreateAccessTokenResponse>> { + let user = app + .db + .get_user_by_id(user_id) + .await? + .ok_or_else(|| anyhow!("user not found"))?; + + let mut impersonated_user_id = None; + if let Some(impersonate) = params.impersonate { + if user.admin { + if let Some(impersonated_user) = app.db.get_user_by_github_login(&impersonate).await?
{ + impersonated_user_id = Some(impersonated_user.id); + } else { + return Err(Error::Http( + StatusCode::UNPROCESSABLE_ENTITY, + format!("user {impersonate} does not exist"), + )); + } + } else { + return Err(Error::Http( + StatusCode::UNAUTHORIZED, + "you do not have permission to impersonate other users".to_string(), + )); + } + } + + let access_token = + auth::create_access_token(app.db.as_ref(), user_id, impersonated_user_id).await?; + let encrypted_access_token = + auth::encrypt_access_token(&access_token, params.public_key.clone())?; + + Ok(Json(CreateAccessTokenResponse { + user_id: impersonated_user_id.unwrap_or(user_id), + encrypted_access_token, + })) +} diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs new file mode 100644 index 0000000..8c269f5 --- /dev/null +++ b/crates/collab/src/api/events.rs @@ -0,0 +1,1234 @@ +use super::ips_file::IpsFile; +use crate::{api::slack, AppState, Error, Result}; +use anyhow::{anyhow, Context}; +use aws_sdk_s3::primitives::ByteStream; +use axum::{ + body::Bytes, + headers::Header, + http::{HeaderMap, HeaderName, StatusCode}, + routing::post, + Extension, Router, TypedHeader, +}; +use rpc::ExtensionMetadata; +use semantic_version::SemanticVersion; +use serde::{Serialize, Serializer}; +use sha2::{Digest, Sha256}; +use std::sync::{Arc, OnceLock}; +use telemetry_events::{ + ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event, + EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, + SettingEvent, +}; +use uuid::Uuid; + +static CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; + +pub fn router() -> Router { + Router::new() + .route("/telemetry/events", post(post_events)) + .route("/telemetry/crashes", post(post_crash)) + .route("/telemetry/panics", post(post_panic)) + .route("/telemetry/hangs", post(post_hang)) +} + +pub struct ZedChecksumHeader(Vec); + +impl Header for ZedChecksumHeader { + fn name() -> &'static HeaderName { + static ZED_CHECKSUM_HEADER: OnceLock = OnceLock::new(); + ZED_CHECKSUM_HEADER.get_or_init(|| HeaderName::from_static("x-zed-checksum")) + } + + fn decode<'i, I>(values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + let checksum = values + .next() + .ok_or_else(axum::headers::Error::invalid)? + .to_str() + .map_err(|_| axum::headers::Error::invalid())?; + + let bytes = hex::decode(checksum).map_err(|_| axum::headers::Error::invalid())?; + Ok(Self(bytes)) + } + + fn encode>(&self, _values: &mut E) { + unimplemented!() + } +} + +pub struct CloudflareIpCountryHeader(String); + +impl Header for CloudflareIpCountryHeader { + fn name() -> &'static HeaderName { + static CLOUDFLARE_IP_COUNTRY_HEADER: OnceLock = OnceLock::new(); + CLOUDFLARE_IP_COUNTRY_HEADER.get_or_init(|| HeaderName::from_static("cf-ipcountry")) + } + + fn decode<'i, I>(values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + let country_code = values + .next() + .ok_or_else(axum::headers::Error::invalid)? 
+ .to_str() + .map_err(|_| axum::headers::Error::invalid())?; + + Ok(Self(country_code.to_string())) + } + + fn encode>(&self, _values: &mut E) { + unimplemented!() + } +} + +pub async fn post_crash( + Extension(app): Extension>, + headers: HeaderMap, + body: Bytes, +) -> Result<()> { + let report = IpsFile::parse(&body)?; + let version_threshold = SemanticVersion::new(0, 123, 0); + + let bundle_id = &report.header.bundle_id; + let app_version = &report.app_version(); + + if bundle_id == "dev.zed.Zed-Dev" { + log::error!("Crash uploads from {} are ignored.", bundle_id); + return Ok(()); + } + + if app_version.is_none() || app_version.unwrap() < version_threshold { + log::error!( + "Crash uploads from {} are ignored.", + report.header.app_version + ); + return Ok(()); + } + let app_version = app_version.unwrap(); + + if let Some(blob_store_client) = app.blob_store_client.as_ref() { + let response = blob_store_client + .head_object() + .bucket(CRASH_REPORTS_BUCKET) + .key(report.header.incident_id.clone() + ".ips") + .send() + .await; + + if response.is_ok() { + log::info!("We've already uploaded this crash"); + return Ok(()); + } + + blob_store_client + .put_object() + .bucket(CRASH_REPORTS_BUCKET) + .key(report.header.incident_id.clone() + ".ips") + .acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead) + .body(ByteStream::from(body.to_vec())) + .send() + .await + .map_err(|e| log::error!("Failed to upload crash: {}", e)) + .ok(); + } + + let recent_panic_on: Option = headers + .get("x-zed-panicked-on") + .and_then(|h| h.to_str().ok()) + .and_then(|s| s.parse().ok()); + + let installation_id = headers + .get("x-zed-installation-id") + .and_then(|h| h.to_str().ok()) + .map(|s| s.to_string()) + .unwrap_or_default(); + + let mut recent_panic = None; + + if let Some(recent_panic_on) = recent_panic_on { + let crashed_at = match report.timestamp() { + Ok(t) => Some(t), + Err(e) => { + log::error!("Can't parse {}: {}", report.header.timestamp, e); + None + } + }; + if crashed_at.is_some_and(|t| (t.timestamp_millis() - recent_panic_on).abs() <= 30000) { + recent_panic = headers.get("x-zed-panic").and_then(|h| h.to_str().ok()); + } + } + + let description = report.description(recent_panic); + let summary = report.backtrace_summary(); + + tracing::error!( + service = "client", + version = %report.header.app_version, + os_version = %report.header.os_version, + bundle_id = %report.header.bundle_id, + incident_id = %report.header.incident_id, + installation_id = %installation_id, + description = %description, + backtrace = %summary, + "crash report"); + + if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { + let payload = slack::WebhookBody::new(|w| { + w.add_section(|s| s.text(slack::Text::markdown(description))) + .add_section(|s| { + s.add_field(slack::Text::markdown(format!( + "*Version:*\n{} ({})", + bundle_id, app_version + ))) + .add_field({ + let hostname = app.config.blob_store_url.clone().unwrap_or_default(); + let hostname = hostname.strip_prefix("https://").unwrap_or_else(|| { + hostname.strip_prefix("http://").unwrap_or_default() + }); + + slack::Text::markdown(format!( + "*Incident:*\n", + CRASH_REPORTS_BUCKET, + hostname, + report.header.incident_id, + report + .header + .incident_id + .chars() + .take(8) + .collect::(), + )) + }) + }) + .add_rich_text(|r| r.add_preformatted(|p| p.add_text(summary))) + }); + let payload_json = serde_json::to_string(&payload).map_err(|err| { + log::error!("Failed to serialize payload to JSON: {err}"); + 
Error::Internal(anyhow!(err)) + })?; + + reqwest::Client::new() + .post(slack_panics_webhook) + .header("Content-Type", "application/json") + .body(payload_json) + .send() + .await + .map_err(|err| { + log::error!("Failed to send payload to Slack: {err}"); + Error::Internal(anyhow!(err)) + })?; + } + + Ok(()) +} + +pub async fn post_hang( + Extension(app): Extension>, + TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, + body: Bytes, +) -> Result<()> { + let Some(expected) = calculate_json_checksum(app.clone(), &body) else { + return Err(Error::Http( + StatusCode::INTERNAL_SERVER_ERROR, + "events not enabled".into(), + ))?; + }; + + if checksum != expected { + return Err(Error::Http( + StatusCode::BAD_REQUEST, + "invalid checksum".into(), + ))?; + } + + let incident_id = Uuid::new_v4().to_string(); + + // dump JSON into S3 so we can get frame offsets if we need to. + if let Some(blob_store_client) = app.blob_store_client.as_ref() { + blob_store_client + .put_object() + .bucket(CRASH_REPORTS_BUCKET) + .key(incident_id.clone() + ".hang.json") + .acl(aws_sdk_s3::types::ObjectCannedAcl::PublicRead) + .body(ByteStream::from(body.to_vec())) + .send() + .await + .map_err(|e| log::error!("Failed to upload crash: {}", e)) + .ok(); + } + + let report: telemetry_events::HangReport = serde_json::from_slice(&body).map_err(|err| { + log::error!("can't parse report json: {err}"); + Error::Internal(anyhow!(err)) + })?; + + let mut backtrace = "Possible hang detected on main thread:".to_string(); + let unknown = "".to_string(); + for frame in report.backtrace.iter() { + backtrace.push_str(&format!("\n{}", frame.symbols.first().unwrap_or(&unknown))); + } + + tracing::error!( + service = "client", + version = %report.app_version.unwrap_or_default().to_string(), + os_name = %report.os_name, + os_version = report.os_version.unwrap_or_default().to_string(), + incident_id = %incident_id, + installation_id = %report.installation_id.unwrap_or_default(), + backtrace = %backtrace, + "hang report"); + + Ok(()) +} + +pub async fn post_panic( + Extension(app): Extension>, + TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, + body: Bytes, +) -> Result<()> { + let Some(expected) = calculate_json_checksum(app.clone(), &body) else { + return Err(Error::Http( + StatusCode::INTERNAL_SERVER_ERROR, + "events not enabled".into(), + ))?; + }; + + if checksum != expected { + return Err(Error::Http( + StatusCode::BAD_REQUEST, + "invalid checksum".into(), + ))?; + } + + let report: telemetry_events::PanicRequest = serde_json::from_slice(&body) + .map_err(|_| Error::Http(StatusCode::BAD_REQUEST, "invalid json".into()))?; + let panic = report.panic; + + tracing::error!( + service = "client", + version = %panic.app_version, + os_name = %panic.os_name, + os_version = %panic.os_version.clone().unwrap_or_default(), + installation_id = %panic.installation_id.unwrap_or_default(), + description = %panic.payload, + backtrace = %panic.backtrace.join("\n"), + "panic report"); + + let backtrace = if panic.backtrace.len() > 25 { + let total = panic.backtrace.len(); + format!( + "{}\n and {} more", + panic + .backtrace + .iter() + .take(20) + .cloned() + .collect::>() + .join("\n"), + total - 20 + ) + } else { + panic.backtrace.join("\n") + }; + let backtrace_with_summary = panic.payload + "\n" + &backtrace; + + if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { + let payload = slack::WebhookBody::new(|w| { + w.add_section(|s| s.text(slack::Text::markdown("Panic request".to_string()))) + .add_section(|s| { + 
s.add_field(slack::Text::markdown(format!( + "*Version:*\n {} ", + panic.app_version + ))) + .add_field({ + slack::Text::markdown(format!( + "*OS:*\n{} {}", + panic.os_name, + panic.os_version.unwrap_or_default() + )) + }) + }) + .add_rich_text(|r| r.add_preformatted(|p| p.add_text(backtrace_with_summary))) + }); + let payload_json = serde_json::to_string(&payload).map_err(|err| { + log::error!("Failed to serialize payload to JSON: {err}"); + Error::Internal(anyhow!(err)) + })?; + + reqwest::Client::new() + .post(slack_panics_webhook) + .header("Content-Type", "application/json") + .body(payload_json) + .send() + .await + .map_err(|err| { + log::error!("Failed to send payload to Slack: {err}"); + Error::Internal(anyhow!(err)) + })?; + } + + Ok(()) +} + +pub async fn post_events( + Extension(app): Extension>, + TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, + country_code_header: Option>, + body: Bytes, +) -> Result<()> { + let Some(clickhouse_client) = app.clickhouse_client.clone() else { + Err(Error::Http( + StatusCode::NOT_IMPLEMENTED, + "not supported".into(), + ))? + }; + + let Some(expected) = calculate_json_checksum(app.clone(), &body) else { + return Err(Error::Http( + StatusCode::INTERNAL_SERVER_ERROR, + "events not enabled".into(), + ))?; + }; + + if checksum != expected { + return Err(Error::Http( + StatusCode::BAD_REQUEST, + "invalid checksum".into(), + ))?; + } + + let request_body: telemetry_events::EventRequestBody = + serde_json::from_slice(&body).map_err(|err| { + log::error!("can't parse event json: {err}"); + Error::Internal(anyhow!(err)) + })?; + + let mut to_upload = ToUpload::default(); + let Some(last_event) = request_body.events.last() else { + return Err(Error::Http(StatusCode::BAD_REQUEST, "no events".into()))?; + }; + let country_code = country_code_header.map(|h| h.0 .0); + + let first_event_at = chrono::Utc::now() + - chrono::Duration::milliseconds(last_event.milliseconds_since_first_event); + + for wrapper in &request_body.events { + match &wrapper.event { + Event::Editor(event) => to_upload.editor_events.push(EditorEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + country_code.clone(), + )), + // Needed for clients sending old copilot_event types + Event::Copilot(_) => {} + Event::InlineCompletion(event) => { + to_upload + .inline_completion_events + .push(InlineCompletionEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + country_code.clone(), + )) + } + Event::Call(event) => to_upload.call_events.push(CallEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )), + Event::Assistant(event) => { + to_upload + .assistant_events + .push(AssistantEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )) + } + Event::Cpu(event) => to_upload.cpu_events.push(CpuEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )), + Event::Memory(event) => to_upload.memory_events.push(MemoryEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )), + Event::App(event) => to_upload.app_events.push(AppEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )), + Event::Setting(event) => to_upload.setting_events.push(SettingEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )), + Event::Edit(event) => to_upload.edit_events.push(EditEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + 
first_event_at, + )), + Event::Action(event) => to_upload.action_events.push(ActionEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + first_event_at, + )), + Event::Extension(event) => { + let metadata = app + .db + .get_extension_version(&event.extension_id, &event.version) + .await?; + to_upload + .extension_events + .push(ExtensionEventRow::from_event( + event.clone(), + &wrapper, + &request_body, + metadata, + first_event_at, + )) + } + } + } + + to_upload + .upload(&clickhouse_client) + .await + .map_err(|err| Error::Internal(anyhow!(err)))?; + + Ok(()) +} + +#[derive(Default)] +struct ToUpload { + editor_events: Vec, + inline_completion_events: Vec, + assistant_events: Vec, + call_events: Vec, + cpu_events: Vec, + memory_events: Vec, + app_events: Vec, + setting_events: Vec, + extension_events: Vec, + edit_events: Vec, + action_events: Vec, +} + +impl ToUpload { + pub async fn upload(&self, clickhouse_client: &clickhouse::Client) -> anyhow::Result<()> { + const EDITOR_EVENTS_TABLE: &str = "editor_events"; + Self::upload_to_table(EDITOR_EVENTS_TABLE, &self.editor_events, clickhouse_client) + .await + .with_context(|| format!("failed to upload to table '{EDITOR_EVENTS_TABLE}'"))?; + + const INLINE_COMPLETION_EVENTS_TABLE: &str = "inline_completion_events"; + Self::upload_to_table( + INLINE_COMPLETION_EVENTS_TABLE, + &self.inline_completion_events, + clickhouse_client, + ) + .await + .with_context(|| format!("failed to upload to table '{INLINE_COMPLETION_EVENTS_TABLE}'"))?; + + const ASSISTANT_EVENTS_TABLE: &str = "assistant_events"; + Self::upload_to_table( + ASSISTANT_EVENTS_TABLE, + &self.assistant_events, + clickhouse_client, + ) + .await + .with_context(|| format!("failed to upload to table '{ASSISTANT_EVENTS_TABLE}'"))?; + + const CALL_EVENTS_TABLE: &str = "call_events"; + Self::upload_to_table(CALL_EVENTS_TABLE, &self.call_events, clickhouse_client) + .await + .with_context(|| format!("failed to upload to table '{CALL_EVENTS_TABLE}'"))?; + + const CPU_EVENTS_TABLE: &str = "cpu_events"; + Self::upload_to_table(CPU_EVENTS_TABLE, &self.cpu_events, clickhouse_client) + .await + .with_context(|| format!("failed to upload to table '{CPU_EVENTS_TABLE}'"))?; + + const MEMORY_EVENTS_TABLE: &str = "memory_events"; + Self::upload_to_table(MEMORY_EVENTS_TABLE, &self.memory_events, clickhouse_client) + .await + .with_context(|| format!("failed to upload to table '{MEMORY_EVENTS_TABLE}'"))?; + + const APP_EVENTS_TABLE: &str = "app_events"; + Self::upload_to_table(APP_EVENTS_TABLE, &self.app_events, clickhouse_client) + .await + .with_context(|| format!("failed to upload to table '{APP_EVENTS_TABLE}'"))?; + + const SETTING_EVENTS_TABLE: &str = "setting_events"; + Self::upload_to_table( + SETTING_EVENTS_TABLE, + &self.setting_events, + clickhouse_client, + ) + .await + .with_context(|| format!("failed to upload to table '{SETTING_EVENTS_TABLE}'"))?; + + const EXTENSION_EVENTS_TABLE: &str = "extension_events"; + Self::upload_to_table( + EXTENSION_EVENTS_TABLE, + &self.extension_events, + clickhouse_client, + ) + .await + .with_context(|| format!("failed to upload to table '{EXTENSION_EVENTS_TABLE}'"))?; + + const EDIT_EVENTS_TABLE: &str = "edit_events"; + Self::upload_to_table(EDIT_EVENTS_TABLE, &self.edit_events, clickhouse_client) + .await + .with_context(|| format!("failed to upload to table '{EDIT_EVENTS_TABLE}'"))?; + + const ACTION_EVENTS_TABLE: &str = "action_events"; + Self::upload_to_table(ACTION_EVENTS_TABLE, &self.action_events, clickhouse_client) + .await + 
.with_context(|| format!("failed to upload to table '{ACTION_EVENTS_TABLE}'"))?; + + Ok(()) + } + + async fn upload_to_table( + table: &str, + rows: &[T], + clickhouse_client: &clickhouse::Client, + ) -> anyhow::Result<()> { + if !rows.is_empty() { + let mut insert = clickhouse_client.insert(table)?; + + for event in rows { + insert.write(event).await?; + } + + insert.end().await?; + + let event_count = rows.len(); + log::info!( + "wrote {event_count} {event_specifier} to '{table}'", + event_specifier = if event_count == 1 { "event" } else { "events" } + ); + } + + Ok(()) + } +} + +pub fn serialize_country_code(country_code: &str, serializer: S) -> Result +where + S: Serializer, +{ + if country_code.len() != 2 { + use serde::ser::Error; + return Err(S::Error::custom( + "country_code must be exactly 2 characters", + )); + } + + let country_code = country_code.as_bytes(); + + serializer.serialize_u16(((country_code[1] as u16) << 8) + country_code[0] as u16) +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct EditorEventRow { + pub installation_id: String, + pub operation: String, + pub app_version: String, + pub file_extension: String, + pub os_name: String, + pub os_version: String, + pub release_channel: String, + pub signed_in: bool, + pub vim_mode: bool, + #[serde(serialize_with = "serialize_country_code")] + pub country_code: String, + pub region_code: String, + pub city: String, + pub time: i64, + pub copilot_enabled: bool, + pub copilot_enabled_for_language: bool, + pub historical_event: bool, + pub architecture: String, + pub is_staff: Option, + pub session_id: Option, + pub major: Option, + pub minor: Option, + pub patch: Option, +} + +impl EditorEventRow { + fn from_event( + event: EditorEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + country_code: Option, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + os_name: body.os_name.clone(), + os_version: body.os_version.clone().unwrap_or_default(), + architecture: body.architecture.clone(), + installation_id: body.installation_id.clone().unwrap_or_default(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + operation: event.operation, + file_extension: event.file_extension.unwrap_or_default(), + signed_in: wrapper.signed_in, + vim_mode: event.vim_mode, + copilot_enabled: event.copilot_enabled, + copilot_enabled_for_language: event.copilot_enabled_for_language, + country_code: country_code.unwrap_or("XX".to_string()), + region_code: "".to_string(), + city: "".to_string(), + historical_event: false, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct InlineCompletionEventRow { + pub installation_id: String, + pub provider: String, + pub suggestion_accepted: bool, + pub app_version: String, + pub file_extension: String, + pub os_name: String, + pub os_version: String, + pub release_channel: String, + pub signed_in: bool, + #[serde(serialize_with = "serialize_country_code")] + pub country_code: String, + pub region_code: String, + pub city: String, + pub time: i64, + pub is_staff: Option, + pub session_id: Option, + pub major: Option, + pub minor: Option, + pub patch: 
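`serialize_country_code` packs the two ASCII bytes of a country code into one u16, low byte first, and refuses anything that is not exactly two characters. A sketch of the packing plus a hypothetical inverse for reading the column back:

```rust
// Mirrors the packing used by `serialize_country_code`: byte 0 goes in the low
// byte and byte 1 in the high byte, so "US" becomes 0x5355.
fn pack_country_code(country_code: &str) -> Option<u16> {
    let bytes = country_code.as_bytes();
    if bytes.len() != 2 {
        return None;
    }
    Some(((bytes[1] as u16) << 8) | bytes[0] as u16)
}

// Hypothetical inverse, handy when reading the packed column back out.
fn unpack_country_code(packed: u16) -> String {
    String::from_utf8(vec![(packed & 0xff) as u8, (packed >> 8) as u8]).unwrap()
}

fn main() {
    let packed = pack_country_code("US").unwrap();
    assert_eq!(packed, 0x5355);
    assert_eq!(unpack_country_code(packed), "US");
    assert_eq!(pack_country_code("USA"), None); // must be exactly two characters
}
```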
Option, +} + +impl InlineCompletionEventRow { + fn from_event( + event: InlineCompletionEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + country_code: Option, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + os_name: body.os_name.clone(), + os_version: body.os_version.clone().unwrap_or_default(), + installation_id: body.installation_id.clone().unwrap_or_default(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + file_extension: event.file_extension.unwrap_or_default(), + signed_in: wrapper.signed_in, + country_code: country_code.unwrap_or("XX".to_string()), + region_code: "".to_string(), + city: "".to_string(), + provider: event.provider, + suggestion_accepted: event.suggestion_accepted, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct CallEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: String, + session_id: Option, + is_staff: Option, + time: i64, + + // CallEventRow + operation: String, + room_id: Option, + channel_id: Option, +} + +impl CallEventRow { + fn from_event( + event: CallEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone().unwrap_or_default(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + operation: event.operation, + room_id: event.room_id, + channel_id: event.channel_id, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct AssistantEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + session_id: Option, + is_staff: Option, + time: i64, + + // AssistantEventRow + conversation_id: String, + kind: String, + model: String, + response_latency_in_ms: Option, + error_message: Option, +} + +impl AssistantEventRow { + fn from_event( + event: AssistantEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + conversation_id: 
event.conversation_id.unwrap_or_default(), + kind: event.kind.to_string(), + model: event.model, + response_latency_in_ms: event + .response_latency + .map(|latency| latency.as_millis() as i64), + error_message: event.error_message, + } + } +} + +#[derive(Debug, clickhouse::Row, Serialize)] +pub struct CpuEventRow { + pub installation_id: Option, + pub is_staff: Option, + pub usage_as_percentage: f32, + pub core_count: u32, + pub app_version: String, + pub release_channel: String, + pub time: i64, + pub session_id: Option, + // pub normalized_cpu_usage: f64, MATERIALIZED + pub major: Option, + pub minor: Option, + pub patch: Option, +} + +impl CpuEventRow { + fn from_event( + event: CpuEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + usage_as_percentage: event.usage_as_percentage, + core_count: event.core_count, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct MemoryEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + session_id: Option, + is_staff: Option, + time: i64, + + // MemoryEventRow + memory_in_bytes: u64, + virtual_memory_in_bytes: u64, +} + +impl MemoryEventRow { + fn from_event( + event: MemoryEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + memory_in_bytes: event.memory_in_bytes, + virtual_memory_in_bytes: event.virtual_memory_in_bytes, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct AppEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + session_id: Option, + is_staff: Option, + time: i64, + + // AppEventRow + operation: String, +} + +impl AppEventRow { + fn from_event( + event: AppEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), 
+ session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + operation: event.operation, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct SettingEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + session_id: Option, + is_staff: Option, + time: i64, + // SettingEventRow + setting: String, + value: String, +} + +impl SettingEventRow { + fn from_event( + event: SettingEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + setting: event.setting, + value: event.value, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct ExtensionEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + session_id: Option, + is_staff: Option, + time: i64, + + // ExtensionEventRow + extension_id: Arc, + extension_version: Arc, + dev: bool, + schema_version: Option, + wasm_api_version: Option, +} + +impl ExtensionEventRow { + fn from_event( + event: ExtensionEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + extension_metadata: Option, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + extension_id: event.extension_id, + extension_version: event.version, + dev: extension_metadata.is_none(), + schema_version: extension_metadata + .as_ref() + .and_then(|metadata| metadata.manifest.schema_version), + wasm_api_version: extension_metadata.as_ref().and_then(|metadata| { + metadata + .manifest + .wasm_api_version + .as_ref() + .map(|version| version.to_string()) + }), + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct EditEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + // Note: This column name has a typo in the ClickHouse table. 
+ #[serde(rename = "sesssion_id")] + session_id: Option, + is_staff: Option, + time: i64, + + // EditEventRow + period_start: i64, + period_end: i64, + environment: String, +} + +impl EditEventRow { + fn from_event( + event: EditEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + let period_start = time - chrono::Duration::milliseconds(event.duration); + let period_end = time; + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + period_start: period_start.timestamp_millis(), + period_end: period_end.timestamp_millis(), + environment: event.environment, + } + } +} + +#[derive(Serialize, Debug, clickhouse::Row)] +pub struct ActionEventRow { + // AppInfoBase + app_version: String, + major: Option, + minor: Option, + patch: Option, + release_channel: String, + + // ClientEventBase + installation_id: Option, + // Note: This column name has a typo in the ClickHouse table. + #[serde(rename = "sesssion_id")] + session_id: Option, + is_staff: Option, + time: i64, + // ActionEventRow + source: String, + action: String, +} + +impl ActionEventRow { + fn from_event( + event: ActionEvent, + wrapper: &EventWrapper, + body: &EventRequestBody, + first_event_at: chrono::DateTime, + ) -> Self { + let semver = body.semver(); + let time = + first_event_at + chrono::Duration::milliseconds(wrapper.milliseconds_since_first_event); + + Self { + app_version: body.app_version.clone(), + major: semver.map(|v| v.major() as i32), + minor: semver.map(|v| v.minor() as i32), + patch: semver.map(|v| v.patch() as i32), + release_channel: body.release_channel.clone().unwrap_or_default(), + installation_id: body.installation_id.clone(), + session_id: body.session_id.clone(), + is_staff: body.is_staff, + time: time.timestamp_millis(), + source: event.source, + action: event.action, + } + } +} + +pub fn calculate_json_checksum(app: Arc, json: &impl AsRef<[u8]>) -> Option> { + let Some(checksum_seed) = app.config.zed_client_checksum_seed.as_ref() else { + return None; + }; + + let mut summer = Sha256::new(); + summer.update(checksum_seed); + summer.update(&json); + summer.update(checksum_seed); + Some(summer.finalize().into_iter().collect()) +} diff --git a/crates/collab/src/api/extensions.rs b/crates/collab/src/api/extensions.rs new file mode 100644 index 0000000..20e35ec --- /dev/null +++ b/crates/collab/src/api/extensions.rs @@ -0,0 +1,363 @@ +use crate::db::ExtensionVersionConstraints; +use crate::{db::NewExtensionVersion, AppState, Error, Result}; +use anyhow::{anyhow, Context as _}; +use aws_sdk_s3::presigning::PresigningConfig; +use axum::{ + extract::{Path, Query}, + http::StatusCode, + response::Redirect, + routing::get, + Extension, Json, Router, +}; +use collections::HashMap; +use rpc::{ExtensionApiManifest, GetExtensionsResponse}; +use semantic_version::SemanticVersion; +use serde::Deserialize; +use std::{sync::Arc, time::Duration}; +use time::PrimitiveDateTime; +use util::{maybe, ResultExt}; + +pub fn router() -> Router { + Router::new() + .route("/extensions", get(get_extensions)) + 
.route("/extensions/updates", get(get_extension_updates)) + .route("/extensions/:extension_id", get(get_extension_versions)) + .route( + "/extensions/:extension_id/download", + get(download_latest_extension), + ) + .route( + "/extensions/:extension_id/:version/download", + get(download_extension), + ) +} + +#[derive(Debug, Deserialize)] +struct GetExtensionsParams { + filter: Option, + #[serde(default)] + ids: Option, + #[serde(default)] + max_schema_version: i32, +} + +async fn get_extensions( + Extension(app): Extension>, + Query(params): Query, +) -> Result> { + let extension_ids = params + .ids + .as_ref() + .map(|s| s.split(',').map(|s| s.trim()).collect::>()); + + let extensions = if let Some(extension_ids) = extension_ids { + app.db.get_extensions_by_ids(&extension_ids, None).await? + } else { + app.db + .get_extensions(params.filter.as_deref(), params.max_schema_version, 500) + .await? + }; + + Ok(Json(GetExtensionsResponse { data: extensions })) +} + +#[derive(Debug, Deserialize)] +struct GetExtensionUpdatesParams { + ids: String, + min_schema_version: i32, + max_schema_version: i32, + min_wasm_api_version: SemanticVersion, + max_wasm_api_version: SemanticVersion, +} + +async fn get_extension_updates( + Extension(app): Extension>, + Query(params): Query, +) -> Result> { + let constraints = ExtensionVersionConstraints { + schema_versions: params.min_schema_version..=params.max_schema_version, + wasm_api_versions: params.min_wasm_api_version..=params.max_wasm_api_version, + }; + + let extension_ids = params.ids.split(',').map(|s| s.trim()).collect::>(); + + let extensions = app + .db + .get_extensions_by_ids(&extension_ids, Some(&constraints)) + .await?; + + Ok(Json(GetExtensionsResponse { data: extensions })) +} + +#[derive(Debug, Deserialize)] +struct GetExtensionVersionsParams { + extension_id: String, +} + +async fn get_extension_versions( + Extension(app): Extension>, + Path(params): Path, +) -> Result> { + let extension_versions = app.db.get_extension_versions(¶ms.extension_id).await?; + + Ok(Json(GetExtensionsResponse { + data: extension_versions, + })) +} + +#[derive(Debug, Deserialize)] +struct DownloadLatestExtensionPathParams { + extension_id: String, +} + +#[derive(Debug, Deserialize)] +struct DownloadLatestExtensionQueryParams { + min_schema_version: Option, + max_schema_version: Option, + min_wasm_api_version: Option, + max_wasm_api_version: Option, +} + +async fn download_latest_extension( + Extension(app): Extension>, + Path(params): Path, + Query(query): Query, +) -> Result { + let constraints = maybe!({ + let min_schema_version = query.min_schema_version?; + let max_schema_version = query.max_schema_version?; + let min_wasm_api_version = query.min_wasm_api_version?; + let max_wasm_api_version = query.max_wasm_api_version?; + + Some(ExtensionVersionConstraints { + schema_versions: min_schema_version..=max_schema_version, + wasm_api_versions: min_wasm_api_version..=max_wasm_api_version, + }) + }); + + let extension = app + .db + .get_extension(¶ms.extension_id, constraints.as_ref()) + .await? 
+ .ok_or_else(|| anyhow!("unknown extension"))?; + download_extension( + Extension(app), + Path(DownloadExtensionParams { + extension_id: params.extension_id, + version: extension.manifest.version.to_string(), + }), + ) + .await +} + +#[derive(Debug, Deserialize)] +struct DownloadExtensionParams { + extension_id: String, + version: String, +} + +async fn download_extension( + Extension(app): Extension>, + Path(params): Path, +) -> Result { + let Some((blob_store_client, bucket)) = app + .blob_store_client + .clone() + .zip(app.config.blob_store_bucket.clone()) + else { + Err(Error::Http( + StatusCode::NOT_IMPLEMENTED, + "not supported".into(), + ))? + }; + + let DownloadExtensionParams { + extension_id, + version, + } = params; + + let version_exists = app + .db + .record_extension_download(&extension_id, &version) + .await?; + + if !version_exists { + Err(Error::Http( + StatusCode::NOT_FOUND, + "unknown extension version".into(), + ))?; + } + + let url = blob_store_client + .get_object() + .bucket(bucket) + .key(format!( + "extensions/{extension_id}/{version}/archive.tar.gz" + )) + .presigned(PresigningConfig::expires_in(EXTENSION_DOWNLOAD_URL_LIFETIME).unwrap()) + .await + .map_err(|e| anyhow!("failed to create presigned extension download url {e}"))?; + + Ok(Redirect::temporary(url.uri())) +} + +const EXTENSION_FETCH_INTERVAL: Duration = Duration::from_secs(5 * 60); +const EXTENSION_DOWNLOAD_URL_LIFETIME: Duration = Duration::from_secs(3 * 60); + +pub fn fetch_extensions_from_blob_store_periodically(app_state: Arc) { + let Some(blob_store_client) = app_state.blob_store_client.clone() else { + log::info!("no blob store client"); + return; + }; + let Some(blob_store_bucket) = app_state.config.blob_store_bucket.clone() else { + log::info!("no blob store bucket"); + return; + }; + + let executor = app_state.executor.clone(); + executor.spawn_detached({ + let executor = executor.clone(); + async move { + loop { + fetch_extensions_from_blob_store( + &blob_store_client, + &blob_store_bucket, + &app_state, + ) + .await + .log_err(); + executor.sleep(EXTENSION_FETCH_INTERVAL).await; + } + } + }); +} + +async fn fetch_extensions_from_blob_store( + blob_store_client: &aws_sdk_s3::Client, + blob_store_bucket: &String, + app_state: &Arc, +) -> anyhow::Result<()> { + log::info!("fetching extensions from blob store"); + + let list = blob_store_client + .list_objects() + .bucket(blob_store_bucket) + .prefix("extensions/") + .send() + .await?; + + let objects = list.contents.unwrap_or_default(); + + let mut published_versions = HashMap::<&str, Vec<&str>>::default(); + for object in &objects { + let Some(key) = object.key.as_ref() else { + continue; + }; + let mut parts = key.split('/'); + let Some(_) = parts.next().filter(|part| *part == "extensions") else { + continue; + }; + let Some(extension_id) = parts.next() else { + continue; + }; + let Some(version) = parts.next() else { + continue; + }; + if parts.next() == Some("manifest.json") { + published_versions + .entry(extension_id) + .or_default() + .push(version); + } + } + + let known_versions = app_state.db.get_known_extension_versions().await?; + + let mut new_versions = HashMap::<&str, Vec>::default(); + let empty = Vec::new(); + for (extension_id, published_versions) in published_versions { + let known_versions = known_versions.get(extension_id).unwrap_or(&empty); + + for published_version in published_versions { + if known_versions + .binary_search_by_key(&published_version, String::as_str) + .is_err() + { + if let Some(extension) = 
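The periodic blob-store scan only counts an object as a published version when its key has the exact shape extensions/&lt;id&gt;/&lt;version&gt;/manifest.json; the archive served by download_extension sits next to it as archive.tar.gz. A standalone sketch of that key check (the helper name is mine):

```rust
/// Parse a blob-store key of the form `extensions/<id>/<version>/manifest.json`,
/// returning the extension id and version, as the periodic fetch loop does.
fn parse_manifest_key(key: &str) -> Option<(&str, &str)> {
    let mut parts = key.split('/');
    if parts.next()? != "extensions" {
        return None;
    }
    let extension_id = parts.next()?;
    let version = parts.next()?;
    if parts.next()? != "manifest.json" {
        return None;
    }
    Some((extension_id, version))
}

fn main() {
    assert_eq!(
        parse_manifest_key("extensions/toml/0.0.1/manifest.json"),
        Some(("toml", "0.0.1"))
    );
    // Archives and any other keys under the prefix are ignored by the scan.
    assert_eq!(parse_manifest_key("extensions/toml/0.0.1/archive.tar.gz"), None);
}
```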
fetch_extension_manifest( + blob_store_client, + blob_store_bucket, + extension_id, + published_version, + ) + .await + .log_err() + { + new_versions + .entry(extension_id) + .or_default() + .push(extension); + } + } + } + } + + app_state + .db + .insert_extension_versions(&new_versions) + .await?; + + log::info!( + "fetched {} new extensions from blob store", + new_versions.values().map(|v| v.len()).sum::() + ); + + Ok(()) +} + +async fn fetch_extension_manifest( + blob_store_client: &aws_sdk_s3::Client, + blob_store_bucket: &String, + extension_id: &str, + version: &str, +) -> Result { + let object = blob_store_client + .get_object() + .bucket(blob_store_bucket) + .key(format!("extensions/{extension_id}/{version}/manifest.json")) + .send() + .await?; + let manifest_bytes = object + .body + .collect() + .await + .map(|data| data.into_bytes()) + .with_context(|| { + format!("failed to download manifest for extension {extension_id} version {version}") + })? + .to_vec(); + let manifest = + serde_json::from_slice::(&manifest_bytes).with_context(|| { + format!( + "invalid manifest for extension {extension_id} version {version}: {}", + String::from_utf8_lossy(&manifest_bytes) + ) + })?; + let published_at = object.last_modified.ok_or_else(|| { + anyhow!("missing last modified timestamp for extension {extension_id} version {version}") + })?; + let published_at = time::OffsetDateTime::from_unix_timestamp_nanos(published_at.as_nanos())?; + let published_at = PrimitiveDateTime::new(published_at.date(), published_at.time()); + let version = semver::Version::parse(&manifest.version).with_context(|| { + format!("invalid version for extension {extension_id} version {version}") + })?; + Ok(NewExtensionVersion { + name: manifest.name, + version, + description: manifest.description.unwrap_or_default(), + authors: manifest.authors, + repository: manifest.repository, + schema_version: manifest.schema_version.unwrap_or(0), + wasm_api_version: manifest.wasm_api_version, + published_at, + }) +} diff --git a/crates/collab/src/api/ips_file.rs b/crates/collab/src/api/ips_file.rs new file mode 100644 index 0000000..0f5fbcc --- /dev/null +++ b/crates/collab/src/api/ips_file.rs @@ -0,0 +1,351 @@ +use collections::HashMap; + +use semantic_version::SemanticVersion; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug)] +pub struct IpsFile { + pub header: Header, + pub body: Body, +} + +impl IpsFile { + pub fn parse(bytes: &[u8]) -> anyhow::Result { + let mut split = bytes.splitn(2, |&b| b == b'\n'); + let header_bytes = split + .next() + .ok_or_else(|| anyhow::anyhow!("No header found"))?; + let header: Header = serde_json::from_slice(header_bytes) + .map_err(|e| anyhow::anyhow!("Failed to parse header: {}", e))?; + + let body_bytes = split + .next() + .ok_or_else(|| anyhow::anyhow!("No body found"))?; + + let body: Body = serde_json::from_slice(body_bytes) + .map_err(|e| anyhow::anyhow!("Failed to parse body: {}", e))?; + Ok(IpsFile { header, body }) + } + + pub fn faulting_thread(&self) -> Option<&Thread> { + self.body.threads.get(self.body.faulting_thread? 
as usize) + } + + pub fn app_version(&self) -> Option { + self.header.app_version.parse().ok() + } + + pub fn timestamp(&self) -> anyhow::Result> { + chrono::DateTime::parse_from_str(&self.header.timestamp, "%Y-%m-%d %H:%M:%S%.f %#z") + .map_err(|e| anyhow::anyhow!(e)) + } + + pub fn description(&self, panic: Option<&str>) -> String { + let mut desc = if self.body.termination.indicator == "Abort trap: 6" { + match panic { + Some(panic_message) => format!("Panic `{}`", panic_message), + None => "Crash `Abort trap: 6` (possible panic)".into(), + } + } else if let Some(msg) = &self.body.exception.message { + format!("Exception `{}`", msg) + } else { + format!("Crash `{}`", self.body.termination.indicator) + }; + if let Some(thread) = self.faulting_thread() { + if let Some(queue) = thread.queue.as_ref() { + desc += &format!( + " on thread {} ({})", + self.body.faulting_thread.unwrap_or_default(), + queue + ); + } else { + desc += &format!( + " on thread {} ({})", + self.body.faulting_thread.unwrap_or_default(), + thread.name.clone().unwrap_or_default() + ); + } + } + desc + } + + pub fn backtrace_summary(&self) -> String { + if let Some(thread) = self.faulting_thread() { + let mut frames = thread + .frames + .iter() + .filter_map(|frame| { + if let Some(name) = &frame.symbol { + if self.is_ignorable_frame(name) { + return None; + } + Some(format!("{:#}", rustc_demangle::demangle(name))) + } else if let Some(image) = self.body.used_images.get(frame.image_index) { + Some(image.name.clone().unwrap_or("".into())) + } else { + Some("".into()) + } + }) + .collect::>(); + + let total = frames.len(); + if total > 21 { + frames = frames.into_iter().take(20).collect(); + frames.push(format!(" and {} more...", total - 20)) + } + frames.join("\n") + } else { + "".into() + } + } + + fn is_ignorable_frame(&self, symbol: &String) -> bool { + [ + "pthread_kill", + "panic", + "backtrace", + "rust_begin_unwind", + "abort", + ] + .iter() + .any(|s| symbol.contains(s)) + } +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(default)] +pub struct Header { + pub app_name: String, + pub timestamp: String, + pub app_version: String, + pub slice_uuid: String, + pub build_version: String, + pub platform: i64, + #[serde(rename = "bundleID", default)] + pub bundle_id: String, + pub share_with_app_devs: i64, + pub is_first_party: i64, + pub bug_type: String, + pub os_version: String, + pub roots_installed: i64, + pub name: String, + pub incident_id: String, +} +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Body { + pub uptime: i64, + pub proc_role: String, + pub version: i64, + #[serde(rename = "userID")] + pub user_id: i64, + pub deploy_version: i64, + pub model_code: String, + #[serde(rename = "coalitionID")] + pub coalition_id: i64, + pub os_version: OsVersion, + pub capture_time: String, + pub code_signing_monitor: i64, + pub incident: String, + pub pid: i64, + pub translated: bool, + pub cpu_type: String, + #[serde(rename = "roots_installed")] + pub roots_installed: i64, + #[serde(rename = "bug_type")] + pub bug_type: String, + pub proc_launch: String, + pub proc_start_abs_time: i64, + pub proc_exit_abs_time: i64, + pub proc_name: String, + pub proc_path: String, + pub bundle_info: BundleInfo, + pub store_info: StoreInfo, + pub parent_proc: String, + pub parent_pid: i64, + pub coalition_name: String, + pub crash_reporter_key: String, + #[serde(rename = "codeSigningID")] + pub code_signing_id: String, + 
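`IpsFile::parse` splits the report at the first newline: the first line is the JSON header, the rest is the JSON body, and because both structs are `#[serde(default)]` a heavily trimmed report still deserializes. A minimal sketch, assuming `IpsFile` (defined above in ips_file.rs) is in scope; the two JSON snippets are made up rather than a real Apple crash report:

```rust
fn main() -> anyhow::Result<()> {
    // First line: header JSON. Rest: body JSON. Omitted fields fall back to their
    // defaults because Header and Body are marked #[serde(default)].
    let raw = br#"{"app_name":"Zed","app_version":"0.140.0","bug_type":"309"}
{"pid":1234,"procName":"zed","faultingThread":0,"threads":[{"id":1,"name":"main","frames":[]}]}"#;

    let ips = IpsFile::parse(raw)?;
    assert_eq!(ips.header.app_name, "Zed");
    assert_eq!(ips.body.pid, 1234);
    // faulting_thread indexes into `threads`, so thread 0 is the faulting one here.
    assert!(ips.faulting_thread().is_some());
    Ok(())
}
```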
#[serde(rename = "codeSigningTeamID")] + pub code_signing_team_id: String, + pub code_signing_flags: i64, + pub code_signing_validation_category: i64, + pub code_signing_trust_level: i64, + pub instruction_byte_stream: InstructionByteStream, + pub sip: String, + pub exception: Exception, + pub termination: Termination, + pub asi: Asi, + pub ext_mods: ExtMods, + pub faulting_thread: Option, + pub threads: Vec, + pub used_images: Vec, + pub shared_cache: SharedCache, + pub vm_summary: String, + pub legacy_info: LegacyInfo, + pub log_writing_signature: String, + pub trial_info: TrialInfo, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct OsVersion { + pub train: String, + pub build: String, + pub release_type: String, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct BundleInfo { + #[serde(rename = "CFBundleShortVersionString")] + pub cfbundle_short_version_string: String, + #[serde(rename = "CFBundleVersion")] + pub cfbundle_version: String, + #[serde(rename = "CFBundleIdentifier")] + pub cfbundle_identifier: String, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct StoreInfo { + pub device_identifier_for_vendor: String, + pub third_party: bool, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct InstructionByteStream { + #[serde(rename = "beforePC")] + pub before_pc: String, + #[serde(rename = "atPC")] + pub at_pc: String, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Exception { + pub codes: String, + pub raw_codes: Vec, + #[serde(rename = "type")] + pub type_field: String, + pub subtype: Option, + pub signal: String, + pub port: Option, + pub guard_id: Option, + pub message: Option, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Termination { + pub flags: i64, + pub code: i64, + pub namespace: String, + pub indicator: String, + pub by_proc: String, + pub by_pid: i64, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Asi { + #[serde(rename = "libsystem_c.dylib")] + pub libsystem_c_dylib: Vec, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct ExtMods { + pub caller: ExtMod, + pub system: ExtMod, + pub targeted: ExtMod, + pub warnings: i64, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct ExtMod { + #[serde(rename = "thread_create")] + pub thread_create: i64, + #[serde(rename = "thread_set_state")] + pub thread_set_state: i64, + #[serde(rename = "task_for_pid")] + pub task_for_pid: i64, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Thread { + pub thread_state: HashMap, + pub id: i64, + pub triggered: Option, + pub name: Option, + pub queue: Option, + pub frames: Vec, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Frame { + pub image_offset: i64, + pub symbol: Option, + pub symbol_location: Option, + pub 
image_index: usize, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct UsedImage { + pub source: String, + pub arch: Option, + pub base: i64, + #[serde(rename = "CFBundleShortVersionString")] + pub cfbundle_short_version_string: Option, + #[serde(rename = "CFBundleIdentifier")] + pub cfbundle_identifier: Option, + pub size: i64, + pub uuid: String, + pub path: Option, + pub name: Option, + #[serde(rename = "CFBundleVersion")] + pub cfbundle_version: Option, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct SharedCache { + pub base: i64, + pub size: i64, + pub uuid: String, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct LegacyInfo { + pub thread_triggered: ThreadTriggered, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct ThreadTriggered { + pub name: String, + pub queue: String, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct TrialInfo { + pub rollouts: Vec, + pub experiments: Vec, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", default)] +pub struct Rollout { + pub rollout_id: String, + pub factor_pack_ids: HashMap, + pub deployment_id: i64, +} diff --git a/crates/collab/src/api/slack.rs b/crates/collab/src/api/slack.rs new file mode 100644 index 0000000..2f4234b --- /dev/null +++ b/crates/collab/src/api/slack.rs @@ -0,0 +1,144 @@ +use serde::{Deserialize, Serialize}; + +/// https://api.slack.com/reference/messaging/payload +#[derive(Default, Clone, Serialize, Deserialize)] +pub struct WebhookBody { + text: String, + #[serde(skip_serializing_if = "Vec::is_empty")] + blocks: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + thread_ts: Option, + #[serde(skip_serializing_if = "Option::is_none")] + mrkdwn: Option, +} + +impl WebhookBody { + pub fn new(f: impl FnOnce(Self) -> Self) -> Self { + f(Self::default()) + } + + pub fn add_section(mut self, build: impl FnOnce(Section) -> Section) -> Self { + self.blocks.push(Block::Section(build(Section::default()))); + self + } + + pub fn add_rich_text(mut self, build: impl FnOnce(RichText) -> RichText) -> Self { + self.blocks + .push(Block::RichText(build(RichText::default()))); + self + } +} + +#[derive(Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +/// https://api.slack.com/reference/block-kit/blocks +pub enum Block { + #[serde(rename = "section")] + Section(Section), + #[serde(rename = "rich_text")] + RichText(RichText), + // .... etc. +} + +/// https://api.slack.com/reference/block-kit/blocks#section +#[derive(Default, Clone, Serialize, Deserialize)] +pub struct Section { + #[serde(skip_serializing_if = "Option::is_none")] + text: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + fields: Vec, + // fields, accessories... 
+} + +impl Section { + pub fn text(mut self, text: Text) -> Self { + self.text = Some(text); + self + } + + pub fn add_field(mut self, field: Text) -> Self { + self.fields.push(field); + self + } +} + +/// https://api.slack.com/reference/block-kit/composition-objects#text +#[derive(Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum Text { + #[serde(rename = "plain_text")] + PlainText { text: String, emoji: bool }, + #[serde(rename = "mrkdwn")] + Markdown { text: String, verbatim: bool }, +} + +impl Text { + pub fn plain(s: String) -> Self { + Self::PlainText { + text: s, + emoji: true, + } + } + + pub fn markdown(s: String) -> Self { + Self::Markdown { + text: s, + verbatim: false, + } + } +} + +#[derive(Default, Clone, Serialize, Deserialize)] +pub struct RichText { + elements: Vec, +} + +impl RichText { + pub fn new(f: impl FnOnce(Self) -> Self) -> Self { + f(Self::default()) + } + + pub fn add_preformatted( + mut self, + build: impl FnOnce(RichTextPreformatted) -> RichTextPreformatted, + ) -> Self { + self.elements.push(RichTextObject::Preformatted(build( + RichTextPreformatted::default(), + ))); + self + } +} + +/// https://api.slack.com/reference/block-kit/blocks#rich_text +#[derive(Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum RichTextObject { + #[serde(rename = "rich_text_preformatted")] + Preformatted(RichTextPreformatted), + // etc. +} + +/// https://api.slack.com/reference/block-kit/blocks#rich_text_preformatted +#[derive(Clone, Default, Serialize, Deserialize)] +pub struct RichTextPreformatted { + #[serde(skip_serializing_if = "Vec::is_empty")] + elements: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + border: Option, +} + +impl RichTextPreformatted { + pub fn add_text(mut self, text: String) -> Self { + self.elements.push(RichTextElement::Text { text }); + self + } +} + +/// https://api.slack.com/reference/block-kit/blocks#element-types +#[derive(Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum RichTextElement { + #[serde(rename = "text")] + Text { text: String }, + // etc. +} diff --git a/crates/collab/src/auth.rs b/crates/collab/src/auth.rs new file mode 100644 index 0000000..915563d --- /dev/null +++ b/crates/collab/src/auth.rs @@ -0,0 +1,409 @@ +use crate::{ + db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId}, + rpc::Principal, + AppState, Error, Result, +}; +use anyhow::{anyhow, Context}; +use axum::{ + http::{self, Request, StatusCode}, + middleware::Next, + response::IntoResponse, +}; +use prometheus::{exponential_buckets, register_histogram, Histogram}; +pub use rpc::auth::random_token; +use scrypt::{ + password_hash::{PasswordHash, PasswordVerifier}, + Scrypt, +}; +use serde::{Deserialize, Serialize}; +use sha2::Digest; +use std::sync::OnceLock; +use std::{sync::Arc, time::Instant}; +use subtle::ConstantTimeEq; + +/// Validates the authorization header and adds an Extension to the request. +/// Authorization: +/// can be an access_token attached to that user, or an access token of an admin +/// or (in development) the string ADMIN:. +/// Authorization: "dev-server-token" +pub async fn validate_header(mut req: Request, next: Next) -> impl IntoResponse { + let mut auth_header = req + .headers() + .get(http::header::AUTHORIZATION) + .and_then(|header| header.to_str().ok()) + .ok_or_else(|| { + Error::Http( + StatusCode::UNAUTHORIZED, + "missing authorization header".to_string(), + ) + })? 
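Putting the slack builder together, a body like the one assembled in `post_panic` serializes to a payload along these lines. A test-style sketch, assuming the `slack` module above is in scope; the assertions reflect the serde attributes on those types, such as the always-present top-level `text` field:

```rust
fn main() -> anyhow::Result<()> {
    let payload = slack::WebhookBody::new(|w| {
        w.add_section(|s| s.text(slack::Text::markdown("Panic request".to_string())))
            .add_rich_text(|r| r.add_preformatted(|p| p.add_text("stack trace here".to_string())))
    });

    let json: serde_json::Value = serde_json::to_value(&payload)?;
    // `text` has no skip attribute, so it is always present (empty by default),
    // while empty `blocks`/`fields` vectors are skipped entirely.
    assert_eq!(json["text"], "");
    assert_eq!(json["blocks"][0]["type"], "section");
    assert_eq!(json["blocks"][0]["text"]["type"], "mrkdwn");
    assert_eq!(json["blocks"][0]["text"]["text"], "Panic request");
    assert_eq!(json["blocks"][0]["text"]["verbatim"], false);
    assert_eq!(json["blocks"][1]["type"], "rich_text");
    assert_eq!(
        json["blocks"][1]["elements"][0]["type"],
        "rich_text_preformatted"
    );
    Ok(())
}
```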
+ .split_whitespace(); + + let state = req.extensions().get::>().unwrap(); + + let first = auth_header.next().unwrap_or(""); + if first == "dev-server-token" { + let dev_server_token = auth_header.next().ok_or_else(|| { + Error::Http( + StatusCode::BAD_REQUEST, + "missing dev-server-token token in authorization header".to_string(), + ) + })?; + let dev_server = verify_dev_server_token(dev_server_token, &state.db) + .await + .map_err(|e| Error::Http(StatusCode::UNAUTHORIZED, format!("{}", e)))?; + + req.extensions_mut() + .insert(Principal::DevServer(dev_server)); + return Ok::<_, Error>(next.run(req).await); + } + + let user_id = UserId(first.parse().map_err(|_| { + Error::Http( + StatusCode::BAD_REQUEST, + "missing user id in authorization header".to_string(), + ) + })?); + + let access_token = auth_header.next().ok_or_else(|| { + Error::Http( + StatusCode::BAD_REQUEST, + "missing access token in authorization header".to_string(), + ) + })?; + + // In development, allow impersonation using the admin API token. + // Don't allow this in production because we can't tell who is doing + // the impersonating. + let validate_result = if let (Some(admin_token), true) = ( + access_token.strip_prefix("ADMIN_TOKEN:"), + state.config.is_development(), + ) { + Ok(VerifyAccessTokenResult { + is_valid: state.config.api_token == admin_token, + impersonator_id: None, + }) + } else { + verify_access_token(&access_token, user_id, &state.db).await + }; + + if let Ok(validate_result) = validate_result { + if validate_result.is_valid { + let user = state + .db + .get_user_by_id(user_id) + .await? + .ok_or_else(|| anyhow!("user {} not found", user_id))?; + + if let Some(impersonator_id) = validate_result.impersonator_id { + let admin = state + .db + .get_user_by_id(impersonator_id) + .await? + .ok_or_else(|| anyhow!("user {} not found", impersonator_id))?; + req.extensions_mut() + .insert(Principal::Impersonated { user, admin }); + } else { + req.extensions_mut().insert(Principal::User(user)); + }; + return Ok::<_, Error>(next.run(req).await); + } + } + + Err(Error::Http( + StatusCode::UNAUTHORIZED, + "invalid credentials".to_string(), + )) +} + +const MAX_ACCESS_TOKENS_TO_STORE: usize = 8; + +#[derive(Serialize, Deserialize)] +struct AccessTokenJson { + version: usize, + id: AccessTokenId, + token: String, +} + +/// Creates a new access token to identify the given user. before returning it, you should +/// encrypt it with the user's public key. +pub async fn create_access_token( + db: &db::Database, + user_id: UserId, + impersonated_user_id: Option, +) -> Result { + const VERSION: usize = 1; + let access_token = rpc::auth::random_token(); + let access_token_hash = hash_access_token(&access_token); + let id = db + .create_access_token( + user_id, + impersonated_user_id, + &access_token_hash, + MAX_ACCESS_TOKENS_TO_STORE, + ) + .await?; + Ok(serde_json::to_string(&AccessTokenJson { + version: VERSION, + id, + token: access_token, + })?) +} + +/// Hashing prevents anyone with access to the database being able to login. +/// As the token is randomly generated, we don't need to worry about scrypt-style +/// protection. +pub fn hash_access_token(token: &str) -> String { + let digest = sha2::Sha256::digest(token); + format!( + "$sha256${}", + base64::encode_config(digest, base64::URL_SAFE) + ) +} + +/// Encrypts the given access token with the given public key to avoid leaking it on the way +/// to the client. 
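`validate_header` accepts two shapes of Authorization value: "&lt;user_id&gt; &lt;access_token&gt;" for users (where, in development only, the token may start with `ADMIN_TOKEN:` for impersonation) and "dev-server-token &lt;token&gt;" for dev servers. A tiny sketch of assembling those values; the ids and tokens below are placeholders:

```rust
/// Authorization value for a signed-in user: "<user_id> <access_token>".
fn user_authorization(user_id: u32, access_token: &str) -> String {
    format!("{user_id} {access_token}")
}

/// Authorization value for a dev server: "dev-server-token <token>".
fn dev_server_authorization(dev_server_token: &str) -> String {
    format!("dev-server-token {dev_server_token}")
}

fn main() {
    // Placeholder standing in for the JSON produced by `create_access_token`
    // (after the client has decrypted it); note it contains no whitespace.
    let header = user_authorization(42, r#"{"version":1,"id":7,"token":"..."}"#);
    assert!(header.starts_with("42 "));

    let header = dev_server_authorization("3.abcdef");
    assert!(header.starts_with("dev-server-token "));
}
```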
+pub fn encrypt_access_token(access_token: &str, public_key: String) -> Result { + let native_app_public_key = + rpc::auth::PublicKey::try_from(public_key).context("failed to parse app public key")?; + let encrypted_access_token = native_app_public_key + .encrypt_string(access_token) + .context("failed to encrypt access token with public key")?; + Ok(encrypted_access_token) +} + +pub struct VerifyAccessTokenResult { + pub is_valid: bool, + pub impersonator_id: Option, +} + +/// Checks that the given access token is valid for the given user. +pub async fn verify_access_token( + token: &str, + user_id: UserId, + db: &Arc, +) -> Result { + static METRIC_ACCESS_TOKEN_HASHING_TIME: OnceLock = OnceLock::new(); + let metric_access_token_hashing_time = METRIC_ACCESS_TOKEN_HASHING_TIME.get_or_init(|| { + register_histogram!( + "access_token_hashing_time", + "time spent hashing access tokens", + exponential_buckets(10.0, 2.0, 10).unwrap(), + ) + .unwrap() + }); + + let token: AccessTokenJson = serde_json::from_str(&token)?; + + let db_token = db.get_access_token(token.id).await?; + let token_user_id = db_token.impersonated_user_id.unwrap_or(db_token.user_id); + if token_user_id != user_id { + return Err(anyhow!("no such access token"))?; + } + let t0 = Instant::now(); + + let is_valid = if db_token.hash.starts_with("$scrypt$") { + let db_hash = PasswordHash::new(&db_token.hash).map_err(anyhow::Error::new)?; + Scrypt + .verify_password(token.token.as_bytes(), &db_hash) + .is_ok() + } else { + let token_hash = hash_access_token(&token.token); + db_token.hash.as_bytes().ct_eq(token_hash.as_ref()).into() + }; + + let duration = t0.elapsed(); + log::info!("hashed access token in {:?}", duration); + metric_access_token_hashing_time.observe(duration.as_millis() as f64); + + if is_valid && db_token.hash.starts_with("$scrypt$") { + let new_hash = hash_access_token(&token.token); + db.update_access_token_hash(db_token.id, &new_hash).await?; + } + + Ok(VerifyAccessTokenResult { + is_valid, + impersonator_id: if db_token.impersonated_user_id.is_some() { + Some(db_token.user_id) + } else { + None + }, + }) +} + +pub fn generate_dev_server_token(id: usize, access_token: String) -> String { + format!("{}.{}", id, access_token) +} + +pub async fn verify_dev_server_token( + dev_server_token: &str, + db: &Arc, +) -> anyhow::Result { + let (id, token) = split_dev_server_token(dev_server_token)?; + let token_hash = hash_access_token(&token); + let server = db.get_dev_server(id).await?; + + if server + .hashed_token + .as_bytes() + .ct_eq(token_hash.as_ref()) + .into() + { + Ok(server) + } else { + Err(anyhow!("wrong token for dev server")) + } +} + +// a dev_server_token has the format .. This is to make them +// relatively easy to copy/paste around. 
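Dev server tokens have the shape "&lt;id&gt;.&lt;token&gt;", and `split_dev_server_token` (just below) only splits on the first dot, so dots inside the random token survive the round trip. A sketch, assuming both helpers are in scope and that `DevServerId` exposes its inner integer as a tuple field:

```rust
fn main() -> anyhow::Result<()> {
    // Placeholder token; the real one comes from rpc::auth::random_token().
    let token = generate_dev_server_token(3, "abc.def".to_string());
    assert_eq!(token, "3.abc.def");

    // Only the first '.' separates the id, so dots inside the token are preserved.
    let (id, rest) = split_dev_server_token(&token)?;
    assert_eq!(id.0, 3);
    assert_eq!(rest, "abc.def");
    Ok(())
}
```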
+pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> { + let mut parts = dev_server_token.splitn(2, '.'); + let id = DevServerId(parts.next().unwrap_or_default().parse()?); + let token = parts + .next() + .ok_or_else(|| anyhow!("invalid dev server token format"))?; + Ok((id, token)) +} + +#[cfg(test)] +mod test { + use rand::thread_rng; + use scrypt::password_hash::{PasswordHasher, SaltString}; + use sea_orm::EntityTrait; + + use super::*; + use crate::db::{access_token, NewUserParams}; + + #[gpui::test] + async fn test_verify_access_token(cx: &mut gpui::TestAppContext) { + let test_db = crate::db::TestDb::postgres(cx.executor().clone()); + let db = test_db.db(); + + let user = db + .create_user( + "example@example.com", + false, + NewUserParams { + github_login: "example".into(), + github_user_id: 1, + }, + ) + .await + .unwrap(); + + let token = create_access_token(&db, user.user_id, None).await.unwrap(); + assert!(matches!( + verify_access_token(&token, user.user_id, &db) + .await + .unwrap(), + VerifyAccessTokenResult { + is_valid: true, + impersonator_id: None, + } + )); + + let old_token = create_previous_access_token(user.user_id, None, &db) + .await + .unwrap(); + + let old_token_id = serde_json::from_str::(&old_token) + .unwrap() + .id; + + let hash = db + .transaction(|tx| async move { + Ok(access_token::Entity::find_by_id(old_token_id) + .one(&*tx) + .await?) + }) + .await + .unwrap() + .unwrap() + .hash; + assert!(hash.starts_with("$scrypt$")); + + assert!(matches!( + verify_access_token(&old_token, user.user_id, &db) + .await + .unwrap(), + VerifyAccessTokenResult { + is_valid: true, + impersonator_id: None, + } + )); + + let hash = db + .transaction(|tx| async move { + Ok(access_token::Entity::find_by_id(old_token_id) + .one(&*tx) + .await?) + }) + .await + .unwrap() + .unwrap() + .hash; + assert!(hash.starts_with("$sha256$")); + + assert!(matches!( + verify_access_token(&old_token, user.user_id, &db) + .await + .unwrap(), + VerifyAccessTokenResult { + is_valid: true, + impersonator_id: None, + } + )); + + assert!(matches!( + verify_access_token(&token, user.user_id, &db) + .await + .unwrap(), + VerifyAccessTokenResult { + is_valid: true, + impersonator_id: None, + } + )); + } + + async fn create_previous_access_token( + user_id: UserId, + impersonated_user_id: Option, + db: &Database, + ) -> Result { + let access_token = rpc::auth::random_token(); + let access_token_hash = previous_hash_access_token(&access_token)?; + let id = db + .create_access_token( + user_id, + impersonated_user_id, + &access_token_hash, + MAX_ACCESS_TOKENS_TO_STORE, + ) + .await?; + Ok(serde_json::to_string(&AccessTokenJson { + version: 1, + id, + token: access_token, + })?) + } + + fn previous_hash_access_token(token: &str) -> Result { + // Avoid slow hashing in debug mode. + let params = if cfg!(debug_assertions) { + scrypt::Params::new(1, 1, 1).unwrap() + } else { + scrypt::Params::new(14, 8, 1).unwrap() + }; + + Ok(Scrypt + .hash_password( + token.as_bytes(), + None, + params, + &SaltString::generate(thread_rng()), + ) + .map_err(anyhow::Error::new)? 
+ .to_string()) + } +} diff --git a/crates/collab/src/bin/dotenv.rs b/crates/collab/src/bin/dotenv.rs new file mode 100644 index 0000000..c093bcb --- /dev/null +++ b/crates/collab/src/bin/dotenv.rs @@ -0,0 +1,20 @@ +use anyhow::anyhow; +use std::fs; + +fn main() -> anyhow::Result<()> { + let env: toml::map::Map = toml::de::from_str( + &fs::read_to_string("./.env.toml").map_err(|_| anyhow!("no .env.toml file found"))?, + )?; + + for (key, value) in env { + let value = match value { + toml::Value::String(value) => value, + toml::Value::Integer(value) => value.to_string(), + toml::Value::Float(value) => value.to_string(), + _ => panic!("unsupported TOML value in .env.toml for key {}", key), + }; + println!("export {}=\"{}\"", key, value); + } + + Ok(()) +} diff --git a/crates/collab/src/completion.rs b/crates/collab/src/completion.rs new file mode 100644 index 0000000..dd1f4b3 --- /dev/null +++ b/crates/collab/src/completion.rs @@ -0,0 +1,2 @@ +use anyhow::{anyhow, Result}; +use rpc::proto; diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs new file mode 100644 index 0000000..02b182a --- /dev/null +++ b/crates/collab/src/db.rs @@ -0,0 +1,824 @@ +mod ids; +mod queries; +mod tables; +#[cfg(test)] +pub mod tests; + +use crate::{executor::Executor, Error, Result}; +use anyhow::anyhow; +use collections::{BTreeMap, HashMap, HashSet}; +use dashmap::DashMap; +use futures::StreamExt; +use rand::{prelude::StdRng, Rng, SeedableRng}; +use rpc::{ + proto::{self}, + ConnectionId, ExtensionMetadata, +}; +use sea_orm::{ + entity::prelude::*, + sea_query::{Alias, Expr, OnConflict}, + ActiveValue, Condition, ConnectionTrait, DatabaseConnection, DatabaseTransaction, DbErr, + FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement, + TransactionTrait, +}; +use semantic_version::SemanticVersion; +use serde::{Deserialize, Serialize}; +use sqlx::{ + migrate::{Migrate, Migration, MigrationSource}, + Connection, +}; +use std::ops::RangeInclusive; +use std::{ + fmt::Write as _, + future::Future, + marker::PhantomData, + ops::{Deref, DerefMut}, + path::Path, + rc::Rc, + sync::Arc, + time::Duration, +}; +use time::PrimitiveDateTime; +use tokio::sync::{Mutex, OwnedMutexGuard}; + +#[cfg(test)] +pub use tests::TestDb; + +pub use ids::*; +pub use queries::contributors::ContributorSelector; +pub use sea_orm::ConnectOptions; +pub use tables::user::Model as User; +pub use tables::*; + +/// Database gives you a handle that lets you access the database. +/// It handles pooling internally. +pub struct Database { + options: ConnectOptions, + pool: DatabaseConnection, + rooms: DashMap>>, + projects: DashMap>>, + rng: Mutex, + executor: Executor, + notification_kinds_by_id: HashMap, + notification_kinds_by_name: HashMap, + #[cfg(test)] + runtime: Option, +} + +// The `Database` type has so many methods that its impl blocks are split into +// separate files in the `queries` folder. 
+impl Database { + /// Connects to the database with the given options + pub async fn new(options: ConnectOptions, executor: Executor) -> Result { + sqlx::any::install_default_drivers(); + Ok(Self { + options: options.clone(), + pool: sea_orm::Database::connect(options).await?, + rooms: DashMap::with_capacity(16384), + projects: DashMap::with_capacity(16384), + rng: Mutex::new(StdRng::seed_from_u64(0)), + notification_kinds_by_id: HashMap::default(), + notification_kinds_by_name: HashMap::default(), + executor, + #[cfg(test)] + runtime: None, + }) + } + + #[cfg(test)] + pub fn reset(&self) { + self.rooms.clear(); + self.projects.clear(); + } + + /// Runs the database migrations. + pub async fn migrate( + &self, + migrations_path: &Path, + ignore_checksum_mismatch: bool, + ) -> anyhow::Result> { + let migrations = MigrationSource::resolve(migrations_path) + .await + .map_err(|err| anyhow!("failed to load migrations: {err:?}"))?; + + let mut connection = sqlx::AnyConnection::connect(self.options.get_url()).await?; + + connection.ensure_migrations_table().await?; + let applied_migrations: HashMap<_, _> = connection + .list_applied_migrations() + .await? + .into_iter() + .map(|m| (m.version, m)) + .collect(); + + let mut new_migrations = Vec::new(); + for migration in migrations { + match applied_migrations.get(&migration.version) { + Some(applied_migration) => { + if migration.checksum != applied_migration.checksum && !ignore_checksum_mismatch + { + Err(anyhow!( + "checksum mismatch for applied migration {}", + migration.description + ))?; + } + } + None => { + let elapsed = connection.apply(&migration).await?; + new_migrations.push((migration, elapsed)); + } + } + } + + Ok(new_migrations) + } + + /// Transaction runs things in a transaction. If you want to call other methods + /// and pass the transaction around you need to reborrow the transaction at each + /// call site with: `&*tx`. + pub async fn transaction(&self, f: F) -> Result + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>, + { + let body = async { + let mut i = 0; + loop { + let (tx, result) = self.with_transaction(&f).await?; + match result { + Ok(result) => match tx.commit().await.map_err(Into::into) { + Ok(()) => return Ok(result), + Err(error) => { + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + }, + Err(error) => { + tx.rollback().await?; + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + } + i += 1; + } + }; + + self.run(body).await + } + + pub async fn weak_transaction(&self, f: F) -> Result + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>, + { + let body = async { + let (tx, result) = self.with_weak_transaction(&f).await?; + match result { + Ok(result) => match tx.commit().await.map_err(Into::into) { + Ok(()) => return Ok(result), + Err(error) => { + return Err(error); + } + }, + Err(error) => { + tx.rollback().await?; + return Err(error); + } + } + }; + + self.run(body).await + } + + /// The same as room_transaction, but if you need to only optionally return a Room. 
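`transaction` may re-run its closure when a serializable transaction fails to commit, which is why the closure is a `Fn` and why callers reborrow the handle as `&*tx` at each call site rather than moving it. A usage sketch in the style of the tests earlier in this crate; `load_token` is a hypothetical helper, and `Database`, `AccessTokenId`, `Result`, and the `access_token` entity from db/tables are assumed to be in scope:

```rust
use sea_orm::EntityTrait;

/// Hypothetical helper showing the calling convention for `Database::transaction`.
async fn load_token(
    db: &Database,
    id: AccessTokenId,
) -> Result<Option<access_token::Model>> {
    db.transaction(|tx| async move {
        // Reborrow the handle with `&*tx`; the closure may run more than once
        // if the serializable transaction hits a conflict and is retried.
        Ok(access_token::Entity::find_by_id(id).one(&*tx).await?)
    })
    .await
}
```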
+ async fn optional_room_transaction( + &self, + f: F, + ) -> Result>> + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>>, + { + let body = async { + let mut i = 0; + loop { + let (tx, result) = self.with_transaction(&f).await?; + match result { + Ok(Some((room_id, data))) => { + let lock = self.rooms.entry(room_id).or_default().clone(); + let _guard = lock.lock_owned().await; + match tx.commit().await.map_err(Into::into) { + Ok(()) => { + return Ok(Some(TransactionGuard { + data, + _guard, + _not_send: PhantomData, + })); + } + Err(error) => { + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + } + } + Ok(None) => match tx.commit().await.map_err(Into::into) { + Ok(()) => return Ok(None), + Err(error) => { + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + }, + Err(error) => { + tx.rollback().await?; + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + } + i += 1; + } + }; + + self.run(body).await + } + + async fn project_transaction( + &self, + project_id: ProjectId, + f: F, + ) -> Result> + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>, + { + let room_id = Database::room_id_for_project(&self, project_id).await?; + let body = async { + let mut i = 0; + loop { + let lock = if let Some(room_id) = room_id { + self.rooms.entry(room_id).or_default().clone() + } else { + self.projects.entry(project_id).or_default().clone() + }; + let _guard = lock.lock_owned().await; + let (tx, result) = self.with_transaction(&f).await?; + match result { + Ok(data) => match tx.commit().await.map_err(Into::into) { + Ok(()) => { + return Ok(TransactionGuard { + data, + _guard, + _not_send: PhantomData, + }); + } + Err(error) => { + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + }, + Err(error) => { + tx.rollback().await?; + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + } + i += 1; + } + }; + + self.run(body).await + } + + /// room_transaction runs the block in a transaction. It returns a RoomGuard, that keeps + /// the database locked until it is dropped. This ensures that updates sent to clients are + /// properly serialized with respect to database changes. 
+ async fn room_transaction( + &self, + room_id: RoomId, + f: F, + ) -> Result> + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>, + { + let body = async { + let mut i = 0; + loop { + let lock = self.rooms.entry(room_id).or_default().clone(); + let _guard = lock.lock_owned().await; + let (tx, result) = self.with_transaction(&f).await?; + match result { + Ok(data) => match tx.commit().await.map_err(Into::into) { + Ok(()) => { + return Ok(TransactionGuard { + data, + _guard, + _not_send: PhantomData, + }); + } + Err(error) => { + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + }, + Err(error) => { + tx.rollback().await?; + if !self.retry_on_serialization_error(&error, i).await { + return Err(error); + } + } + } + i += 1; + } + }; + + self.run(body).await + } + + async fn with_transaction(&self, f: &F) -> Result<(DatabaseTransaction, Result)> + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>, + { + let tx = self + .pool + .begin_with_config(Some(IsolationLevel::Serializable), None) + .await?; + + let mut tx = Arc::new(Some(tx)); + let result = f(TransactionHandle(tx.clone())).await; + let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else { + return Err(anyhow!( + "couldn't complete transaction because it's still in use" + ))?; + }; + + Ok((tx, result)) + } + + async fn with_weak_transaction( + &self, + f: &F, + ) -> Result<(DatabaseTransaction, Result)> + where + F: Send + Fn(TransactionHandle) -> Fut, + Fut: Send + Future>, + { + let tx = self + .pool + .begin_with_config(Some(IsolationLevel::ReadCommitted), None) + .await?; + + let mut tx = Arc::new(Some(tx)); + let result = f(TransactionHandle(tx.clone())).await; + let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else { + return Err(anyhow!( + "couldn't complete transaction because it's still in use" + ))?; + }; + + Ok((tx, result)) + } + + async fn run(&self, future: F) -> Result + where + F: Future>, + { + #[cfg(test)] + { + if let Executor::Deterministic(executor) = &self.executor { + executor.simulate_random_delay().await; + } + + self.runtime.as_ref().unwrap().block_on(future) + } + + #[cfg(not(test))] + { + future.await + } + } + + async fn retry_on_serialization_error(&self, error: &Error, prev_attempt_count: usize) -> bool { + // If the error is due to a failure to serialize concurrent transactions, then retry + // this transaction after a delay. With each subsequent retry, double the delay duration. + // Also vary the delay randomly in order to ensure different database connections retry + // at different times. + const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.]; + if is_serialization_error(error) && prev_attempt_count < SLEEPS.len() { + let base_delay = SLEEPS[prev_attempt_count]; + let randomized_delay = base_delay * self.rng.lock().await.gen_range(0.5..=2.0); + log::warn!( + "retrying transaction after serialization error. 
delay: {} ms.", + randomized_delay + ); + self.executor + .sleep(Duration::from_millis(randomized_delay as u64)) + .await; + true + } else { + false + } + } +} + +fn is_serialization_error(error: &Error) -> bool { + const SERIALIZATION_FAILURE_CODE: &str = "40001"; + match error { + Error::Database( + DbErr::Exec(sea_orm::RuntimeErr::SqlxError(error)) + | DbErr::Query(sea_orm::RuntimeErr::SqlxError(error)), + ) if error + .as_database_error() + .and_then(|error| error.code()) + .as_deref() + == Some(SERIALIZATION_FAILURE_CODE) => + { + true + } + _ => false, + } +} + +/// A handle to a [`DatabaseTransaction`]. +pub struct TransactionHandle(Arc>); + +impl Deref for TransactionHandle { + type Target = DatabaseTransaction; + + fn deref(&self) -> &Self::Target { + self.0.as_ref().as_ref().unwrap() + } +} + +/// [`TransactionGuard`] keeps a database transaction alive until it is dropped. +/// It wraps data that depends on the state of the database and prevents an additional +/// transaction from starting that would invalidate that data. +pub struct TransactionGuard { + data: T, + _guard: OwnedMutexGuard<()>, + _not_send: PhantomData>, +} + +impl Deref for TransactionGuard { + type Target = T; + + fn deref(&self) -> &T { + &self.data + } +} + +impl DerefMut for TransactionGuard { + fn deref_mut(&mut self) -> &mut T { + &mut self.data + } +} + +impl TransactionGuard { + /// Returns the inner value of the guard. + pub fn into_inner(self) -> T { + self.data + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Contact { + Accepted { user_id: UserId, busy: bool }, + Outgoing { user_id: UserId }, + Incoming { user_id: UserId }, +} + +impl Contact { + pub fn user_id(&self) -> UserId { + match self { + Contact::Accepted { user_id, .. } => *user_id, + Contact::Outgoing { user_id } => *user_id, + Contact::Incoming { user_id, .. } => *user_id, + } + } +} + +pub type NotificationBatch = Vec<(UserId, proto::Notification)>; + +pub struct CreatedChannelMessage { + pub message_id: MessageId, + pub participant_connection_ids: HashSet, + pub notifications: NotificationBatch, +} + +pub struct UpdatedChannelMessage { + pub message_id: MessageId, + pub participant_connection_ids: Vec, + pub notifications: NotificationBatch, + pub reply_to_message_id: Option, + pub timestamp: PrimitiveDateTime, + pub deleted_mention_notification_ids: Vec, + pub updated_mention_notifications: Vec, +} + +#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)] +pub struct Invite { + pub email_address: String, + pub email_confirmation_code: String, +} + +#[derive(Clone, Debug, Deserialize)] +pub struct NewSignup { + pub email_address: String, + pub platform_mac: bool, + pub platform_windows: bool, + pub platform_linux: bool, + pub editor_features: Vec, + pub programming_languages: Vec, + pub device_id: Option, + pub added_to_mailing_list: bool, + pub created_at: Option, +} + +#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)] +pub struct WaitlistSummary { + pub count: i64, + pub linux_count: i64, + pub mac_count: i64, + pub windows_count: i64, + pub unknown_count: i64, +} + +/// The parameters to create a new user. +#[derive(Debug, Serialize, Deserialize)] +pub struct NewUserParams { + pub github_login: String, + pub github_user_id: i32, +} + +/// The result of creating a new user. 
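For reference, `retry_on_serialization_error` above derives its delays from the `SLEEPS` table plus random jitter. This standalone sketch reproduces that calculation, substituting `rand::thread_rng` for the struct's seeded RNG purely for illustration:

use rand::Rng;

/// Jittered delay, in milliseconds, for the given retry attempt, or `None`
/// once the ten-entry `SLEEPS` budget is exhausted.
fn retry_delay_ms(prev_attempt_count: usize) -> Option<u64> {
    const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.];
    let base = *SLEEPS.get(prev_attempt_count)?;
    // The base delay doubles with each attempt and is scaled by a random
    // factor in [0.5, 2.0] so concurrent transactions don't retry in lockstep.
    let factor = rand::thread_rng().gen_range(0.5_f32..=2.0);
    Some((base * factor) as u64)
}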
+#[derive(Debug)] +pub struct NewUserResult { + pub user_id: UserId, + pub metrics_id: String, + pub inviting_user_id: Option, + pub signup_device_id: Option, +} + +/// The result of updating a channel membership. +#[derive(Debug)] +pub struct MembershipUpdated { + pub channel_id: ChannelId, + pub new_channels: ChannelsForUser, + pub removed_channels: Vec, +} + +/// The result of setting a member's role. +#[derive(Debug)] +#[allow(clippy::large_enum_variant)] +pub enum SetMemberRoleResult { + InviteUpdated(Channel), + MembershipUpdated(MembershipUpdated), +} + +/// The result of inviting a member to a channel. +#[derive(Debug)] +pub struct InviteMemberResult { + pub channel: Channel, + pub notifications: NotificationBatch, +} + +#[derive(Debug)] +pub struct RespondToChannelInvite { + pub membership_update: Option, + pub notifications: NotificationBatch, +} + +#[derive(Debug)] +pub struct RemoveChannelMemberResult { + pub membership_update: MembershipUpdated, + pub notification_id: Option, +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct Channel { + pub id: ChannelId, + pub name: String, + pub visibility: ChannelVisibility, + /// parent_path is the channel ids from the root to this one (not including this one) + pub parent_path: Vec, +} + +impl Channel { + pub fn from_model(value: channel::Model) -> Self { + Channel { + id: value.id, + visibility: value.visibility, + name: value.clone().name, + parent_path: value.ancestors().collect(), + } + } + + pub fn to_proto(&self) -> proto::Channel { + proto::Channel { + id: self.id.to_proto(), + name: self.name.clone(), + visibility: self.visibility.into(), + parent_path: self.parent_path.iter().map(|c| c.to_proto()).collect(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct ChannelMember { + pub role: ChannelRole, + pub user_id: UserId, + pub kind: proto::channel_member::Kind, +} + +impl ChannelMember { + pub fn to_proto(&self) -> proto::ChannelMember { + proto::ChannelMember { + role: self.role.into(), + user_id: self.user_id.to_proto(), + kind: self.kind.into(), + } + } +} + +#[derive(Debug, PartialEq)] +pub struct ChannelsForUser { + pub channels: Vec, + pub channel_memberships: Vec, + pub channel_participants: HashMap>, + pub hosted_projects: Vec, + + pub observed_buffer_versions: Vec, + pub observed_channel_messages: Vec, + pub latest_buffer_versions: Vec, + pub latest_channel_messages: Vec, +} + +#[derive(Debug)] +pub struct RejoinedChannelBuffer { + pub buffer: proto::RejoinedChannelBuffer, + pub old_connection_id: ConnectionId, +} + +#[derive(Clone)] +pub struct JoinRoom { + pub room: proto::Room, + pub channel: Option, +} + +pub struct RejoinedRoom { + pub room: proto::Room, + pub rejoined_projects: Vec, + pub reshared_projects: Vec, + pub channel: Option, +} + +pub struct ResharedProject { + pub id: ProjectId, + pub old_connection_id: ConnectionId, + pub collaborators: Vec, + pub worktrees: Vec, +} + +pub struct RejoinedProject { + pub id: ProjectId, + pub old_connection_id: ConnectionId, + pub collaborators: Vec, + pub worktrees: Vec, + pub language_servers: Vec, +} + +impl RejoinedProject { + pub fn to_proto(&self) -> proto::RejoinedProject { + proto::RejoinedProject { + id: self.id.to_proto(), + worktrees: self + .worktrees + .iter() + .map(|worktree| proto::WorktreeMetadata { + id: worktree.id, + root_name: worktree.root_name.clone(), + visible: worktree.visible, + abs_path: worktree.abs_path.clone(), + }) + .collect(), + collaborators: self + .collaborators + .iter() + .map(|collaborator| 
collaborator.to_proto()) + .collect(), + language_servers: self.language_servers.clone(), + } + } +} + +#[derive(Debug)] +pub struct RejoinedWorktree { + pub id: u64, + pub abs_path: String, + pub root_name: String, + pub visible: bool, + pub updated_entries: Vec, + pub removed_entries: Vec, + pub updated_repositories: Vec, + pub removed_repositories: Vec, + pub diagnostic_summaries: Vec, + pub settings_files: Vec, + pub scan_id: u64, + pub completed_scan_id: u64, +} + +pub struct LeftRoom { + pub room: proto::Room, + pub channel: Option, + pub left_projects: HashMap, + pub canceled_calls_to_user_ids: Vec, + pub deleted: bool, +} + +pub struct RefreshedRoom { + pub room: proto::Room, + pub channel: Option, + pub stale_participant_user_ids: Vec, + pub canceled_calls_to_user_ids: Vec, +} + +pub struct RefreshedChannelBuffer { + pub connection_ids: Vec, + pub collaborators: Vec, +} + +pub struct Project { + pub id: ProjectId, + pub role: ChannelRole, + pub collaborators: Vec, + pub worktrees: BTreeMap, + pub language_servers: Vec, + pub dev_server_project_id: Option, +} + +pub struct ProjectCollaborator { + pub connection_id: ConnectionId, + pub user_id: UserId, + pub replica_id: ReplicaId, + pub is_host: bool, +} + +impl ProjectCollaborator { + pub fn to_proto(&self) -> proto::Collaborator { + proto::Collaborator { + peer_id: Some(self.connection_id.into()), + replica_id: self.replica_id.0 as u32, + user_id: self.user_id.to_proto(), + } + } +} + +#[derive(Debug)] +pub struct LeftProject { + pub id: ProjectId, + pub should_unshare: bool, + pub connection_ids: Vec, +} + +pub struct Worktree { + pub id: u64, + pub abs_path: String, + pub root_name: String, + pub visible: bool, + pub entries: Vec, + pub repository_entries: BTreeMap, + pub diagnostic_summaries: Vec, + pub settings_files: Vec, + pub scan_id: u64, + pub completed_scan_id: u64, +} + +#[derive(Debug)] +pub struct WorktreeSettingsFile { + pub path: String, + pub content: String, +} + +pub struct NewExtensionVersion { + pub name: String, + pub version: semver::Version, + pub description: String, + pub authors: Vec, + pub repository: String, + pub schema_version: i32, + pub wasm_api_version: Option, + pub published_at: PrimitiveDateTime, +} + +pub struct ExtensionVersionConstraints { + pub schema_versions: RangeInclusive, + pub wasm_api_versions: RangeInclusive, +} diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs new file mode 100644 index 0000000..699189a --- /dev/null +++ b/crates/collab/src/db/ids.rs @@ -0,0 +1,288 @@ +use crate::Result; +use rpc::proto; +use sea_orm::{entity::prelude::*, DbErr}; +use serde::{Deserialize, Serialize}; + +macro_rules! 
id_type { + ($name:ident) => { + #[derive( + Clone, + Copy, + Debug, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + DeriveValueType, + )] + #[allow(missing_docs)] + #[serde(transparent)] + pub struct $name(pub i32); + + impl $name { + #[allow(unused)] + #[allow(missing_docs)] + pub const MAX: Self = Self(i32::MAX); + + #[allow(unused)] + #[allow(missing_docs)] + pub fn from_proto(value: u64) -> Self { + Self(value as i32) + } + + #[allow(unused)] + #[allow(missing_docs)] + pub fn to_proto(self) -> u64 { + self.0 as u64 + } + } + + impl std::fmt::Display for $name { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + self.0.fmt(f) + } + } + + impl sea_orm::TryFromU64 for $name { + fn try_from_u64(n: u64) -> Result { + Ok(Self(n.try_into().map_err(|_| { + DbErr::ConvertFromU64(concat!( + "error converting ", + stringify!($name), + " to u64" + )) + })?)) + } + } + + impl sea_orm::sea_query::Nullable for $name { + fn null() -> Value { + Value::Int(None) + } + } + }; +} + +id_type!(AccessTokenId); +id_type!(BufferId); +id_type!(ChannelBufferCollaboratorId); +id_type!(ChannelChatParticipantId); +id_type!(ChannelId); +id_type!(ChannelMemberId); +id_type!(ContactId); +id_type!(DevServerId); +id_type!(ExtensionId); +id_type!(FlagId); +id_type!(FollowerId); +id_type!(HostedProjectId); +id_type!(MessageId); +id_type!(NotificationId); +id_type!(NotificationKindId); +id_type!(ProjectCollaboratorId); +id_type!(ProjectId); +id_type!(DevServerProjectId); +id_type!(ReplicaId); +id_type!(RoomId); +id_type!(RoomParticipantId); +id_type!(ServerId); +id_type!(SignupId); +id_type!(UserId); + +/// ChannelRole gives you permissions for both channels and calls. +#[derive( + Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash, Serialize, +)] +#[sea_orm(rs_type = "String", db_type = "String(None)")] +pub enum ChannelRole { + /// Admin can read/write and change permissions. + #[sea_orm(string_value = "admin")] + Admin, + /// Member can read/write, but not change pemissions. + #[sea_orm(string_value = "member")] + #[default] + Member, + /// Talker can read, but not write. + /// They can use microphones and the channel chat + #[sea_orm(string_value = "talker")] + Talker, + /// Guest can read, but not write. + /// They can not use microphones but can use the chat. + #[sea_orm(string_value = "guest")] + Guest, + /// Banned may not read. + #[sea_orm(string_value = "banned")] + Banned, +} + +impl ChannelRole { + /// Returns true if this role is more powerful than the other role. 
+ pub fn should_override(&self, other: Self) -> bool { + use ChannelRole::*; + match self { + Admin => matches!(other, Member | Banned | Talker | Guest), + Member => matches!(other, Banned | Talker | Guest), + Talker => matches!(other, Guest), + Banned => matches!(other, Guest), + Guest => false, + } + } + + /// Returns the maximal role between the two + pub fn max(&self, other: Self) -> Self { + if self.should_override(other) { + *self + } else { + other + } + } + + pub fn can_see_channel(&self, visibility: ChannelVisibility) -> bool { + use ChannelRole::*; + match self { + Admin | Member => true, + Guest | Talker => visibility == ChannelVisibility::Public, + Banned => false, + } + } + + /// True if the role allows access to all descendant channels + pub fn can_see_all_descendants(&self) -> bool { + use ChannelRole::*; + match self { + Admin | Member => true, + Guest | Talker | Banned => false, + } + } + + /// True if the role only allows access to public descendant channels + pub fn can_only_see_public_descendants(&self) -> bool { + use ChannelRole::*; + match self { + Guest | Talker => true, + Admin | Member | Banned => false, + } + } + + /// True if the role can share screen/microphone/projects into rooms. + pub fn can_use_microphone(&self) -> bool { + use ChannelRole::*; + match self { + Admin | Member | Talker => true, + Guest | Banned => false, + } + } + + /// True if the role can edit shared projects. + pub fn can_edit_projects(&self) -> bool { + use ChannelRole::*; + match self { + Admin | Member => true, + Talker | Guest | Banned => false, + } + } + + /// True if the role can read shared projects. + pub fn can_read_projects(&self) -> bool { + use ChannelRole::*; + match self { + Admin | Member | Guest | Talker => true, + Banned => false, + } + } + + pub fn requires_cla(&self) -> bool { + use ChannelRole::*; + match self { + Admin | Member => true, + Banned | Guest | Talker => false, + } + } +} + +impl From for ChannelRole { + fn from(value: proto::ChannelRole) -> Self { + match value { + proto::ChannelRole::Admin => ChannelRole::Admin, + proto::ChannelRole::Member => ChannelRole::Member, + proto::ChannelRole::Talker => ChannelRole::Talker, + proto::ChannelRole::Guest => ChannelRole::Guest, + proto::ChannelRole::Banned => ChannelRole::Banned, + } + } +} + +impl Into for ChannelRole { + fn into(self) -> proto::ChannelRole { + match self { + ChannelRole::Admin => proto::ChannelRole::Admin, + ChannelRole::Member => proto::ChannelRole::Member, + ChannelRole::Talker => proto::ChannelRole::Talker, + ChannelRole::Guest => proto::ChannelRole::Guest, + ChannelRole::Banned => proto::ChannelRole::Banned, + } + } +} + +impl Into for ChannelRole { + fn into(self) -> i32 { + let proto: proto::ChannelRole = self.into(); + proto.into() + } +} + +/// ChannelVisibility controls whether channels are public or private. +#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash)] +#[sea_orm(rs_type = "String", db_type = "String(None)")] +pub enum ChannelVisibility { + /// Public channels are visible to anyone with the link. People join with the Guest role by default. + #[sea_orm(string_value = "public")] + Public, + /// Members channels are only visible to members of this channel or its parents. 
+ #[sea_orm(string_value = "members")] + #[default] + Members, +} + +impl From for ChannelVisibility { + fn from(value: proto::ChannelVisibility) -> Self { + match value { + proto::ChannelVisibility::Public => ChannelVisibility::Public, + proto::ChannelVisibility::Members => ChannelVisibility::Members, + } + } +} + +impl Into for ChannelVisibility { + fn into(self) -> proto::ChannelVisibility { + match self { + ChannelVisibility::Public => proto::ChannelVisibility::Public, + ChannelVisibility::Members => proto::ChannelVisibility::Members, + } + } +} + +impl Into for ChannelVisibility { + fn into(self) -> i32 { + let proto: proto::ChannelVisibility = self.into(); + proto.into() + } +} + +#[derive(Copy, Clone, Debug, Serialize, PartialEq)] +pub enum PrincipalId { + UserId(UserId), + DevServerId(DevServerId), +} + +/// Indicate whether a [Buffer] has permissions to edit. +#[derive(PartialEq, Clone, Copy, Debug)] +pub enum Capability { + /// The buffer is a mutable replica. + ReadWrite, + /// The buffer is a read-only replica. + ReadOnly, +} diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs new file mode 100644 index 0000000..31301a0 --- /dev/null +++ b/crates/collab/src/db/queries.rs @@ -0,0 +1,19 @@ +use super::*; + +pub mod access_tokens; +pub mod buffers; +pub mod channels; +pub mod contacts; +pub mod contributors; +pub mod dev_server_projects; +pub mod dev_servers; +pub mod embeddings; +pub mod extensions; +pub mod hosted_projects; +pub mod messages; +pub mod notifications; +pub mod projects; +pub mod rate_buckets; +pub mod rooms; +pub mod servers; +pub mod users; diff --git a/crates/collab/src/db/queries/access_tokens.rs b/crates/collab/src/db/queries/access_tokens.rs new file mode 100644 index 0000000..f251cda --- /dev/null +++ b/crates/collab/src/db/queries/access_tokens.rs @@ -0,0 +1,76 @@ +use super::*; +use sea_orm::sea_query::Query; + +impl Database { + /// Creates a new access token for the given user. + pub async fn create_access_token( + &self, + user_id: UserId, + impersonated_user_id: Option, + access_token_hash: &str, + max_access_token_count: usize, + ) -> Result { + self.transaction(|tx| async { + let tx = tx; + + let token = access_token::ActiveModel { + user_id: ActiveValue::set(user_id), + impersonated_user_id: ActiveValue::set(impersonated_user_id), + hash: ActiveValue::set(access_token_hash.into()), + ..Default::default() + } + .insert(&*tx) + .await?; + + access_token::Entity::delete_many() + .filter( + access_token::Column::Id.in_subquery( + Query::select() + .column(access_token::Column::Id) + .from(access_token::Entity) + .and_where(access_token::Column::UserId.eq(user_id)) + .order_by(access_token::Column::Id, sea_orm::Order::Desc) + .limit(10000) + .offset(max_access_token_count as u64) + .to_owned(), + ), + ) + .exec(&*tx) + .await?; + Ok(token.id) + }) + .await + } + + /// Retrieves the access token with the given ID. + pub async fn get_access_token( + &self, + access_token_id: AccessTokenId, + ) -> Result { + self.transaction(|tx| async move { + Ok(access_token::Entity::find_by_id(access_token_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such access token"))?) + }) + .await + } + + /// Retrieves the access token with the given ID. 
+ pub async fn update_access_token_hash( + &self, + id: AccessTokenId, + new_hash: &str, + ) -> Result { + self.transaction(|tx| async move { + Ok(access_token::Entity::update(access_token::ActiveModel { + id: ActiveValue::unchanged(id), + hash: ActiveValue::set(new_hash.into()), + ..Default::default() + }) + .exec(&*tx) + .await?) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs new file mode 100644 index 0000000..810d89b --- /dev/null +++ b/crates/collab/src/db/queries/buffers.rs @@ -0,0 +1,1008 @@ +use super::*; +use prost::Message; +use text::{EditOperation, UndoOperation}; + +pub struct LeftChannelBuffer { + pub channel_id: ChannelId, + pub collaborators: Vec, + pub connections: Vec, +} + +impl Database { + /// Open a channel buffer. Returns the current contents, and adds you to the list of people + /// to notify on changes. + pub async fn join_channel_buffer( + &self, + channel_id: ChannelId, + user_id: UserId, + connection: ConnectionId, + ) -> Result { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + + let buffer = channel::Model { + id: channel_id, + ..Default::default() + } + .find_related(buffer::Entity) + .one(&*tx) + .await?; + + let buffer = if let Some(buffer) = buffer { + buffer + } else { + let buffer = buffer::ActiveModel { + channel_id: ActiveValue::Set(channel_id), + ..Default::default() + } + .insert(&*tx) + .await?; + buffer_snapshot::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(0), + text: ActiveValue::Set(String::new()), + operation_serialization_version: ActiveValue::Set( + storage::SERIALIZATION_VERSION, + ), + } + .insert(&*tx) + .await?; + buffer + }; + + // Join the collaborators + let mut collaborators = channel_buffer_collaborator::Entity::find() + .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) + .all(&*tx) + .await?; + let replica_ids = collaborators + .iter() + .map(|c| c.replica_id) + .collect::>(); + let mut replica_id = ReplicaId(0); + while replica_ids.contains(&replica_id) { + replica_id.0 += 1; + } + let collaborator = channel_buffer_collaborator::ActiveModel { + channel_id: ActiveValue::Set(channel_id), + connection_id: ActiveValue::Set(connection.id as i32), + connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)), + user_id: ActiveValue::Set(user_id), + replica_id: ActiveValue::Set(replica_id), + ..Default::default() + } + .insert(&*tx) + .await?; + collaborators.push(collaborator); + + let (base_text, operations, max_operation) = + self.get_buffer_state(&buffer, &tx).await?; + + // Save the last observed operation + if let Some(op) = max_operation { + observed_buffer_edits::Entity::insert(observed_buffer_edits::ActiveModel { + user_id: ActiveValue::Set(user_id), + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(op.epoch), + lamport_timestamp: ActiveValue::Set(op.lamport_timestamp), + replica_id: ActiveValue::Set(op.replica_id), + }) + .on_conflict( + OnConflict::columns([ + observed_buffer_edits::Column::UserId, + observed_buffer_edits::Column::BufferId, + ]) + .update_columns([ + observed_buffer_edits::Column::Epoch, + observed_buffer_edits::Column::LamportTimestamp, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + } + + Ok(proto::JoinChannelBufferResponse { + buffer_id: buffer.id.to_proto(), + replica_id: replica_id.to_proto() as u32, + base_text, 
+ operations, + epoch: buffer.epoch as u64, + collaborators: collaborators + .into_iter() + .map(|collaborator| proto::Collaborator { + peer_id: Some(collaborator.connection().into()), + user_id: collaborator.user_id.to_proto(), + replica_id: collaborator.replica_id.0 as u32, + }) + .collect(), + }) + }) + .await + } + + /// Rejoin a channel buffer (after a connection interruption) + pub async fn rejoin_channel_buffers( + &self, + buffers: &[proto::ChannelBufferVersion], + user_id: UserId, + connection_id: ConnectionId, + ) -> Result> { + self.transaction(|tx| async move { + let mut results = Vec::new(); + for client_buffer in buffers { + let channel = self + .get_channel_internal(ChannelId::from_proto(client_buffer.channel_id), &tx) + .await?; + if self + .check_user_is_channel_participant(&channel, user_id, &tx) + .await + .is_err() + { + log::info!("user is not a member of channel"); + continue; + } + + let buffer = self.get_channel_buffer(channel.id, &tx).await?; + let mut collaborators = channel_buffer_collaborator::Entity::find() + .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel.id)) + .all(&*tx) + .await?; + + // If the buffer epoch hasn't changed since the client lost + // connection, then the client's buffer can be synchronized with + // the server's buffer. + if buffer.epoch as u64 != client_buffer.epoch { + log::info!("can't rejoin buffer, epoch has changed"); + continue; + } + + // Find the collaborator record for this user's previous lost + // connection. Update it with the new connection id. + let Some(self_collaborator) = + collaborators.iter_mut().find(|c| c.user_id == user_id) + else { + log::info!("can't rejoin buffer, no previous collaborator found"); + continue; + }; + let old_connection_id = self_collaborator.connection(); + *self_collaborator = channel_buffer_collaborator::ActiveModel { + id: ActiveValue::Unchanged(self_collaborator.id), + connection_id: ActiveValue::Set(connection_id.id as i32), + connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)), + connection_lost: ActiveValue::Set(false), + ..Default::default() + } + .update(&*tx) + .await?; + + let client_version = version_from_wire(&client_buffer.version); + let serialization_version = self + .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &tx) + .await?; + + let mut rows = buffer_operation::Entity::find() + .filter( + buffer_operation::Column::BufferId + .eq(buffer.id) + .and(buffer_operation::Column::Epoch.eq(buffer.epoch)), + ) + .stream(&*tx) + .await?; + + // Find the server's version vector and any operations + // that the client has not seen. 
+ let mut server_version = clock::Global::new(); + let mut operations = Vec::new(); + while let Some(row) = rows.next().await { + let row = row?; + let timestamp = clock::Lamport { + replica_id: row.replica_id as u16, + value: row.lamport_timestamp as u32, + }; + server_version.observe(timestamp); + if !client_version.observed(timestamp) { + operations.push(proto::Operation { + variant: Some(operation_from_storage(row, serialization_version)?), + }) + } + } + + results.push(RejoinedChannelBuffer { + old_connection_id, + buffer: proto::RejoinedChannelBuffer { + channel_id: client_buffer.channel_id, + version: version_to_wire(&server_version), + operations, + collaborators: collaborators + .into_iter() + .map(|collaborator| proto::Collaborator { + peer_id: Some(collaborator.connection().into()), + user_id: collaborator.user_id.to_proto(), + replica_id: collaborator.replica_id.0 as u32, + }) + .collect(), + }, + }); + } + + Ok(results) + }) + .await + } + + /// Clear out any buffer collaborators who are no longer collaborating. + pub async fn clear_stale_channel_buffer_collaborators( + &self, + channel_id: ChannelId, + server_id: ServerId, + ) -> Result { + self.transaction(|tx| async move { + let db_collaborators = channel_buffer_collaborator::Entity::find() + .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) + .all(&*tx) + .await?; + + let mut connection_ids = Vec::new(); + let mut collaborators = Vec::new(); + let mut collaborator_ids_to_remove = Vec::new(); + for db_collaborator in &db_collaborators { + if !db_collaborator.connection_lost + && db_collaborator.connection_server_id == server_id + { + connection_ids.push(db_collaborator.connection()); + collaborators.push(proto::Collaborator { + peer_id: Some(db_collaborator.connection().into()), + replica_id: db_collaborator.replica_id.0 as u32, + user_id: db_collaborator.user_id.to_proto(), + }) + } else { + collaborator_ids_to_remove.push(db_collaborator.id); + } + } + + channel_buffer_collaborator::Entity::delete_many() + .filter(channel_buffer_collaborator::Column::Id.is_in(collaborator_ids_to_remove)) + .exec(&*tx) + .await?; + + Ok(RefreshedChannelBuffer { + connection_ids, + collaborators, + }) + }) + .await + } + + /// Close the channel buffer, and stop receiving updates for it. + pub async fn leave_channel_buffer( + &self, + channel_id: ChannelId, + connection: ConnectionId, + ) -> Result { + self.transaction(|tx| async move { + self.leave_channel_buffer_internal(channel_id, connection, &tx) + .await + }) + .await + } + + /// Close the channel buffer, and stop receiving updates for it. 
+ pub async fn channel_buffer_connection_lost( + &self, + connection: ConnectionId, + tx: &DatabaseTransaction, + ) -> Result<()> { + channel_buffer_collaborator::Entity::update_many() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32)) + .add( + channel_buffer_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .set(channel_buffer_collaborator::ActiveModel { + connection_lost: ActiveValue::set(true), + ..Default::default() + }) + .exec(tx) + .await?; + Ok(()) + } + + /// Close all open channel buffers + pub async fn leave_channel_buffers( + &self, + connection: ConnectionId, + ) -> Result> { + self.transaction(|tx| async move { + #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)] + enum QueryChannelIds { + ChannelId, + } + + let channel_ids: Vec = channel_buffer_collaborator::Entity::find() + .select_only() + .column(channel_buffer_collaborator::Column::ChannelId) + .filter(Condition::all().add( + channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32), + )) + .into_values::<_, QueryChannelIds>() + .all(&*tx) + .await?; + + let mut result = Vec::new(); + for channel_id in channel_ids { + let left_channel_buffer = self + .leave_channel_buffer_internal(channel_id, connection, &tx) + .await?; + result.push(left_channel_buffer); + } + + Ok(result) + }) + .await + } + + async fn leave_channel_buffer_internal( + &self, + channel_id: ChannelId, + connection: ConnectionId, + tx: &DatabaseTransaction, + ) -> Result { + let result = channel_buffer_collaborator::Entity::delete_many() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) + .add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32)) + .add( + channel_buffer_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("not a collaborator on this project"))?; + } + + let mut collaborators = Vec::new(); + let mut connections = Vec::new(); + let mut rows = channel_buffer_collaborator::Entity::find() + .filter( + Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .stream(tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + let connection = row.connection(); + connections.push(connection); + collaborators.push(proto::Collaborator { + peer_id: Some(connection.into()), + replica_id: row.replica_id.0 as u32, + user_id: row.user_id.to_proto(), + }); + } + + drop(rows); + + if collaborators.is_empty() { + self.snapshot_channel_buffer(channel_id, &tx).await?; + } + + Ok(LeftChannelBuffer { + channel_id, + collaborators, + connections, + }) + } + + pub async fn get_channel_buffer_collaborators( + &self, + channel_id: ChannelId, + ) -> Result> { + self.transaction(|tx| async move { + self.get_channel_buffer_collaborators_internal(channel_id, &tx) + .await + }) + .await + } + + async fn get_channel_buffer_collaborators_internal( + &self, + channel_id: ChannelId, + tx: &DatabaseTransaction, + ) -> Result> { + #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)] + enum QueryUserIds { + UserId, + } + + let users: Vec = channel_buffer_collaborator::Entity::find() + .select_only() + .column(channel_buffer_collaborator::Column::UserId) + .filter( + Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .into_values::<_, QueryUserIds>() + .all(tx) + .await?; + + Ok(users) + } + + 
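`leave_channel_buffers` and `get_channel_buffer_collaborators_internal` above both project a single column into plain values by pairing `select_only`/`column` with a `DeriveColumn` enum. The same sea-orm idiom, shown on its own (the function name is invented):

async fn channel_ids_for_connection(
    connection: ConnectionId,
    tx: &DatabaseTransaction,
) -> Result<Vec<ChannelId>> {
    // The enum names the projected column so sea-orm can map rows onto it.
    #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
    enum QueryAs {
        ChannelId,
    }

    // Roughly: SELECT channel_id FROM channel_buffer_collaborators
    //          WHERE connection_id = $1
    Ok(channel_buffer_collaborator::Entity::find()
        .select_only()
        .column(channel_buffer_collaborator::Column::ChannelId)
        .filter(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
        .into_values::<_, QueryAs>()
        .all(tx)
        .await?)
}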
pub async fn update_channel_buffer( + &self, + channel_id: ChannelId, + user: UserId, + operations: &[proto::Operation], + ) -> Result<(HashSet, i32, Vec)> { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + + let mut requires_write_permission = false; + for op in operations.iter() { + match op.variant { + None | Some(proto::operation::Variant::UpdateSelections(_)) => {} + Some(_) => requires_write_permission = true, + } + } + if requires_write_permission { + self.check_user_is_channel_member(&channel, user, &tx) + .await?; + } else { + self.check_user_is_channel_participant(&channel, user, &tx) + .await?; + } + + let buffer = buffer::Entity::find() + .filter(buffer::Column::ChannelId.eq(channel_id)) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such buffer"))?; + + let serialization_version = self + .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &tx) + .await?; + + let operations = operations + .iter() + .filter_map(|op| operation_to_storage(op, &buffer, serialization_version)) + .collect::>(); + + let max_version; + + if !operations.is_empty() { + let max_operation = operations + .iter() + .max_by_key(|op| (op.lamport_timestamp.as_ref(), op.replica_id.as_ref())) + .unwrap(); + + max_version = vec![proto::VectorClockEntry { + replica_id: *max_operation.replica_id.as_ref() as u32, + timestamp: *max_operation.lamport_timestamp.as_ref() as u32, + }]; + + // get current channel participants and save the max operation above + self.save_max_operation( + user, + buffer.id, + buffer.epoch, + *max_operation.replica_id.as_ref(), + *max_operation.lamport_timestamp.as_ref(), + &tx, + ) + .await?; + + buffer_operation::Entity::insert_many(operations) + .on_conflict( + OnConflict::columns([ + buffer_operation::Column::BufferId, + buffer_operation::Column::Epoch, + buffer_operation::Column::LamportTimestamp, + buffer_operation::Column::ReplicaId, + ]) + .do_nothing() + .to_owned(), + ) + .exec(&*tx) + .await?; + } else { + max_version = Vec::new(); + } + + let mut connections = HashSet::default(); + let mut rows = channel_buffer_collaborator::Entity::find() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .stream(&*tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + connections.insert(ConnectionId { + id: row.connection_id as u32, + owner_id: row.connection_server_id.0 as u32, + }); + } + + Ok((connections, buffer.epoch, max_version)) + }) + .await + } + + async fn save_max_operation( + &self, + user_id: UserId, + buffer_id: BufferId, + epoch: i32, + replica_id: i32, + lamport_timestamp: i32, + tx: &DatabaseTransaction, + ) -> Result<()> { + buffer::Entity::update(buffer::ActiveModel { + id: ActiveValue::Unchanged(buffer_id), + epoch: ActiveValue::Unchanged(epoch), + latest_operation_epoch: ActiveValue::Set(Some(epoch)), + latest_operation_replica_id: ActiveValue::Set(Some(replica_id)), + latest_operation_lamport_timestamp: ActiveValue::Set(Some(lamport_timestamp)), + channel_id: ActiveValue::NotSet, + }) + .exec(tx) + .await?; + + use observed_buffer_edits::Column; + observed_buffer_edits::Entity::insert(observed_buffer_edits::ActiveModel { + user_id: ActiveValue::Set(user_id), + buffer_id: ActiveValue::Set(buffer_id), + epoch: ActiveValue::Set(epoch), + replica_id: ActiveValue::Set(replica_id), + lamport_timestamp: ActiveValue::Set(lamport_timestamp), + }) + .on_conflict( + OnConflict::columns([Column::UserId, Column::BufferId]) + 
.update_columns([Column::Epoch, Column::LamportTimestamp, Column::ReplicaId]) + .action_cond_where( + Condition::any().add(Column::Epoch.lt(epoch)).add( + Condition::all().add(Column::Epoch.eq(epoch)).add( + Condition::any() + .add(Column::LamportTimestamp.lt(lamport_timestamp)) + .add( + Column::LamportTimestamp + .eq(lamport_timestamp) + .and(Column::ReplicaId.lt(replica_id)), + ), + ), + ), + ) + .to_owned(), + ) + .exec_without_returning(tx) + .await?; + + Ok(()) + } + + async fn get_buffer_operation_serialization_version( + &self, + buffer_id: BufferId, + epoch: i32, + tx: &DatabaseTransaction, + ) -> Result { + Ok(buffer_snapshot::Entity::find() + .filter(buffer_snapshot::Column::BufferId.eq(buffer_id)) + .filter(buffer_snapshot::Column::Epoch.eq(epoch)) + .select_only() + .column(buffer_snapshot::Column::OperationSerializationVersion) + .into_values::<_, QueryOperationSerializationVersion>() + .one(tx) + .await? + .ok_or_else(|| anyhow!("missing buffer snapshot"))?) + } + + pub async fn get_channel_buffer( + &self, + channel_id: ChannelId, + tx: &DatabaseTransaction, + ) -> Result { + Ok(channel::Model { + id: channel_id, + ..Default::default() + } + .find_related(buffer::Entity) + .one(tx) + .await? + .ok_or_else(|| anyhow!("no such buffer"))?) + } + + async fn get_buffer_state( + &self, + buffer: &buffer::Model, + tx: &DatabaseTransaction, + ) -> Result<( + String, + Vec, + Option, + )> { + let id = buffer.id; + let (base_text, version) = if buffer.epoch > 0 { + let snapshot = buffer_snapshot::Entity::find() + .filter( + buffer_snapshot::Column::BufferId + .eq(id) + .and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)), + ) + .one(tx) + .await? + .ok_or_else(|| anyhow!("no such snapshot"))?; + + let version = snapshot.operation_serialization_version; + (snapshot.text, version) + } else { + (String::new(), storage::SERIALIZATION_VERSION) + }; + + let mut rows = buffer_operation::Entity::find() + .filter( + buffer_operation::Column::BufferId + .eq(id) + .and(buffer_operation::Column::Epoch.eq(buffer.epoch)), + ) + .order_by_asc(buffer_operation::Column::LamportTimestamp) + .order_by_asc(buffer_operation::Column::ReplicaId) + .stream(tx) + .await?; + + let mut operations = Vec::new(); + let mut last_row = None; + while let Some(row) = rows.next().await { + let row = row?; + last_row = Some(buffer_operation::Model { + buffer_id: row.buffer_id, + epoch: row.epoch, + lamport_timestamp: row.lamport_timestamp, + replica_id: row.replica_id, + value: Default::default(), + }); + operations.push(proto::Operation { + variant: Some(operation_from_storage(row, version)?), + }); + } + + Ok((base_text, operations, last_row)) + } + + async fn snapshot_channel_buffer( + &self, + channel_id: ChannelId, + tx: &DatabaseTransaction, + ) -> Result<()> { + let buffer = self.get_channel_buffer(channel_id, tx).await?; + let (base_text, operations, _) = self.get_buffer_state(&buffer, tx).await?; + if operations.is_empty() { + return Ok(()); + } + + let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text); + text_buffer + .apply_ops(operations.into_iter().filter_map(operation_from_wire)) + .unwrap(); + + let base_text = text_buffer.text(); + let epoch = buffer.epoch + 1; + + buffer_snapshot::Model { + buffer_id: buffer.id, + epoch, + text: base_text, + operation_serialization_version: storage::SERIALIZATION_VERSION, + } + .into_active_model() + .insert(tx) + .await?; + + buffer::ActiveModel { + id: ActiveValue::Unchanged(buffer.id), + epoch: ActiveValue::Set(epoch), + 
latest_operation_epoch: ActiveValue::NotSet, + latest_operation_replica_id: ActiveValue::NotSet, + latest_operation_lamport_timestamp: ActiveValue::NotSet, + channel_id: ActiveValue::NotSet, + } + .save(tx) + .await?; + + Ok(()) + } + + pub async fn observe_buffer_version( + &self, + buffer_id: BufferId, + user_id: UserId, + epoch: i32, + version: &[proto::VectorClockEntry], + ) -> Result<()> { + self.transaction(|tx| async move { + // For now, combine concurrent operations. + let Some(component) = version.iter().max_by_key(|version| version.timestamp) else { + return Ok(()); + }; + self.save_max_operation( + user_id, + buffer_id, + epoch, + component.replica_id as i32, + component.timestamp as i32, + &tx, + ) + .await?; + Ok(()) + }) + .await + } + + pub async fn observed_channel_buffer_changes( + &self, + channel_ids_by_buffer_id: &HashMap, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result> { + let observed_operations = observed_buffer_edits::Entity::find() + .filter(observed_buffer_edits::Column::UserId.eq(user_id)) + .filter( + observed_buffer_edits::Column::BufferId + .is_in(channel_ids_by_buffer_id.keys().copied()), + ) + .all(tx) + .await?; + + Ok(observed_operations + .iter() + .flat_map(|op| { + Some(proto::ChannelBufferVersion { + channel_id: channel_ids_by_buffer_id.get(&op.buffer_id)?.to_proto(), + epoch: op.epoch as u64, + version: vec![proto::VectorClockEntry { + replica_id: op.replica_id as u32, + timestamp: op.lamport_timestamp as u32, + }], + }) + }) + .collect()) + } +} + +fn operation_to_storage( + operation: &proto::Operation, + buffer: &buffer::Model, + _format: i32, +) -> Option { + let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? { + proto::operation::Variant::Edit(operation) => ( + operation.replica_id, + operation.lamport_timestamp, + storage::Operation { + version: version_to_storage(&operation.version), + is_undo: false, + edit_ranges: operation + .ranges + .iter() + .map(|range| storage::Range { + start: range.start, + end: range.end, + }) + .collect(), + edit_texts: operation.new_text.clone(), + undo_counts: Vec::new(), + }, + ), + proto::operation::Variant::Undo(operation) => ( + operation.replica_id, + operation.lamport_timestamp, + storage::Operation { + version: version_to_storage(&operation.version), + is_undo: true, + edit_ranges: Vec::new(), + edit_texts: Vec::new(), + undo_counts: operation + .counts + .iter() + .map(|entry| storage::UndoCount { + replica_id: entry.replica_id, + lamport_timestamp: entry.lamport_timestamp, + count: entry.count, + }) + .collect(), + }, + ), + _ => None?, + }; + + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(replica_id as i32), + lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32), + value: ActiveValue::Set(value.encode_to_vec()), + }) +} + +fn operation_from_storage( + row: buffer_operation::Model, + _format_version: i32, +) -> Result { + let operation = + storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?; + let version = version_from_storage(&operation.version); + Ok(if operation.is_undo { + proto::operation::Variant::Undo(proto::operation::Undo { + replica_id: row.replica_id as u32, + lamport_timestamp: row.lamport_timestamp as u32, + version, + counts: operation + .undo_counts + .iter() + .map(|entry| proto::UndoCount { + replica_id: entry.replica_id, + lamport_timestamp: entry.lamport_timestamp, + count: entry.count, + }) + 
.collect(), + }) + } else { + proto::operation::Variant::Edit(proto::operation::Edit { + replica_id: row.replica_id as u32, + lamport_timestamp: row.lamport_timestamp as u32, + version, + ranges: operation + .edit_ranges + .into_iter() + .map(|range| proto::Range { + start: range.start, + end: range.end, + }) + .collect(), + new_text: operation.edit_texts, + }) + }) +} + +fn version_to_storage(version: &Vec) -> Vec { + version + .iter() + .map(|entry| storage::VectorClockEntry { + replica_id: entry.replica_id, + timestamp: entry.timestamp, + }) + .collect() +} + +fn version_from_storage(version: &Vec) -> Vec { + version + .iter() + .map(|entry| proto::VectorClockEntry { + replica_id: entry.replica_id, + timestamp: entry.timestamp, + }) + .collect() +} + +// This is currently a manual copy of the deserialization code in the client's language crate +pub fn operation_from_wire(operation: proto::Operation) -> Option { + match operation.variant? { + proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation { + timestamp: clock::Lamport { + replica_id: edit.replica_id as text::ReplicaId, + value: edit.lamport_timestamp, + }, + version: version_from_wire(&edit.version), + ranges: edit + .ranges + .into_iter() + .map(|range| { + text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize) + }) + .collect(), + new_text: edit.new_text.into_iter().map(Arc::from).collect(), + })), + proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo(UndoOperation { + timestamp: clock::Lamport { + replica_id: undo.replica_id as text::ReplicaId, + value: undo.lamport_timestamp, + }, + version: version_from_wire(&undo.version), + counts: undo + .counts + .into_iter() + .map(|c| { + ( + clock::Lamport { + replica_id: c.replica_id as text::ReplicaId, + value: c.lamport_timestamp, + }, + c.count, + ) + }) + .collect(), + })), + _ => None, + } +} + +fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global { + let mut version = clock::Global::new(); + for entry in message { + version.observe(clock::Lamport { + replica_id: entry.replica_id as text::ReplicaId, + value: entry.timestamp, + }); + } + version +} + +fn version_to_wire(version: &clock::Global) -> Vec { + let mut message = Vec::new(); + for entry in version.iter() { + message.push(proto::VectorClockEntry { + replica_id: entry.replica_id as u32, + timestamp: entry.value, + }); + } + message +} + +#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)] +enum QueryOperationSerializationVersion { + OperationSerializationVersion, +} + +mod storage { + #![allow(non_snake_case)] + use prost::Message; + pub const SERIALIZATION_VERSION: i32 = 1; + + #[derive(Message)] + pub struct Operation { + #[prost(message, repeated, tag = "2")] + pub version: Vec, + #[prost(bool, tag = "3")] + pub is_undo: bool, + #[prost(message, repeated, tag = "4")] + pub edit_ranges: Vec, + #[prost(string, repeated, tag = "5")] + pub edit_texts: Vec, + #[prost(message, repeated, tag = "6")] + pub undo_counts: Vec, + } + + #[derive(Message)] + pub struct VectorClockEntry { + #[prost(uint32, tag = "1")] + pub replica_id: u32, + #[prost(uint32, tag = "2")] + pub timestamp: u32, + } + + #[derive(Message)] + pub struct Range { + #[prost(uint64, tag = "1")] + pub start: u64, + #[prost(uint64, tag = "2")] + pub end: u64, + } + + #[derive(Message)] + pub struct UndoCount { + #[prost(uint32, tag = "1")] + pub replica_id: u32, + #[prost(uint32, tag = "2")] + pub lamport_timestamp: u32, + #[prost(uint32, tag = "3")] + pub count: u32, + } +} 
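Buffer operations are persisted in the `value` column as the prost messages defined in the `storage` module above; a minimal encode/decode round trip with arbitrary field values:

use prost::Message;

fn storage_round_trip() -> anyhow::Result<()> {
    // An edit replacing offsets 0..5 with "hello", as it would be stored in
    // buffer_operations.value.
    let op = storage::Operation {
        version: vec![storage::VectorClockEntry { replica_id: 1, timestamp: 7 }],
        is_undo: false,
        edit_ranges: vec![storage::Range { start: 0, end: 5 }],
        edit_texts: vec!["hello".to_string()],
        undo_counts: Vec::new(),
    };

    let bytes = op.encode_to_vec();
    let decoded = storage::Operation::decode(bytes.as_slice())?;
    assert_eq!(decoded.edit_texts, vec!["hello".to_string()]);
    assert_eq!(decoded.edit_ranges[0].end, 5);
    Ok(())
}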
diff --git a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs new file mode 100644 index 0000000..502fcd5 --- /dev/null +++ b/crates/collab/src/db/queries/channels.rs @@ -0,0 +1,1024 @@ +use super::*; +use rpc::{ + proto::{channel_member::Kind, ChannelBufferVersion, VectorClockEntry}, + ErrorCode, ErrorCodeExt, +}; +use sea_orm::{DbBackend, TryGetableMany}; + +impl Database { + #[cfg(test)] + pub async fn all_channels(&self) -> Result> { + self.transaction(move |tx| async move { + let mut channels = Vec::new(); + let mut rows = channel::Entity::find().stream(&*tx).await?; + while let Some(row) = rows.next().await { + let row = row?; + channels.push((row.id, row.name)); + } + Ok(channels) + }) + .await + } + + #[cfg(test)] + pub async fn create_root_channel(&self, name: &str, creator_id: UserId) -> Result { + Ok(self.create_channel(name, None, creator_id).await?.0.id) + } + + #[cfg(test)] + pub async fn create_sub_channel( + &self, + name: &str, + parent: ChannelId, + creator_id: UserId, + ) -> Result { + Ok(self + .create_channel(name, Some(parent), creator_id) + .await? + .0 + .id) + } + + /// Creates a new channel. + pub async fn create_channel( + &self, + name: &str, + parent_channel_id: Option, + admin_id: UserId, + ) -> Result<(channel::Model, Option)> { + let name = Self::sanitize_channel_name(name)?; + self.transaction(move |tx| async move { + let mut parent = None; + let mut membership = None; + + if let Some(parent_channel_id) = parent_channel_id { + let parent_channel = self.get_channel_internal(parent_channel_id, &tx).await?; + self.check_user_is_channel_admin(&parent_channel, admin_id, &tx) + .await?; + parent = Some(parent_channel); + } + + let channel = channel::ActiveModel { + id: ActiveValue::NotSet, + name: ActiveValue::Set(name.to_string()), + visibility: ActiveValue::Set(ChannelVisibility::Members), + parent_path: ActiveValue::Set( + parent + .as_ref() + .map_or(String::new(), |parent| parent.path()), + ), + requires_zed_cla: ActiveValue::NotSet, + } + .insert(&*tx) + .await?; + + if parent.is_none() { + membership = Some( + channel_member::ActiveModel { + id: ActiveValue::NotSet, + channel_id: ActiveValue::Set(channel.id), + user_id: ActiveValue::Set(admin_id), + accepted: ActiveValue::Set(true), + role: ActiveValue::Set(ChannelRole::Admin), + } + .insert(&*tx) + .await?, + ); + } + + Ok((channel, membership)) + }) + .await + } + + /// Adds a user to the specified channel. + pub async fn join_channel( + &self, + channel_id: ChannelId, + user_id: UserId, + connection: ConnectionId, + ) -> Result<(JoinRoom, Option, ChannelRole)> { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + let mut role = self.channel_role_for_user(&channel, user_id, &tx).await?; + + let mut accept_invite_result = None; + + if role.is_none() { + if let Some(invitation) = self + .pending_invite_for_channel(&channel, user_id, &tx) + .await? + { + // note, this may be a parent channel + role = Some(invitation.role); + channel_member::Entity::update(channel_member::ActiveModel { + accepted: ActiveValue::Set(true), + ..invitation.into_active_model() + }) + .exec(&*tx) + .await?; + + accept_invite_result = Some( + self.calculate_membership_updated(&channel, user_id, &tx) + .await?, + ); + + debug_assert!( + self.channel_role_for_user(&channel, user_id, &tx).await? 
== role + ); + } else if channel.visibility == ChannelVisibility::Public { + role = Some(ChannelRole::Guest); + channel_member::Entity::insert(channel_member::ActiveModel { + id: ActiveValue::NotSet, + channel_id: ActiveValue::Set(channel.root_id()), + user_id: ActiveValue::Set(user_id), + accepted: ActiveValue::Set(true), + role: ActiveValue::Set(ChannelRole::Guest), + }) + .exec(&*tx) + .await?; + + accept_invite_result = Some( + self.calculate_membership_updated(&channel, user_id, &tx) + .await?, + ); + + debug_assert!( + self.channel_role_for_user(&channel, user_id, &tx).await? == role + ); + } + } + + if role.is_none() || role == Some(ChannelRole::Banned) { + Err(ErrorCode::Forbidden.anyhow())? + } + let role = role.unwrap(); + + let live_kit_room = format!("channel-{}", nanoid::nanoid!(30)); + let room_id = self + .get_or_create_channel_room(channel_id, &live_kit_room, &tx) + .await?; + + self.join_channel_room_internal(room_id, user_id, connection, role, &tx) + .await + .map(|jr| (jr, accept_invite_result, role)) + }) + .await + } + + /// Sets the visibility of the given channel. + pub async fn set_channel_visibility( + &self, + channel_id: ChannelId, + visibility: ChannelVisibility, + admin_id: UserId, + ) -> Result { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_admin(&channel, admin_id, &tx) + .await?; + + if visibility == ChannelVisibility::Public { + if let Some(parent_id) = channel.parent_id() { + let parent = self.get_channel_internal(parent_id, &tx).await?; + + if parent.visibility != ChannelVisibility::Public { + Err(ErrorCode::BadPublicNesting + .with_tag("direction", "parent") + .anyhow())?; + } + } + } else if visibility == ChannelVisibility::Members { + if self + .get_channel_descendants_excluding_self([&channel], &tx) + .await? + .into_iter() + .any(|channel| channel.visibility == ChannelVisibility::Public) + { + Err(ErrorCode::BadPublicNesting + .with_tag("direction", "children") + .anyhow())?; + } + } + + let mut model = channel.into_active_model(); + model.visibility = ActiveValue::Set(visibility); + let channel = model.update(&*tx).await?; + + Ok(channel) + }) + .await + } + + #[cfg(test)] + pub async fn set_channel_requires_zed_cla( + &self, + channel_id: ChannelId, + requires_zed_cla: bool, + ) -> Result<()> { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + let mut model = channel.into_active_model(); + model.requires_zed_cla = ActiveValue::Set(requires_zed_cla); + model.update(&*tx).await?; + Ok(()) + }) + .await + } + + /// Deletes the channel with the specified ID. + pub async fn delete_channel( + &self, + channel_id: ChannelId, + user_id: UserId, + ) -> Result<(ChannelId, Vec)> { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_admin(&channel, user_id, &tx) + .await?; + + let channels_to_remove = self + .get_channel_descendants_excluding_self([&channel], &tx) + .await? + .into_iter() + .map(|channel| channel.id) + .chain(Some(channel_id)) + .collect::>(); + + channel::Entity::delete_many() + .filter(channel::Column::Id.is_in(channels_to_remove.iter().copied())) + .exec(&*tx) + .await?; + + Ok((channel.root_id(), channels_to_remove)) + }) + .await + } + + /// Invites a user to a channel as a member. 
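To make the nesting rules in `set_channel_visibility` concrete, here is a test-style sketch built on the `#[cfg(test)]` helpers defined earlier in this file; the channel names are arbitrary and the database and admin user are assumed to be set up elsewhere:

async fn public_nesting_is_rejected(db: &Database, admin: UserId) -> Result<()> {
    // A Members-only root channel with one child.
    let root = db.create_root_channel("root", admin).await?;
    let child = db.create_sub_channel("child", root, admin).await?;

    // Making the child public while its parent is private fails with
    // ErrorCode::BadPublicNesting (direction: "parent").
    assert!(db
        .set_channel_visibility(child, ChannelVisibility::Public, admin)
        .await
        .is_err());

    // Going top-down works: parent first, then the child.
    db.set_channel_visibility(root, ChannelVisibility::Public, admin)
        .await?;
    db.set_channel_visibility(child, ChannelVisibility::Public, admin)
        .await?;
    Ok(())
}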
+ pub async fn invite_channel_member( + &self, + channel_id: ChannelId, + invitee_id: UserId, + inviter_id: UserId, + role: ChannelRole, + ) -> Result { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_admin(&channel, inviter_id, &tx) + .await?; + if !channel.is_root() { + Err(ErrorCode::NotARootChannel.anyhow())? + } + + channel_member::ActiveModel { + id: ActiveValue::NotSet, + channel_id: ActiveValue::Set(channel_id), + user_id: ActiveValue::Set(invitee_id), + accepted: ActiveValue::Set(false), + role: ActiveValue::Set(role), + } + .insert(&*tx) + .await?; + + let channel = Channel::from_model(channel); + + let notifications = self + .create_notification( + invitee_id, + rpc::Notification::ChannelInvitation { + channel_id: channel_id.to_proto(), + channel_name: channel.name.clone(), + inviter_id: inviter_id.to_proto(), + }, + true, + &tx, + ) + .await? + .into_iter() + .collect(); + + Ok(InviteMemberResult { + channel, + notifications, + }) + }) + .await + } + + fn sanitize_channel_name(name: &str) -> Result<&str> { + let new_name = name.trim().trim_start_matches('#'); + if new_name == "" { + Err(anyhow!("channel name can't be blank"))?; + } + Ok(new_name) + } + + /// Renames the specified channel. + pub async fn rename_channel( + &self, + channel_id: ChannelId, + admin_id: UserId, + new_name: &str, + ) -> Result { + self.transaction(move |tx| async move { + let new_name = Self::sanitize_channel_name(new_name)?.to_string(); + + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_admin(&channel, admin_id, &tx) + .await?; + + let mut model = channel.into_active_model(); + model.name = ActiveValue::Set(new_name.clone()); + let channel = model.update(&*tx).await?; + + Ok(channel) + }) + .await + } + + /// accept or decline an invite to join a channel + pub async fn respond_to_channel_invite( + &self, + channel_id: ChannelId, + user_id: UserId, + accept: bool, + ) -> Result { + self.transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + + let membership_update = if accept { + let rows_affected = channel_member::Entity::update_many() + .set(channel_member::ActiveModel { + accepted: ActiveValue::Set(accept), + ..Default::default() + }) + .filter( + channel_member::Column::ChannelId + .eq(channel_id) + .and(channel_member::Column::UserId.eq(user_id)) + .and(channel_member::Column::Accepted.eq(false)), + ) + .exec(&*tx) + .await? + .rows_affected; + + if rows_affected == 0 { + Err(anyhow!("no such invitation"))?; + } + + Some( + self.calculate_membership_updated(&channel, user_id, &tx) + .await?, + ) + } else { + let rows_affected = channel_member::Entity::delete_many() + .filter( + channel_member::Column::ChannelId + .eq(channel_id) + .and(channel_member::Column::UserId.eq(user_id)) + .and(channel_member::Column::Accepted.eq(false)), + ) + .exec(&*tx) + .await? + .rows_affected; + if rows_affected == 0 { + Err(anyhow!("no such invitation"))?; + } + + None + }; + + Ok(RespondToChannelInvite { + membership_update, + notifications: self + .mark_notification_as_read_with_response( + user_id, + &rpc::Notification::ChannelInvitation { + channel_id: channel_id.to_proto(), + channel_name: Default::default(), + inviter_id: Default::default(), + }, + accept, + &tx, + ) + .await? 
+ .into_iter() + .collect(), + }) + }) + .await + } + + async fn calculate_membership_updated( + &self, + channel: &channel::Model, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result { + let new_channels = self.get_user_channels(user_id, Some(channel), tx).await?; + let removed_channels = self + .get_channel_descendants_excluding_self([channel], tx) + .await? + .into_iter() + .map(|channel| channel.id) + .chain([channel.id]) + .filter(|channel_id| !new_channels.channels.iter().any(|c| c.id == *channel_id)) + .collect::>(); + + Ok(MembershipUpdated { + channel_id: channel.id, + new_channels, + removed_channels, + }) + } + + /// Removes a channel member. + pub async fn remove_channel_member( + &self, + channel_id: ChannelId, + member_id: UserId, + admin_id: UserId, + ) -> Result { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + + if member_id != admin_id { + self.check_user_is_channel_admin(&channel, admin_id, &tx) + .await?; + } + + let result = channel_member::Entity::delete_many() + .filter( + channel_member::Column::ChannelId + .eq(channel_id) + .and(channel_member::Column::UserId.eq(member_id)), + ) + .exec(&*tx) + .await?; + + if result.rows_affected == 0 { + Err(anyhow!("no such member"))?; + } + + Ok(RemoveChannelMemberResult { + membership_update: self + .calculate_membership_updated(&channel, member_id, &tx) + .await?, + notification_id: self + .remove_notification( + member_id, + rpc::Notification::ChannelInvitation { + channel_id: channel_id.to_proto(), + channel_name: Default::default(), + inviter_id: Default::default(), + }, + &tx, + ) + .await?, + }) + }) + .await + } + + /// Returns all channel invites for the user with the given ID. + pub async fn get_channel_invites_for_user(&self, user_id: UserId) -> Result> { + self.transaction(|tx| async move { + let mut role_for_channel: HashMap = HashMap::default(); + + let channel_invites = channel_member::Entity::find() + .filter( + channel_member::Column::UserId + .eq(user_id) + .and(channel_member::Column::Accepted.eq(false)), + ) + .all(&*tx) + .await?; + + for invite in channel_invites { + role_for_channel.insert(invite.channel_id, invite.role); + } + + let channels = channel::Entity::find() + .filter(channel::Column::Id.is_in(role_for_channel.keys().copied())) + .all(&*tx) + .await?; + + let channels = channels.into_iter().map(Channel::from_model).collect(); + + Ok(channels) + }) + .await + } + + /// Returns all channels for the user with the given ID. + pub async fn get_channels_for_user(&self, user_id: UserId) -> Result { + self.transaction(|tx| async move { + let tx = tx; + + self.get_user_channels(user_id, None, &tx).await + }) + .await + } + + /// Returns all channels for the user with the given ID that are descendants + /// of the specified ancestor channel. 
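+    ///
+    /// Membership is recorded against the root channel, so the ancestor filter is
+    /// applied via `root_id()`. Descendant channels are filtered by
+    /// `ChannelRole::can_see_channel`, and the returned `ChannelsForUser` also
+    /// carries room participants, the latest buffer versions, and the latest
+    /// channel messages for the visible channels.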
+ pub async fn get_user_channels( + &self, + user_id: UserId, + ancestor_channel: Option<&channel::Model>, + tx: &DatabaseTransaction, + ) -> Result { + let mut filter = channel_member::Column::UserId + .eq(user_id) + .and(channel_member::Column::Accepted.eq(true)); + + if let Some(ancestor) = ancestor_channel { + filter = filter.and(channel_member::Column::ChannelId.eq(ancestor.root_id())); + } + + let channel_memberships = channel_member::Entity::find() + .filter(filter) + .all(tx) + .await?; + + let channels = channel::Entity::find() + .filter(channel::Column::Id.is_in(channel_memberships.iter().map(|m| m.channel_id))) + .all(tx) + .await?; + + let mut descendants = self + .get_channel_descendants_excluding_self(channels.iter(), tx) + .await?; + + for channel in channels { + if let Err(ix) = descendants.binary_search_by_key(&channel.path(), |c| c.path()) { + descendants.insert(ix, channel); + } + } + + let roles_by_channel_id = channel_memberships + .iter() + .map(|membership| (membership.channel_id, membership.role)) + .collect::>(); + + let channels: Vec = descendants + .into_iter() + .filter_map(|channel| { + let parent_role = roles_by_channel_id.get(&channel.root_id())?; + if parent_role.can_see_channel(channel.visibility) { + Some(Channel::from_model(channel)) + } else { + None + } + }) + .collect(); + + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryUserIdsAndChannelIds { + ChannelId, + UserId, + } + + let mut channel_participants: HashMap> = HashMap::default(); + { + let mut rows = room_participant::Entity::find() + .inner_join(room::Entity) + .filter(room::Column::ChannelId.is_in(channels.iter().map(|c| c.id))) + .select_only() + .column(room::Column::ChannelId) + .column(room_participant::Column::UserId) + .into_values::<_, QueryUserIdsAndChannelIds>() + .stream(tx) + .await?; + while let Some(row) = rows.next().await { + let row: (ChannelId, UserId) = row?; + channel_participants.entry(row.0).or_default().push(row.1) + } + } + + let channel_ids = channels.iter().map(|c| c.id).collect::>(); + + let mut channel_ids_by_buffer_id = HashMap::default(); + let mut latest_buffer_versions: Vec = vec![]; + let mut rows = buffer::Entity::find() + .filter(buffer::Column::ChannelId.is_in(channel_ids.iter().copied())) + .stream(tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + channel_ids_by_buffer_id.insert(row.id, row.channel_id); + latest_buffer_versions.push(ChannelBufferVersion { + channel_id: row.channel_id.0 as u64, + epoch: row.latest_operation_epoch.unwrap_or_default() as u64, + version: if let Some((latest_lamport_timestamp, latest_replica_id)) = row + .latest_operation_lamport_timestamp + .zip(row.latest_operation_replica_id) + { + vec![VectorClockEntry { + timestamp: latest_lamport_timestamp as u32, + replica_id: latest_replica_id as u32, + }] + } else { + vec![] + }, + }); + } + drop(rows); + + let latest_channel_messages = self.latest_channel_messages(&channel_ids, tx).await?; + + let observed_buffer_versions = self + .observed_channel_buffer_changes(&channel_ids_by_buffer_id, user_id, tx) + .await?; + + let observed_channel_messages = self + .observed_channel_messages(&channel_ids, user_id, tx) + .await?; + + let hosted_projects = self + .get_hosted_projects(&channel_ids, &roles_by_channel_id, tx) + .await?; + + Ok(ChannelsForUser { + channel_memberships, + channels, + hosted_projects, + channel_participants, + latest_buffer_versions, + latest_channel_messages, + observed_buffer_versions, + observed_channel_messages, + }) + } + 
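+    // Example (hypothetical caller, shown for illustration only):
+    //
+    //     let state = db.get_channels_for_user(user_id).await?;
+    //     // `state.channels` is ordered by path and already filtered by visibility.
+    //     for channel in &state.channels {
+    //         // forward to the connected client
+    //     }
+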
+ /// Sets the role for the specified channel member. + pub async fn set_channel_member_role( + &self, + channel_id: ChannelId, + admin_id: UserId, + for_user: UserId, + role: ChannelRole, + ) -> Result { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_admin(&channel, admin_id, &tx) + .await?; + + let membership = channel_member::Entity::find() + .filter( + channel_member::Column::ChannelId + .eq(channel_id) + .and(channel_member::Column::UserId.eq(for_user)), + ) + .one(&*tx) + .await?; + + let Some(membership) = membership else { + Err(anyhow!("no such member"))? + }; + + let mut update = membership.into_active_model(); + update.role = ActiveValue::Set(role); + let updated = channel_member::Entity::update(update).exec(&*tx).await?; + + if updated.accepted { + Ok(SetMemberRoleResult::MembershipUpdated( + self.calculate_membership_updated(&channel, for_user, &tx) + .await?, + )) + } else { + Ok(SetMemberRoleResult::InviteUpdated(Channel::from_model( + channel, + ))) + } + }) + .await + } + + /// Returns the details for the specified channel member. + pub async fn get_channel_participant_details( + &self, + channel_id: ChannelId, + filter: &str, + limit: u64, + user_id: UserId, + ) -> Result<(Vec, Vec)> { + let members = self + .transaction(move |tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + let mut query = channel_member::Entity::find() + .find_also_related(user::Entity) + .filter(channel_member::Column::ChannelId.eq(channel.root_id())); + + if cfg!(any(test, sqlite)) && self.pool.get_database_backend() == DbBackend::Sqlite { + query = query.filter(Expr::cust_with_values( + "UPPER(github_login) LIKE ?", + [Self::fuzzy_like_string(&filter.to_uppercase())], + )) + } else { + query = query.filter(Expr::cust_with_values( + "github_login ILIKE $1", + [Self::fuzzy_like_string(filter)], + )) + } + let members = query.order_by( + Expr::cust( + "not role = 'admin', not role = 'member', not role = 'guest', not accepted, github_login", + ), + sea_orm::Order::Asc, + ) + .limit(limit) + .all(&*tx) + .await?; + + Ok(members) + }) + .await?; + + let mut users: Vec = Vec::with_capacity(members.len()); + + let members = members + .into_iter() + .map(|(member, user)| { + if let Some(user) = user { + users.push(proto::User { + id: user.id.to_proto(), + avatar_url: format!( + "https://github.com/{}.png?size=128", + user.github_login + ), + github_login: user.github_login, + }) + } + proto::ChannelMember { + role: member.role.into(), + user_id: member.user_id.to_proto(), + kind: if member.accepted { + Kind::Member + } else { + Kind::Invitee + } + .into(), + } + }) + .collect(); + + Ok((members, users)) + } + + /// Returns whether the given user is an admin in the specified channel. + pub async fn check_user_is_channel_admin( + &self, + channel: &channel::Model, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result { + let role = self.channel_role_for_user(channel, user_id, tx).await?; + match role { + Some(ChannelRole::Admin) => Ok(role.unwrap()), + Some(ChannelRole::Member) + | Some(ChannelRole::Talker) + | Some(ChannelRole::Banned) + | Some(ChannelRole::Guest) + | None => Err(anyhow!( + "user is not a channel admin or channel does not exist" + ))?, + } + } + + /// Returns whether the given user is a member of the specified channel. 
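+    /// On success the member's `ChannelRole` is returned; guests, talkers, banned
+    /// users, and non-members are rejected with an error.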
+ pub async fn check_user_is_channel_member( + &self, + channel: &channel::Model, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result { + let channel_role = self.channel_role_for_user(channel, user_id, tx).await?; + match channel_role { + Some(ChannelRole::Admin) | Some(ChannelRole::Member) => Ok(channel_role.unwrap()), + Some(ChannelRole::Banned) + | Some(ChannelRole::Guest) + | Some(ChannelRole::Talker) + | None => Err(anyhow!( + "user is not a channel member or channel does not exist" + ))?, + } + } + + /// Returns whether the given user is a participant in the specified channel. + pub async fn check_user_is_channel_participant( + &self, + channel: &channel::Model, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result { + let role = self.channel_role_for_user(channel, user_id, tx).await?; + match role { + Some(ChannelRole::Admin) + | Some(ChannelRole::Member) + | Some(ChannelRole::Guest) + | Some(ChannelRole::Talker) => Ok(role.unwrap()), + Some(ChannelRole::Banned) | None => Err(anyhow!( + "user is not a channel participant or channel does not exist" + ))?, + } + } + + /// Returns a user's pending invite for the given channel, if one exists. + pub async fn pending_invite_for_channel( + &self, + channel: &channel::Model, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result> { + let row = channel_member::Entity::find() + .filter(channel_member::Column::ChannelId.eq(channel.root_id())) + .filter(channel_member::Column::UserId.eq(user_id)) + .filter(channel_member::Column::Accepted.eq(false)) + .one(tx) + .await?; + + Ok(row) + } + + /// Returns the role for a user in the given channel. + pub async fn channel_role_for_user( + &self, + channel: &channel::Model, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result> { + let membership = channel_member::Entity::find() + .filter( + channel_member::Column::ChannelId + .eq(channel.root_id()) + .and(channel_member::Column::UserId.eq(user_id)) + .and(channel_member::Column::Accepted.eq(true)), + ) + .one(tx) + .await?; + + let Some(membership) = membership else { + return Ok(None); + }; + + if !membership.role.can_see_channel(channel.visibility) { + return Ok(None); + } + + Ok(Some(membership.role)) + } + + // Get the descendants of the given set if channels, ordered by their + // path. + pub(crate) async fn get_channel_descendants_excluding_self( + &self, + channels: impl IntoIterator, + tx: &DatabaseTransaction, + ) -> Result> { + let mut filter = Condition::any(); + for channel in channels.into_iter() { + filter = filter.add(channel::Column::ParentPath.like(channel.descendant_path_filter())); + } + + if filter.is_empty() { + return Ok(vec![]); + } + + Ok(channel::Entity::find() + .filter(filter) + .order_by_asc(Expr::cust("parent_path || id || '/'")) + .all(tx) + .await?) + } + + /// Returns the channel with the given ID. + pub async fn get_channel(&self, channel_id: ChannelId, user_id: UserId) -> Result { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + + Ok(Channel::from_model(channel)) + }) + .await + } + + pub(crate) async fn get_channel_internal( + &self, + channel_id: ChannelId, + tx: &DatabaseTransaction, + ) -> Result { + Ok(channel::Entity::find_by_id(channel_id) + .one(tx) + .await? + .ok_or_else(|| proto::ErrorCode::NoSuchChannel.anyhow())?) 
+ } + + pub(crate) async fn get_or_create_channel_room( + &self, + channel_id: ChannelId, + live_kit_room: &str, + tx: &DatabaseTransaction, + ) -> Result { + let room = room::Entity::find() + .filter(room::Column::ChannelId.eq(channel_id)) + .one(tx) + .await?; + + let room_id = if let Some(room) = room { + room.id + } else { + let result = room::Entity::insert(room::ActiveModel { + channel_id: ActiveValue::Set(Some(channel_id)), + live_kit_room: ActiveValue::Set(live_kit_room.to_string()), + ..Default::default() + }) + .exec(tx) + .await?; + + result.last_insert_id + }; + + Ok(room_id) + } + + /// Move a channel from one parent to another + pub async fn move_channel( + &self, + channel_id: ChannelId, + new_parent_id: ChannelId, + admin_id: UserId, + ) -> Result<(ChannelId, Vec)> { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_admin(&channel, admin_id, &tx) + .await?; + let new_parent = self.get_channel_internal(new_parent_id, &tx).await?; + + if new_parent.root_id() != channel.root_id() { + Err(anyhow!(ErrorCode::WrongMoveTarget))?; + } + + if new_parent + .ancestors_including_self() + .any(|id| id == channel.id) + { + Err(anyhow!(ErrorCode::CircularNesting))?; + } + + if channel.visibility == ChannelVisibility::Public + && new_parent.visibility != ChannelVisibility::Public + { + Err(anyhow!(ErrorCode::BadPublicNesting))?; + } + + let root_id = channel.root_id(); + let old_path = format!("{}{}/", channel.parent_path, channel.id); + let new_path = format!("{}{}/", new_parent.path(), channel.id); + + let mut model = channel.into_active_model(); + model.parent_path = ActiveValue::Set(new_parent.path()); + let channel = model.update(&*tx).await?; + + let descendent_ids = + ChannelId::find_by_statement::(Statement::from_sql_and_values( + self.pool.get_database_backend(), + " + UPDATE channels SET parent_path = REPLACE(parent_path, $1, $2) + WHERE parent_path LIKE $3 || '%' + RETURNING id + ", + [old_path.clone().into(), new_path.into(), old_path.into()], + )) + .all(&*tx) + .await?; + + let all_moved_ids = Some(channel.id).into_iter().chain(descendent_ids); + + let channels = channel::Entity::find() + .filter(channel::Column::Id.is_in(all_moved_ids)) + .all(&*tx) + .await? + .into_iter() + .map(|c| Channel::from_model(c)) + .collect::>(); + + Ok((root_id, channels)) + }) + .await + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] +enum QueryIds { + Id, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] +enum QueryUserIds { + UserId, +} diff --git a/crates/collab/src/db/queries/contacts.rs b/crates/collab/src/db/queries/contacts.rs new file mode 100644 index 0000000..89bb07f --- /dev/null +++ b/crates/collab/src/db/queries/contacts.rs @@ -0,0 +1,361 @@ +use super::*; + +impl Database { + /// Retrieves the contacts for the user with the given ID. 
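+    ///
+    /// Each contact pair is stored as a single row with `user_id_a < user_id_b` and
+    /// `a_to_b` recording who sent the request; busy status is derived from a join
+    /// against `room_participant` for each side of the pair.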
+ pub async fn get_contacts(&self, user_id: UserId) -> Result> { + #[derive(Debug, FromQueryResult)] + struct ContactWithUserBusyStatuses { + user_id_a: UserId, + user_id_b: UserId, + a_to_b: bool, + accepted: bool, + user_a_busy: bool, + user_b_busy: bool, + } + + self.transaction(|tx| async move { + let user_a_participant = Alias::new("user_a_participant"); + let user_b_participant = Alias::new("user_b_participant"); + let mut db_contacts = contact::Entity::find() + .column_as( + Expr::col((user_a_participant.clone(), room_participant::Column::Id)) + .is_not_null(), + "user_a_busy", + ) + .column_as( + Expr::col((user_b_participant.clone(), room_participant::Column::Id)) + .is_not_null(), + "user_b_busy", + ) + .filter( + contact::Column::UserIdA + .eq(user_id) + .or(contact::Column::UserIdB.eq(user_id)), + ) + .join_as( + JoinType::LeftJoin, + contact::Relation::UserARoomParticipant.def(), + user_a_participant, + ) + .join_as( + JoinType::LeftJoin, + contact::Relation::UserBRoomParticipant.def(), + user_b_participant, + ) + .into_model::() + .stream(&*tx) + .await?; + + let mut contacts = Vec::new(); + while let Some(db_contact) = db_contacts.next().await { + let db_contact = db_contact?; + if db_contact.user_id_a == user_id { + if db_contact.accepted { + contacts.push(Contact::Accepted { + user_id: db_contact.user_id_b, + busy: db_contact.user_b_busy, + }); + } else if db_contact.a_to_b { + contacts.push(Contact::Outgoing { + user_id: db_contact.user_id_b, + }) + } else { + contacts.push(Contact::Incoming { + user_id: db_contact.user_id_b, + }); + } + } else if db_contact.accepted { + contacts.push(Contact::Accepted { + user_id: db_contact.user_id_a, + busy: db_contact.user_a_busy, + }); + } else if db_contact.a_to_b { + contacts.push(Contact::Incoming { + user_id: db_contact.user_id_a, + }); + } else { + contacts.push(Contact::Outgoing { + user_id: db_contact.user_id_a, + }); + } + } + + contacts.sort_unstable_by_key(|contact| contact.user_id()); + + Ok(contacts) + }) + .await + } + + /// Returns whether the given user is a busy (on a call). + pub async fn is_user_busy(&self, user_id: UserId) -> Result { + self.transaction(|tx| async move { + let participant = room_participant::Entity::find() + .filter(room_participant::Column::UserId.eq(user_id)) + .one(&*tx) + .await?; + Ok(participant.is_some()) + }) + .await + } + + /// Returns whether the user with `user_id_1` has the user with `user_id_2` as a contact. + /// + /// In order for this to return `true`, `user_id_2` must have an accepted invite from `user_id_1`. + pub async fn has_contact(&self, user_id_1: UserId, user_id_2: UserId) -> Result { + self.transaction(|tx| async move { + let (id_a, id_b) = if user_id_1 < user_id_2 { + (user_id_1, user_id_2) + } else { + (user_id_2, user_id_1) + }; + + Ok(contact::Entity::find() + .filter( + contact::Column::UserIdA + .eq(id_a) + .and(contact::Column::UserIdB.eq(id_b)) + .and(contact::Column::Accepted.eq(true)), + ) + .one(&*tx) + .await? + .is_some()) + }) + .await + } + + /// Invite the user with `receiver_id` to be a contact of the user with `sender_id`. 
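+    ///
+    /// The pair is inserted once (ordered ids, `accepted = false`); if the insert
+    /// conflicts with an existing row that cannot be upgraded, the call fails with
+    /// "contact already requested". Otherwise a `ContactRequest` notification is
+    /// created for the receiver.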
+ pub async fn send_contact_request( + &self, + sender_id: UserId, + receiver_id: UserId, + ) -> Result { + self.transaction(|tx| async move { + let (id_a, id_b, a_to_b) = if sender_id < receiver_id { + (sender_id, receiver_id, true) + } else { + (receiver_id, sender_id, false) + }; + + let rows_affected = contact::Entity::insert(contact::ActiveModel { + user_id_a: ActiveValue::set(id_a), + user_id_b: ActiveValue::set(id_b), + a_to_b: ActiveValue::set(a_to_b), + accepted: ActiveValue::set(false), + should_notify: ActiveValue::set(true), + ..Default::default() + }) + .on_conflict( + OnConflict::columns([contact::Column::UserIdA, contact::Column::UserIdB]) + .values([ + (contact::Column::Accepted, true.into()), + (contact::Column::ShouldNotify, false.into()), + ]) + .action_and_where( + contact::Column::Accepted.eq(false).and( + contact::Column::AToB + .eq(a_to_b) + .and(contact::Column::UserIdA.eq(id_b)) + .or(contact::Column::AToB + .ne(a_to_b) + .and(contact::Column::UserIdA.eq(id_a))), + ), + ) + .to_owned(), + ) + .exec_without_returning(&*tx) + .await?; + + if rows_affected == 0 { + Err(anyhow!("contact already requested"))?; + } + + Ok(self + .create_notification( + receiver_id, + rpc::Notification::ContactRequest { + sender_id: sender_id.to_proto(), + }, + true, + &tx, + ) + .await? + .into_iter() + .collect()) + }) + .await + } + + /// Returns a bool indicating whether the removed contact had originally accepted or not + /// + /// Deletes the contact identified by the requester and responder ids, and then returns + /// whether the deleted contact had originally accepted or was a pending contact request. + /// + /// # Arguments + /// + /// * `requester_id` - The user that initiates this request + /// * `responder_id` - The user that will be removed + pub async fn remove_contact( + &self, + requester_id: UserId, + responder_id: UserId, + ) -> Result<(bool, Option)> { + self.transaction(|tx| async move { + let (id_a, id_b) = if responder_id < requester_id { + (responder_id, requester_id) + } else { + (requester_id, responder_id) + }; + + let contact = contact::Entity::find() + .filter( + contact::Column::UserIdA + .eq(id_a) + .and(contact::Column::UserIdB.eq(id_b)), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such contact"))?; + + contact::Entity::delete_by_id(contact.id).exec(&*tx).await?; + + let mut deleted_notification_id = None; + if !contact.accepted { + deleted_notification_id = self + .remove_notification( + responder_id, + rpc::Notification::ContactRequest { + sender_id: requester_id.to_proto(), + }, + &tx, + ) + .await?; + } + + Ok((contact.accepted, deleted_notification_id)) + }) + .await + } + + /// Dismisses a contact notification for the given user. 
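+    ///
+    /// Clears `should_notify` on the matching contact row (either a request this
+    /// user sent that has since been accepted, or a pending request this user
+    /// received); errors with "no such contact request" if nothing matches.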
+ pub async fn dismiss_contact_notification( + &self, + user_id: UserId, + contact_user_id: UserId, + ) -> Result<()> { + self.transaction(|tx| async move { + let (id_a, id_b, a_to_b) = if user_id < contact_user_id { + (user_id, contact_user_id, true) + } else { + (contact_user_id, user_id, false) + }; + + let result = contact::Entity::update_many() + .set(contact::ActiveModel { + should_notify: ActiveValue::set(false), + ..Default::default() + }) + .filter( + contact::Column::UserIdA + .eq(id_a) + .and(contact::Column::UserIdB.eq(id_b)) + .and( + contact::Column::AToB + .eq(a_to_b) + .and(contact::Column::Accepted.eq(true)) + .or(contact::Column::AToB + .ne(a_to_b) + .and(contact::Column::Accepted.eq(false))), + ), + ) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("no such contact request"))? + } else { + Ok(()) + } + }) + .await + } + + /// Accept or decline a contact request + pub async fn respond_to_contact_request( + &self, + responder_id: UserId, + requester_id: UserId, + accept: bool, + ) -> Result { + self.transaction(|tx| async move { + let (id_a, id_b, a_to_b) = if responder_id < requester_id { + (responder_id, requester_id, false) + } else { + (requester_id, responder_id, true) + }; + let rows_affected = if accept { + let result = contact::Entity::update_many() + .set(contact::ActiveModel { + accepted: ActiveValue::set(true), + should_notify: ActiveValue::set(true), + ..Default::default() + }) + .filter( + contact::Column::UserIdA + .eq(id_a) + .and(contact::Column::UserIdB.eq(id_b)) + .and(contact::Column::AToB.eq(a_to_b)), + ) + .exec(&*tx) + .await?; + result.rows_affected + } else { + let result = contact::Entity::delete_many() + .filter( + contact::Column::UserIdA + .eq(id_a) + .and(contact::Column::UserIdB.eq(id_b)) + .and(contact::Column::AToB.eq(a_to_b)) + .and(contact::Column::Accepted.eq(false)), + ) + .exec(&*tx) + .await?; + + result.rows_affected + }; + + if rows_affected == 0 { + Err(anyhow!("no such contact request"))? + } + + let mut notifications = Vec::new(); + notifications.extend( + self.mark_notification_as_read_with_response( + responder_id, + &rpc::Notification::ContactRequest { + sender_id: requester_id.to_proto(), + }, + accept, + &tx, + ) + .await?, + ); + + if accept { + notifications.extend( + self.create_notification( + requester_id, + rpc::Notification::ContactRequestAccepted { + responder_id: responder_id.to_proto(), + }, + true, + &tx, + ) + .await?, + ); + } + + Ok(notifications) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/contributors.rs b/crates/collab/src/db/queries/contributors.rs new file mode 100644 index 0000000..703abfb --- /dev/null +++ b/crates/collab/src/db/queries/contributors.rs @@ -0,0 +1,96 @@ +use super::*; + +#[derive(Debug)] +pub enum ContributorSelector { + GitHubUserId { github_user_id: i32 }, + GitHubLogin { github_login: String }, +} + +impl Database { + /// Retrieves the GitHub logins of all users who have signed the CLA. + pub async fn get_contributors(&self) -> Result> { + self.transaction(|tx| async move { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryGithubLogin { + GithubLogin, + } + + Ok(contributor::Entity::find() + .inner_join(user::Entity) + .order_by_asc(contributor::Column::SignedAt) + .select_only() + .column(user::Column::GithubLogin) + .into_values::<_, QueryGithubLogin>() + .all(&*tx) + .await?) + }) + .await + } + + /// Records that a given user has signed the CLA. 
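+    /// Returns the time at which the selected user signed the CLA (admins are
+    /// treated as having signed when their account was created), or `None` if they
+    /// have not signed.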
+ pub async fn get_contributor_sign_timestamp( + &self, + selector: &ContributorSelector, + ) -> Result> { + self.transaction(|tx| async move { + let condition = match selector { + ContributorSelector::GitHubUserId { github_user_id } => { + user::Column::GithubUserId.eq(*github_user_id) + } + ContributorSelector::GitHubLogin { github_login } => { + user::Column::GithubLogin.eq(github_login) + } + }; + + if let Some(user) = user::Entity::find().filter(condition).one(&*tx).await? { + if user.admin { + return Ok(Some(user.created_at)); + } + + if let Some(contributor) = + contributor::Entity::find_by_id(user.id).one(&*tx).await? + { + return Ok(Some(contributor.signed_at)); + } + } + + Ok(None) + }) + .await + } + + /// Records that a given user has signed the CLA. + pub async fn add_contributor( + &self, + github_login: &str, + github_user_id: Option, + github_email: Option<&str>, + initial_channel_id: Option, + ) -> Result<()> { + self.transaction(|tx| async move { + let user = self + .get_or_create_user_by_github_account_tx( + github_login, + github_user_id, + github_email, + initial_channel_id, + &tx, + ) + .await?; + + contributor::Entity::insert(contributor::ActiveModel { + user_id: ActiveValue::Set(user.id), + signed_at: ActiveValue::NotSet, + }) + .on_conflict( + OnConflict::column(contributor::Column::UserId) + .do_nothing() + .to_owned(), + ) + .exec_without_returning(&*tx) + .await?; + Ok(()) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/dev_server_projects.rs b/crates/collab/src/db/queries/dev_server_projects.rs new file mode 100644 index 0000000..3c71693 --- /dev/null +++ b/crates/collab/src/db/queries/dev_server_projects.rs @@ -0,0 +1,340 @@ +use anyhow::anyhow; +use rpc::{ + proto::{self}, + ConnectionId, +}; +use sea_orm::{ + ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait, + ModelTrait, QueryFilter, +}; + +use crate::db::ProjectId; + +use super::{ + dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId, + DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId, +}; + +impl Database { + pub async fn get_dev_server_project( + &self, + dev_server_project_id: DevServerProjectId, + ) -> crate::Result { + self.transaction(|tx| async move { + Ok( + dev_server_project::Entity::find_by_id(dev_server_project_id) + .one(&*tx) + .await? 
+ .ok_or_else(|| { + anyhow!("no dev server project with id {}", dev_server_project_id) + })?, + ) + }) + .await + } + + pub async fn get_projects_for_dev_server( + &self, + dev_server_id: DevServerId, + ) -> crate::Result> { + self.transaction(|tx| async move { + self.get_projects_for_dev_server_internal(dev_server_id, &tx) + .await + }) + .await + } + + pub async fn get_projects_for_dev_server_internal( + &self, + dev_server_id: DevServerId, + tx: &DatabaseTransaction, + ) -> crate::Result> { + let servers = dev_server_project::Entity::find() + .filter(dev_server_project::Column::DevServerId.eq(dev_server_id)) + .find_also_related(project::Entity) + .all(tx) + .await?; + Ok(servers + .into_iter() + .map(|(dev_server_project, project)| proto::DevServerProject { + id: dev_server_project.id.to_proto(), + project_id: project.map(|p| p.id.to_proto()), + dev_server_id: dev_server_project.dev_server_id.to_proto(), + path: dev_server_project.path, + }) + .collect()) + } + + pub async fn dev_server_project_ids_for_user( + &self, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> crate::Result> { + let dev_servers = dev_server::Entity::find() + .filter(dev_server::Column::UserId.eq(user_id)) + .find_with_related(dev_server_project::Entity) + .all(tx) + .await?; + + Ok(dev_servers + .into_iter() + .flat_map(|(_, projects)| projects.into_iter().map(|p| p.id)) + .collect()) + } + + pub async fn owner_for_dev_server_project( + &self, + dev_server_project_id: DevServerProjectId, + tx: &DatabaseTransaction, + ) -> crate::Result { + let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id) + .find_also_related(dev_server::Entity) + .one(tx) + .await? + .and_then(|(_, dev_server)| dev_server) + .ok_or_else(|| anyhow!("no dev server project"))?; + + Ok(dev_server.user_id) + } + + pub async fn get_stale_dev_server_projects( + &self, + connection: ConnectionId, + ) -> crate::Result> { + self.transaction(|tx| async move { + let projects = project::Entity::find() + .filter( + Condition::all() + .add(project::Column::HostConnectionId.eq(connection.id)) + .add(project::Column::HostConnectionServerId.eq(connection.owner_id)), + ) + .all(&*tx) + .await?; + + Ok(projects.into_iter().map(|p| p.id).collect()) + }) + .await + } + + pub async fn create_dev_server_project( + &self, + dev_server_id: DevServerId, + path: &str, + user_id: UserId, + ) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> { + self.transaction(|tx| async move { + let dev_server = dev_server::Entity::find_by_id(dev_server_id) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?; + if dev_server.user_id != user_id { + return Err(anyhow!("not your dev server"))?; + } + + let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel { + id: ActiveValue::NotSet, + dev_server_id: ActiveValue::Set(dev_server_id), + path: ActiveValue::Set(path.to_string()), + }) + .exec_with_returning(&*tx) + .await?; + + let status = self + .dev_server_projects_update_internal(user_id, &tx) + .await?; + + Ok((project, status)) + }) + .await + } + + pub async fn delete_dev_server_project( + &self, + dev_server_project_id: DevServerProjectId, + dev_server_id: DevServerId, + user_id: UserId, + ) -> crate::Result<(Vec, proto::DevServerProjectsUpdate)> { + self.transaction(|tx| async move { + project::Entity::delete_many() + .filter(project::Column::DevServerProjectId.eq(dev_server_project_id)) + .exec(&*tx) + .await?; + let result = dev_server_project::Entity::delete_by_id(dev_server_project_id) + .exec(&*tx) + .await?; + if result.rows_affected != 1 { + return Err(anyhow!( + "no dev server project with id {}", + dev_server_project_id + ))?; + } + + let status = self + .dev_server_projects_update_internal(user_id, &tx) + .await?; + + let projects = self + .get_projects_for_dev_server_internal(dev_server_id, &tx) + .await?; + Ok((projects, status)) + }) + .await + } + + pub async fn share_dev_server_project( + &self, + dev_server_project_id: DevServerProjectId, + dev_server_id: DevServerId, + connection: ConnectionId, + worktrees: &[proto::WorktreeMetadata], + ) -> crate::Result<( + proto::DevServerProject, + UserId, + proto::DevServerProjectsUpdate, + )> { + self.transaction(|tx| async move { + let dev_server = dev_server::Entity::find_by_id(dev_server_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?; + + let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id) + .one(&*tx) + .await? 
+ .ok_or_else(|| { + anyhow!("no dev server project with id {}", dev_server_project_id) + })?; + + if dev_server_project.dev_server_id != dev_server_id { + return Err(anyhow!("dev server project shared from wrong server"))?; + } + + let project = project::ActiveModel { + room_id: ActiveValue::Set(None), + host_user_id: ActiveValue::Set(None), + host_connection_id: ActiveValue::set(Some(connection.id as i32)), + host_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + id: ActiveValue::NotSet, + hosted_project_id: ActiveValue::Set(None), + dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)), + } + .insert(&*tx) + .await?; + + if !worktrees.is_empty() { + worktree::Entity::insert_many(worktrees.iter().map(|worktree| { + worktree::ActiveModel { + id: ActiveValue::set(worktree.id as i64), + project_id: ActiveValue::set(project.id), + abs_path: ActiveValue::set(worktree.abs_path.clone()), + root_name: ActiveValue::set(worktree.root_name.clone()), + visible: ActiveValue::set(worktree.visible), + scan_id: ActiveValue::set(0), + completed_scan_id: ActiveValue::set(0), + } + })) + .exec(&*tx) + .await?; + } + + let status = self + .dev_server_projects_update_internal(dev_server.user_id, &tx) + .await?; + + Ok(( + dev_server_project.to_proto(Some(project)), + dev_server.user_id, + status, + )) + }) + .await + } + + pub async fn reshare_dev_server_projects( + &self, + reshared_projects: &Vec, + dev_server_id: DevServerId, + connection: ConnectionId, + ) -> crate::Result> { + // todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?) + self.transaction(|tx| async move { + let mut ret = Vec::new(); + for reshared_project in reshared_projects { + let project_id = ProjectId::from_proto(reshared_project.project_id); + let (project, dev_server_project) = project::Entity::find_by_id(project_id) + .find_also_related(dev_server_project::Entity) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("project does not exist"))?; + + if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) { + return Err(anyhow!("dev server project reshared from wrong server"))?; + } + + let Ok(old_connection_id) = project.host_connection() else { + return Err(anyhow!("dev server project was not shared"))?; + }; + + project::Entity::update(project::ActiveModel { + id: ActiveValue::set(project_id), + host_connection_id: ActiveValue::set(Some(connection.id as i32)), + host_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + ..Default::default() + }) + .exec(&*tx) + .await?; + + let collaborators = project + .find_related(project_collaborator::Entity) + .all(&*tx) + .await?; + + self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx) + .await?; + + ret.push(super::ResharedProject { + id: project_id, + old_connection_id, + collaborators: collaborators + .iter() + .map(|collaborator| super::ProjectCollaborator { + connection_id: collaborator.connection(), + user_id: collaborator.user_id, + replica_id: collaborator.replica_id, + is_host: collaborator.is_host, + }) + .collect(), + worktrees: reshared_project.worktrees.clone(), + }); + } + Ok(ret) + }) + .await + } + + pub async fn rejoin_dev_server_projects( + &self, + rejoined_projects: &Vec, + user_id: UserId, + connection_id: ConnectionId, + ) -> crate::Result> { + // todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?) 
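+        // Projects for which `rejoin_project_internal` returns `None` are skipped
+        // rather than failing the whole batch.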
+ self.transaction(|tx| async move { + let mut ret = Vec::new(); + for rejoined_project in rejoined_projects { + if let Some(project) = self + .rejoin_project_internal(&tx, rejoined_project, user_id, connection_id) + .await? + { + ret.push(project); + } + } + Ok(ret) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/dev_servers.rs b/crates/collab/src/db/queries/dev_servers.rs new file mode 100644 index 0000000..8eb3d43 --- /dev/null +++ b/crates/collab/src/db/queries/dev_servers.rs @@ -0,0 +1,196 @@ +use rpc::proto; +use sea_orm::{ + ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter, +}; + +use super::{dev_server, dev_server_project, Database, DevServerId, UserId}; + +impl Database { + pub async fn get_dev_server( + &self, + dev_server_id: DevServerId, + ) -> crate::Result { + self.transaction(|tx| async move { + Ok(dev_server::Entity::find_by_id(dev_server_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?) + }) + .await + } + + pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result> { + self.transaction(|tx| async move { + Ok(dev_server::Entity::find() + .filter(dev_server::Column::UserId.eq(user_id)) + .all(&*tx) + .await?) + }) + .await + } + + pub async fn dev_server_projects_update( + &self, + user_id: UserId, + ) -> crate::Result { + self.transaction(|tx| async move { + self.dev_server_projects_update_internal(user_id, &tx).await + }) + .await + } + + pub async fn dev_server_projects_update_internal( + &self, + user_id: UserId, + tx: &DatabaseTransaction, + ) -> crate::Result { + let dev_servers = dev_server::Entity::find() + .filter(dev_server::Column::UserId.eq(user_id)) + .all(tx) + .await?; + + let dev_server_projects = dev_server_project::Entity::find() + .filter( + dev_server_project::Column::DevServerId + .is_in(dev_servers.iter().map(|d| d.id).collect::>()), + ) + .find_also_related(super::project::Entity) + .all(tx) + .await?; + + Ok(proto::DevServerProjectsUpdate { + dev_servers: dev_servers + .into_iter() + .map(|d| d.to_proto(proto::DevServerStatus::Offline)) + .collect(), + dev_server_projects: dev_server_projects + .into_iter() + .map(|(dev_server_project, project)| dev_server_project.to_proto(project)) + .collect(), + }) + } + + pub async fn create_dev_server( + &self, + name: &str, + ssh_connection_string: Option<&str>, + hashed_access_token: &str, + user_id: UserId, + ) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> { + self.transaction(|tx| async move { + if name.trim().is_empty() { + return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; + } + + let dev_server = dev_server::Entity::insert(dev_server::ActiveModel { + id: ActiveValue::NotSet, + hashed_token: ActiveValue::Set(hashed_access_token.to_string()), + name: ActiveValue::Set(name.trim().to_string()), + user_id: ActiveValue::Set(user_id), + ssh_connection_string: ActiveValue::Set( + ssh_connection_string.map(ToOwned::to_owned), + ), + }) + .exec_with_returning(&*tx) + .await?; + + let dev_server_projects = self + .dev_server_projects_update_internal(user_id, &tx) + .await?; + + Ok((dev_server, dev_server_projects)) + }) + .await + } + + pub async fn update_dev_server_token( + &self, + id: DevServerId, + hashed_token: &str, + user_id: UserId, + ) -> crate::Result { + self.transaction(|tx| async move { + let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? 
else { + return Err(anyhow::anyhow!("no dev server with id {}", id))?; + }; + if dev_server.user_id != user_id { + return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; + } + + dev_server::Entity::update(dev_server::ActiveModel { + hashed_token: ActiveValue::Set(hashed_token.to_string()), + ..dev_server.clone().into_active_model() + }) + .exec(&*tx) + .await?; + + let dev_server_projects = self + .dev_server_projects_update_internal(user_id, &tx) + .await?; + + Ok(dev_server_projects) + }) + .await + } + + pub async fn rename_dev_server( + &self, + id: DevServerId, + name: &str, + user_id: UserId, + ) -> crate::Result { + self.transaction(|tx| async move { + let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else { + return Err(anyhow::anyhow!("no dev server with id {}", id))?; + }; + if dev_server.user_id != user_id || name.trim().is_empty() { + return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; + } + + dev_server::Entity::update(dev_server::ActiveModel { + name: ActiveValue::Set(name.trim().to_string()), + ..dev_server.clone().into_active_model() + }) + .exec(&*tx) + .await?; + + let dev_server_projects = self + .dev_server_projects_update_internal(user_id, &tx) + .await?; + + Ok(dev_server_projects) + }) + .await + } + + pub async fn delete_dev_server( + &self, + id: DevServerId, + user_id: UserId, + ) -> crate::Result { + self.transaction(|tx| async move { + let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else { + return Err(anyhow::anyhow!("no dev server with id {}", id))?; + }; + if dev_server.user_id != user_id { + return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; + } + + dev_server_project::Entity::delete_many() + .filter(dev_server_project::Column::DevServerId.eq(id)) + .exec(&*tx) + .await?; + + dev_server::Entity::delete(dev_server.into_active_model()) + .exec(&*tx) + .await?; + + let dev_server_projects = self + .dev_server_projects_update_internal(user_id, &tx) + .await?; + + Ok(dev_server_projects) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/embeddings.rs b/crates/collab/src/db/queries/embeddings.rs new file mode 100644 index 0000000..d901b59 --- /dev/null +++ b/crates/collab/src/db/queries/embeddings.rs @@ -0,0 +1,94 @@ +use super::*; +use time::Duration; +use time::OffsetDateTime; + +impl Database { + pub async fn get_embeddings( + &self, + model: &str, + digests: &[Vec], + ) -> Result, Vec>> { + self.weak_transaction(|tx| async move { + let embeddings = { + let mut db_embeddings = embedding::Entity::find() + .filter( + embedding::Column::Model.eq(model).and( + embedding::Column::Digest + .is_in(digests.iter().map(|digest| digest.as_slice())), + ), + ) + .stream(&*tx) + .await?; + + let mut embeddings = HashMap::default(); + while let Some(db_embedding) = db_embeddings.next().await { + let db_embedding = db_embedding?; + embeddings.insert(db_embedding.digest, db_embedding.dimensions); + } + embeddings + }; + + if !embeddings.is_empty() { + let now = OffsetDateTime::now_utc(); + let retrieved_at = PrimitiveDateTime::new(now.date(), now.time()); + + embedding::Entity::update_many() + .filter( + embedding::Column::Digest + .is_in(embeddings.keys().map(|digest| digest.as_slice())), + ) + .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at)) + .exec(&*tx) + .await?; + } + + Ok(embeddings) + }) + .await + } + + pub async fn save_embeddings( + &self, + model: &str, + embeddings: &HashMap, Vec>, + ) -> Result<()> { + self.weak_transaction(|tx| async move { + 
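+            // Upsert keyed on (model, digest): existing digests are left untouched
+            // (`ON CONFLICT DO NOTHING`), and new rows are stamped with the current
+            // UTC time in `retrieved_at`.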
embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| { + let now_offset_datetime = OffsetDateTime::now_utc(); + let retrieved_at = + PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time()); + + embedding::ActiveModel { + model: ActiveValue::set(model.to_string()), + digest: ActiveValue::set(digest.clone()), + dimensions: ActiveValue::set(dimensions.clone()), + retrieved_at: ActiveValue::set(retrieved_at), + } + })) + .on_conflict( + OnConflict::columns([embedding::Column::Model, embedding::Column::Digest]) + .do_nothing() + .to_owned(), + ) + .exec_without_returning(&*tx) + .await?; + Ok(()) + }) + .await + } + + pub async fn purge_old_embeddings(&self) -> Result<()> { + self.weak_transaction(|tx| async move { + embedding::Entity::delete_many() + .filter( + embedding::Column::RetrievedAt + .lte(OffsetDateTime::now_utc() - Duration::days(60)), + ) + .exec(&*tx) + .await?; + + Ok(()) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs new file mode 100644 index 0000000..d6938fd --- /dev/null +++ b/crates/collab/src/db/queries/extensions.rs @@ -0,0 +1,385 @@ +use std::str::FromStr; + +use chrono::Utc; +use sea_orm::sea_query::IntoCondition; +use util::ResultExt; + +use super::*; + +impl Database { + pub async fn get_extensions( + &self, + filter: Option<&str>, + max_schema_version: i32, + limit: usize, + ) -> Result> { + self.transaction(|tx| async move { + let mut condition = Condition::all() + .add( + extension::Column::LatestVersion + .into_expr() + .eq(extension_version::Column::Version.into_expr()), + ) + .add(extension_version::Column::SchemaVersion.lte(max_schema_version)); + if let Some(filter) = filter { + let fuzzy_name_filter = Self::fuzzy_like_string(filter); + condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter)); + } + + self.get_extensions_where(condition, Some(limit as u64), &tx) + .await + }) + .await + } + + pub async fn get_extensions_by_ids( + &self, + ids: &[&str], + constraints: Option<&ExtensionVersionConstraints>, + ) -> Result> { + self.transaction(|tx| async move { + let extensions = extension::Entity::find() + .filter(extension::Column::ExternalId.is_in(ids.iter().copied())) + .all(&*tx) + .await?; + + let mut max_versions = self + .get_latest_versions_for_extensions(&extensions, constraints, &tx) + .await?; + + Ok(extensions + .into_iter() + .filter_map(|extension| { + let (version, _) = max_versions.remove(&extension.id)?; + Some(metadata_from_extension_and_version(extension, version)) + }) + .collect()) + }) + .await + } + + async fn get_latest_versions_for_extensions( + &self, + extensions: &[extension::Model], + constraints: Option<&ExtensionVersionConstraints>, + tx: &DatabaseTransaction, + ) -> Result> { + let mut versions = extension_version::Entity::find() + .filter( + extension_version::Column::ExtensionId + .is_in(extensions.iter().map(|extension| extension.id)), + ) + .stream(tx) + .await?; + + let mut max_versions = + HashMap::::default(); + while let Some(version) = versions.next().await { + let version = version?; + let Some(extension_version) = SemanticVersion::from_str(&version.version).log_err() + else { + continue; + }; + + if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) { + if max_extension_version > &extension_version { + continue; + } + } + + if let Some(constraints) = constraints { + if !constraints + .schema_versions + .contains(&version.schema_version) + { + 
continue; + } + + if let Some(wasm_api_version) = version.wasm_api_version.as_ref() { + if let Some(version) = SemanticVersion::from_str(wasm_api_version).log_err() { + if !constraints.wasm_api_versions.contains(&version) { + continue; + } + } else { + continue; + } + } + } + + max_versions.insert(version.extension_id, (version, extension_version)); + } + + Ok(max_versions) + } + + /// Returns all of the versions for the extension with the given ID. + pub async fn get_extension_versions( + &self, + extension_id: &str, + ) -> Result> { + self.transaction(|tx| async move { + let condition = extension::Column::ExternalId + .eq(extension_id) + .into_condition(); + + self.get_extensions_where(condition, None, &tx).await + }) + .await + } + + async fn get_extensions_where( + &self, + condition: Condition, + limit: Option, + tx: &DatabaseTransaction, + ) -> Result> { + let extensions = extension::Entity::find() + .inner_join(extension_version::Entity) + .select_also(extension_version::Entity) + .filter(condition) + .order_by_desc(extension::Column::TotalDownloadCount) + .order_by_asc(extension::Column::Name) + .limit(limit) + .all(tx) + .await?; + + Ok(extensions + .into_iter() + .filter_map(|(extension, version)| { + Some(metadata_from_extension_and_version(extension, version?)) + }) + .collect()) + } + + pub async fn get_extension( + &self, + extension_id: &str, + constraints: Option<&ExtensionVersionConstraints>, + ) -> Result> { + self.transaction(|tx| async move { + let extension = extension::Entity::find() + .filter(extension::Column::ExternalId.eq(extension_id)) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such extension: {extension_id}"))?; + + let extensions = [extension]; + let mut versions = self + .get_latest_versions_for_extensions(&extensions, constraints, &tx) + .await?; + let [extension] = extensions; + + Ok(versions.remove(&extension.id).map(|(max_version, _)| { + metadata_from_extension_and_version(extension, max_version) + })) + }) + .await + } + + pub async fn get_extension_version( + &self, + extension_id: &str, + version: &str, + ) -> Result> { + self.transaction(|tx| async move { + let extension = extension::Entity::find() + .filter(extension::Column::ExternalId.eq(extension_id)) + .filter(extension_version::Column::Version.eq(version)) + .inner_join(extension_version::Entity) + .select_also(extension_version::Entity) + .one(&*tx) + .await?; + + Ok(extension.and_then(|(extension, version)| { + Some(metadata_from_extension_and_version(extension, version?)) + })) + }) + .await + } + + pub async fn get_known_extension_versions<'a>(&self) -> Result>> { + self.transaction(|tx| async move { + let mut extension_external_ids_by_id = HashMap::default(); + + let mut rows = extension::Entity::find().stream(&*tx).await?; + while let Some(row) = rows.next().await { + let row = row?; + extension_external_ids_by_id.insert(row.id, row.external_id); + } + drop(rows); + + let mut known_versions_by_extension_id: HashMap> = + HashMap::default(); + let mut rows = extension_version::Entity::find().stream(&*tx).await?; + while let Some(row) = rows.next().await { + let row = row?; + + let Some(extension_id) = extension_external_ids_by_id.get(&row.extension_id) else { + continue; + }; + + let versions = known_versions_by_extension_id + .entry(extension_id.clone()) + .or_default(); + if let Err(ix) = versions.binary_search(&row.version) { + versions.insert(ix, row.version); + } + } + drop(rows); + + Ok(known_versions_by_extension_id) + }) + .await + } + + pub async fn 
insert_extension_versions( + &self, + versions_by_extension_id: &HashMap<&str, Vec>, + ) -> Result<()> { + self.transaction(|tx| async move { + for (external_id, versions) in versions_by_extension_id { + if versions.is_empty() { + continue; + } + + let latest_version = versions + .iter() + .max_by_key(|version| &version.version) + .unwrap(); + + let insert = extension::Entity::insert(extension::ActiveModel { + name: ActiveValue::Set(latest_version.name.clone()), + external_id: ActiveValue::Set(external_id.to_string()), + id: ActiveValue::NotSet, + latest_version: ActiveValue::Set(latest_version.version.to_string()), + total_download_count: ActiveValue::NotSet, + }) + .on_conflict( + OnConflict::columns([extension::Column::ExternalId]) + .update_column(extension::Column::ExternalId) + .to_owned(), + ); + + let extension = if tx.support_returning() { + insert.exec_with_returning(&*tx).await? + } else { + // Sqlite + insert.exec_without_returning(&*tx).await?; + extension::Entity::find() + .filter(extension::Column::ExternalId.eq(*external_id)) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("failed to insert extension"))? + }; + + extension_version::Entity::insert_many(versions.iter().map(|version| { + extension_version::ActiveModel { + extension_id: ActiveValue::Set(extension.id), + published_at: ActiveValue::Set(version.published_at), + version: ActiveValue::Set(version.version.to_string()), + authors: ActiveValue::Set(version.authors.join(", ")), + repository: ActiveValue::Set(version.repository.clone()), + description: ActiveValue::Set(version.description.clone()), + schema_version: ActiveValue::Set(version.schema_version), + wasm_api_version: ActiveValue::Set(version.wasm_api_version.clone()), + download_count: ActiveValue::NotSet, + } + })) + .on_conflict(OnConflict::new().do_nothing().to_owned()) + .exec_without_returning(&*tx) + .await?; + + if let Ok(db_version) = semver::Version::parse(&extension.latest_version) { + if db_version >= latest_version.version { + continue; + } + } + + let mut extension = extension.into_active_model(); + extension.latest_version = ActiveValue::Set(latest_version.version.to_string()); + extension.name = ActiveValue::set(latest_version.name.clone()); + extension::Entity::update(extension).exec(&*tx).await?; + } + + Ok(()) + }) + .await + } + + pub async fn record_extension_download(&self, extension: &str, version: &str) -> Result { + self.transaction(|tx| async move { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryId { + Id, + } + + let extension_id: Option = extension::Entity::find() + .filter(extension::Column::ExternalId.eq(extension)) + .select_only() + .column(extension::Column::Id) + .into_values::<_, QueryId>() + .one(&*tx) + .await?; + let Some(extension_id) = extension_id else { + return Ok(false); + }; + + extension_version::Entity::update_many() + .col_expr( + extension_version::Column::DownloadCount, + extension_version::Column::DownloadCount.into_expr().add(1), + ) + .filter( + extension_version::Column::ExtensionId + .eq(extension_id) + .and(extension_version::Column::Version.eq(version)), + ) + .exec(&*tx) + .await?; + + extension::Entity::update_many() + .col_expr( + extension::Column::TotalDownloadCount, + extension::Column::TotalDownloadCount.into_expr().add(1), + ) + .filter(extension::Column::Id.eq(extension_id)) + .exec(&*tx) + .await?; + + Ok(true) + }) + .await + } +} + +fn metadata_from_extension_and_version( + extension: extension::Model, + version: extension_version::Model, +) -> ExtensionMetadata { + 
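+    // Flattens a (extension, version) row pair into the public metadata shape:
+    // the comma-separated `authors` column is split back into a list and
+    // `published_at` is converted from `time` to `chrono`.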
    ExtensionMetadata {
+        id: extension.external_id.into(),
+        manifest: rpc::ExtensionApiManifest {
+            name: extension.name,
+            version: version.version.into(),
+            authors: version
+                .authors
+                .split(',')
+                .map(|author| author.trim().to_string())
+                .collect::<Vec<_>>(),
+            description: Some(version.description),
+            repository: version.repository,
+            schema_version: Some(version.schema_version),
+            wasm_api_version: version.wasm_api_version,
+        },
+
+        published_at: convert_time_to_chrono(version.published_at),
+        download_count: extension.total_download_count as u64,
+    }
+}
+
+pub fn convert_time_to_chrono(time: time::PrimitiveDateTime) -> chrono::DateTime<Utc> {
+    chrono::DateTime::from_naive_utc_and_offset(
+        chrono::NaiveDateTime::from_timestamp_opt(time.assume_utc().unix_timestamp(), 0).unwrap(),
+        Utc,
+    )
+}
diff --git a/crates/collab/src/db/queries/hosted_projects.rs b/crates/collab/src/db/queries/hosted_projects.rs
new file mode 100644
index 0000000..9ddafe3
--- /dev/null
+++ b/crates/collab/src/db/queries/hosted_projects.rs
@@ -0,0 +1,85 @@
+use rpc::{proto, ErrorCode};
+
+use super::*;
+
+impl Database {
+    pub async fn get_hosted_projects(
+        &self,
+        channel_ids: &Vec<ChannelId>,
+        roles: &HashMap<ChannelId, ChannelRole>,
+        tx: &DatabaseTransaction,
+    ) -> Result<Vec<proto::HostedProject>> {
+        let projects = hosted_project::Entity::find()
+            .find_also_related(project::Entity)
+            .filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
+            .all(tx)
+            .await?
+            .into_iter()
+            .flat_map(|(hosted_project, project)| {
+                if hosted_project.deleted_at.is_some() {
+                    return None;
+                }
+                match hosted_project.visibility {
+                    ChannelVisibility::Public => {}
+                    ChannelVisibility::Members => {
+                        let is_visible = roles
+                            .get(&hosted_project.channel_id)
+                            .map(|role| role.can_see_all_descendants())
+                            .unwrap_or(false);
+                        if !is_visible {
+                            return None;
+                        }
+                    }
+                };
+                Some(proto::HostedProject {
+                    project_id: project?.id.to_proto(),
+                    channel_id: hosted_project.channel_id.to_proto(),
+                    name: hosted_project.name.clone(),
+                    visibility: hosted_project.visibility.into(),
+                })
+            })
+            .collect();
+
+        Ok(projects)
+    }
+
+    pub async fn get_hosted_project(
+        &self,
+        hosted_project_id: HostedProjectId,
+        user_id: UserId,
+        tx: &DatabaseTransaction,
+    ) -> Result<(hosted_project::Model, ChannelRole)> {
+        let project = hosted_project::Entity::find_by_id(hosted_project_id)
+            .one(tx)
+            .await?
+            .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?;
+        let channel = channel::Entity::find_by_id(project.channel_id)
+            .one(tx)
+            .await?
+            .ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?;
+
+        let role = match project.visibility {
+            ChannelVisibility::Public => {
+                self.check_user_is_channel_participant(&channel, user_id, tx)
+                    .await?
+            }
+            ChannelVisibility::Members => {
+                self.check_user_is_channel_member(&channel, user_id, tx)
+                    .await?
+            }
+        };
+
+        Ok((project, role))
+    }
+
+    pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result<bool> {
+        self.transaction(|tx| async move {
+            Ok(project::Entity::find_by_id(project_id)
+                .one(&*tx)
+                .await?
+                .map(|project| project.hosted_project_id.is_some())
+                .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?)
+ }) + .await + } +} diff --git a/crates/collab/src/db/queries/messages.rs b/crates/collab/src/db/queries/messages.rs new file mode 100644 index 0000000..bc82120 --- /dev/null +++ b/crates/collab/src/db/queries/messages.rs @@ -0,0 +1,725 @@ +use super::*; +use rpc::Notification; +use sea_orm::{SelectColumns, TryInsertResult}; +use time::OffsetDateTime; +use util::ResultExt; + +impl Database { + /// Inserts a record representing a user joining the chat for a given channel. + pub async fn join_channel_chat( + &self, + channel_id: ChannelId, + connection_id: ConnectionId, + user_id: UserId, + ) -> Result<()> { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + channel_chat_participant::ActiveModel { + id: ActiveValue::NotSet, + channel_id: ActiveValue::Set(channel_id), + user_id: ActiveValue::Set(user_id), + connection_id: ActiveValue::Set(connection_id.id as i32), + connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)), + } + .insert(&*tx) + .await?; + Ok(()) + }) + .await + } + + /// Removes `channel_chat_participant` records associated with the given connection ID. + pub async fn channel_chat_connection_lost( + &self, + connection_id: ConnectionId, + tx: &DatabaseTransaction, + ) -> Result<()> { + channel_chat_participant::Entity::delete_many() + .filter( + Condition::all() + .add( + channel_chat_participant::Column::ConnectionServerId + .eq(connection_id.owner_id), + ) + .add(channel_chat_participant::Column::ConnectionId.eq(connection_id.id)), + ) + .exec(tx) + .await?; + Ok(()) + } + + /// Removes `channel_chat_participant` records associated with the given user ID so they + /// will no longer get chat notifications. + pub async fn leave_channel_chat( + &self, + channel_id: ChannelId, + connection_id: ConnectionId, + _user_id: UserId, + ) -> Result<()> { + self.transaction(|tx| async move { + channel_chat_participant::Entity::delete_many() + .filter( + Condition::all() + .add( + channel_chat_participant::Column::ConnectionServerId + .eq(connection_id.owner_id), + ) + .add(channel_chat_participant::Column::ConnectionId.eq(connection_id.id)) + .add(channel_chat_participant::Column::ChannelId.eq(channel_id)), + ) + .exec(&*tx) + .await?; + + Ok(()) + }) + .await + } + + /// Retrieves the messages in the specified channel. + /// + /// Use `before_message_id` to paginate through the channel's messages. + pub async fn get_channel_messages( + &self, + channel_id: ChannelId, + user_id: UserId, + count: usize, + before_message_id: Option, + ) -> Result> { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + + let mut condition = + Condition::all().add(channel_message::Column::ChannelId.eq(channel_id)); + + if let Some(before_message_id) = before_message_id { + condition = condition.add(channel_message::Column::Id.lt(before_message_id)); + } + + let rows = channel_message::Entity::find() + .filter(condition) + .order_by_desc(channel_message::Column::Id) + .limit(count as u64) + .all(&*tx) + .await?; + + self.load_channel_messages(rows, &tx).await + }) + .await + } + + /// Returns the channel messages with the given IDs. 
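+    ///
+    /// A minimal usage sketch (illustrative only; assumes a `db: Database` handle,
+    /// a `user_id`, and a `message_ids` slice are already in scope):
+    ///
+    /// ```ignore
+    /// let messages = db
+    ///     .get_channel_messages_by_id(user_id, &message_ids)
+    ///     .await?;
+    /// for message in &messages {
+    ///     println!("{}: {}", message.sender_id, message.body);
+    /// }
+    /// ```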
+ pub async fn get_channel_messages_by_id( + &self, + user_id: UserId, + message_ids: &[MessageId], + ) -> Result> { + self.transaction(|tx| async move { + let rows = channel_message::Entity::find() + .filter(channel_message::Column::Id.is_in(message_ids.iter().copied())) + .order_by_desc(channel_message::Column::Id) + .all(&*tx) + .await?; + + let mut channels = HashMap::::default(); + for row in &rows { + channels.insert( + row.channel_id, + self.get_channel_internal(row.channel_id, &tx).await?, + ); + } + + for (_, channel) in channels { + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + } + + let messages = self.load_channel_messages(rows, &tx).await?; + Ok(messages) + }) + .await + } + + async fn load_channel_messages( + &self, + rows: Vec, + tx: &DatabaseTransaction, + ) -> Result> { + let mut messages = rows + .into_iter() + .map(|row| { + let nonce = row.nonce.as_u64_pair(); + proto::ChannelMessage { + id: row.id.to_proto(), + sender_id: row.sender_id.to_proto(), + body: row.body, + timestamp: row.sent_at.assume_utc().unix_timestamp() as u64, + mentions: vec![], + nonce: Some(proto::Nonce { + upper_half: nonce.0, + lower_half: nonce.1, + }), + reply_to_message_id: row.reply_to_message_id.map(|id| id.to_proto()), + edited_at: row + .edited_at + .map(|t| t.assume_utc().unix_timestamp() as u64), + } + }) + .collect::>(); + messages.reverse(); + + let mut mentions = channel_message_mention::Entity::find() + .filter(channel_message_mention::Column::MessageId.is_in(messages.iter().map(|m| m.id))) + .order_by_asc(channel_message_mention::Column::MessageId) + .order_by_asc(channel_message_mention::Column::StartOffset) + .stream(tx) + .await?; + + let mut message_ix = 0; + while let Some(mention) = mentions.next().await { + let mention = mention?; + let message_id = mention.message_id.to_proto(); + while let Some(message) = messages.get_mut(message_ix) { + if message.id < message_id { + message_ix += 1; + } else { + if message.id == message_id { + message.mentions.push(proto::ChatMention { + range: Some(proto::Range { + start: mention.start_offset as u64, + end: mention.end_offset as u64, + }), + user_id: mention.user_id.to_proto(), + }); + } + break; + } + } + } + + Ok(messages) + } + + fn format_mentions_to_entities( + &self, + message_id: MessageId, + body: &str, + mentions: &[proto::ChatMention], + ) -> Result> { + Ok(mentions + .iter() + .filter_map(|mention| { + let range = mention.range.as_ref()?; + if !body.is_char_boundary(range.start as usize) + || !body.is_char_boundary(range.end as usize) + { + return None; + } + Some(channel_message_mention::ActiveModel { + message_id: ActiveValue::Set(message_id), + start_offset: ActiveValue::Set(range.start as i32), + end_offset: ActiveValue::Set(range.end as i32), + user_id: ActiveValue::Set(UserId::from_proto(mention.user_id)), + }) + }) + .collect::>()) + } + + /// Creates a new channel message. 
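+    ///
+    /// A minimal usage sketch (illustrative only; `db`, `channel_id`, `user_id`, and a
+    /// client-chosen `nonce: u128` are assumed to be in scope; the nonce is what lets a
+    /// retried send be deduplicated via the `(sender_id, nonce)` conflict clause below):
+    ///
+    /// ```ignore
+    /// let created = db
+    ///     .create_channel_message(
+    ///         channel_id,
+    ///         user_id,
+    ///         "hello, world",
+    ///         &[],                       // no mentions
+    ///         OffsetDateTime::now_utc(), // sent-at timestamp
+    ///         nonce,
+    ///         None,                      // not a reply
+    ///     )
+    ///     .await?;
+    /// ```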
+ #[allow(clippy::too_many_arguments)] + pub async fn create_channel_message( + &self, + channel_id: ChannelId, + user_id: UserId, + body: &str, + mentions: &[proto::ChatMention], + timestamp: OffsetDateTime, + nonce: u128, + reply_to_message_id: Option, + ) -> Result { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + + let mut rows = channel_chat_participant::Entity::find() + .filter(channel_chat_participant::Column::ChannelId.eq(channel_id)) + .stream(&*tx) + .await?; + + let mut is_participant = false; + let mut participant_connection_ids = HashSet::default(); + let mut participant_user_ids = Vec::new(); + while let Some(row) = rows.next().await { + let row = row?; + if row.user_id == user_id { + is_participant = true; + } + participant_user_ids.push(row.user_id); + participant_connection_ids.insert(row.connection()); + } + drop(rows); + + if !is_participant { + Err(anyhow!("not a chat participant"))?; + } + + let timestamp = timestamp.to_offset(time::UtcOffset::UTC); + let timestamp = time::PrimitiveDateTime::new(timestamp.date(), timestamp.time()); + + let result = channel_message::Entity::insert(channel_message::ActiveModel { + channel_id: ActiveValue::Set(channel_id), + sender_id: ActiveValue::Set(user_id), + body: ActiveValue::Set(body.to_string()), + sent_at: ActiveValue::Set(timestamp), + nonce: ActiveValue::Set(Uuid::from_u128(nonce)), + id: ActiveValue::NotSet, + reply_to_message_id: ActiveValue::Set(reply_to_message_id), + edited_at: ActiveValue::NotSet, + }) + .on_conflict( + OnConflict::columns([ + channel_message::Column::SenderId, + channel_message::Column::Nonce, + ]) + .do_nothing() + .to_owned(), + ) + .do_nothing() + .exec(&*tx) + .await?; + + let message_id; + let mut notifications = Vec::new(); + match result { + TryInsertResult::Inserted(result) => { + message_id = result.last_insert_id; + let mentioned_user_ids = + mentions.iter().map(|m| m.user_id).collect::>(); + + let mentions = self.format_mentions_to_entities(message_id, body, mentions)?; + if !mentions.is_empty() { + channel_message_mention::Entity::insert_many(mentions) + .exec(&*tx) + .await?; + } + + for mentioned_user in mentioned_user_ids { + notifications.extend( + self.create_notification( + UserId::from_proto(mentioned_user), + rpc::Notification::ChannelMessageMention { + message_id: message_id.to_proto(), + sender_id: user_id.to_proto(), + channel_id: channel_id.to_proto(), + }, + false, + &tx, + ) + .await?, + ); + } + + self.observe_channel_message_internal(channel_id, user_id, message_id, &tx) + .await?; + } + _ => { + message_id = channel_message::Entity::find() + .filter(channel_message::Column::Nonce.eq(Uuid::from_u128(nonce))) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("failed to insert message"))? 
+ .id; + } + } + + Ok(CreatedChannelMessage { + message_id, + participant_connection_ids, + notifications, + }) + }) + .await + } + + pub async fn observe_channel_message( + &self, + channel_id: ChannelId, + user_id: UserId, + message_id: MessageId, + ) -> Result { + self.transaction(|tx| async move { + self.observe_channel_message_internal(channel_id, user_id, message_id, &tx) + .await?; + let mut batch = NotificationBatch::default(); + batch.extend( + self.mark_notification_as_read( + user_id, + &Notification::ChannelMessageMention { + message_id: message_id.to_proto(), + sender_id: Default::default(), + channel_id: Default::default(), + }, + &tx, + ) + .await?, + ); + Ok(batch) + }) + .await + } + + async fn observe_channel_message_internal( + &self, + channel_id: ChannelId, + user_id: UserId, + message_id: MessageId, + tx: &DatabaseTransaction, + ) -> Result<()> { + observed_channel_messages::Entity::insert(observed_channel_messages::ActiveModel { + user_id: ActiveValue::Set(user_id), + channel_id: ActiveValue::Set(channel_id), + channel_message_id: ActiveValue::Set(message_id), + }) + .on_conflict( + OnConflict::columns([ + observed_channel_messages::Column::ChannelId, + observed_channel_messages::Column::UserId, + ]) + .update_column(observed_channel_messages::Column::ChannelMessageId) + .action_cond_where(observed_channel_messages::Column::ChannelMessageId.lt(message_id)) + .to_owned(), + ) + // TODO: Try to upgrade SeaORM so we don't have to do this hack around their bug + .exec_without_returning(tx) + .await?; + Ok(()) + } + + pub async fn observed_channel_messages( + &self, + channel_ids: &[ChannelId], + user_id: UserId, + tx: &DatabaseTransaction, + ) -> Result> { + let rows = observed_channel_messages::Entity::find() + .filter(observed_channel_messages::Column::UserId.eq(user_id)) + .filter( + observed_channel_messages::Column::ChannelId + .is_in(channel_ids.iter().map(|id| id.0)), + ) + .all(tx) + .await?; + + Ok(rows + .into_iter() + .map(|message| proto::ChannelMessageId { + channel_id: message.channel_id.to_proto(), + message_id: message.channel_message_id.to_proto(), + }) + .collect()) + } + + pub async fn latest_channel_messages( + &self, + channel_ids: &[ChannelId], + tx: &DatabaseTransaction, + ) -> Result> { + let mut values = String::new(); + for id in channel_ids { + if !values.is_empty() { + values.push_str(", "); + } + write!(&mut values, "({})", id).unwrap(); + } + + if values.is_empty() { + return Ok(Vec::default()); + } + + let sql = format!( + r#" + SELECT + * + FROM ( + SELECT + *, + row_number() OVER ( + PARTITION BY channel_id + ORDER BY id DESC + ) as row_number + FROM channel_messages + WHERE + channel_id in ({values}) + ) AS messages + WHERE + row_number = 1 + "#, + ); + + let stmt = Statement::from_string(self.pool.get_database_backend(), sql); + let mut last_messages = channel_message::Model::find_by_statement(stmt) + .stream(tx) + .await?; + + let mut results = Vec::new(); + while let Some(result) = last_messages.next().await { + let message = result?; + results.push(proto::ChannelMessageId { + channel_id: message.channel_id.to_proto(), + message_id: message.id.to_proto(), + }); + } + + Ok(results) + } + + fn get_notification_kind_id_by_name(&self, notification_kind: &str) -> Option { + self.notification_kinds_by_id + .iter() + .find(|(_, kind)| **kind == notification_kind) + .map(|kind| kind.0 .0) + } + + /// Removes the channel message with the given ID. 
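+    ///
+    /// A minimal usage sketch (illustrative only; `db`, `channel_id`, `message_id`, and
+    /// `user_id` are assumed to be in scope; the returned connection IDs are the chat
+    /// participants to notify, and the notification IDs are the mention notifications
+    /// deleted along with the message):
+    ///
+    /// ```ignore
+    /// let (participant_connection_ids, deleted_notification_ids) = db
+    ///     .remove_channel_message(channel_id, message_id, user_id)
+    ///     .await?;
+    /// ```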
+ pub async fn remove_channel_message( + &self, + channel_id: ChannelId, + message_id: MessageId, + user_id: UserId, + ) -> Result<(Vec, Vec)> { + self.transaction(|tx| async move { + let mut rows = channel_chat_participant::Entity::find() + .filter(channel_chat_participant::Column::ChannelId.eq(channel_id)) + .stream(&*tx) + .await?; + + let mut is_participant = false; + let mut participant_connection_ids = Vec::new(); + while let Some(row) = rows.next().await { + let row = row?; + if row.user_id == user_id { + is_participant = true; + } + participant_connection_ids.push(row.connection()); + } + drop(rows); + + if !is_participant { + Err(anyhow!("not a chat participant"))?; + } + + let result = channel_message::Entity::delete_by_id(message_id) + .filter(channel_message::Column::SenderId.eq(user_id)) + .exec(&*tx) + .await?; + + if result.rows_affected == 0 { + let channel = self.get_channel_internal(channel_id, &tx).await?; + if self + .check_user_is_channel_admin(&channel, user_id, &tx) + .await + .is_ok() + { + let result = channel_message::Entity::delete_by_id(message_id) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("no such message"))?; + } + } else { + Err(anyhow!("operation could not be completed"))?; + } + } + + let notification_kind_id = + self.get_notification_kind_id_by_name("ChannelMessageMention"); + + let existing_notifications = notification::Entity::find() + .filter(notification::Column::EntityId.eq(message_id)) + .filter(notification::Column::Kind.eq(notification_kind_id)) + .select_column(notification::Column::Id) + .all(&*tx) + .await?; + + let existing_notification_ids = existing_notifications + .into_iter() + .map(|notification| notification.id) + .collect(); + + // remove all the mention notifications for this message + notification::Entity::delete_many() + .filter(notification::Column::EntityId.eq(message_id)) + .filter(notification::Column::Kind.eq(notification_kind_id)) + .exec(&*tx) + .await?; + + Ok((participant_connection_ids, existing_notification_ids)) + }) + .await + } + + /// Updates the channel message with the given ID, body and timestamp(edited_at). + pub async fn update_channel_message( + &self, + channel_id: ChannelId, + message_id: MessageId, + user_id: UserId, + body: &str, + mentions: &[proto::ChatMention], + edited_at: OffsetDateTime, + ) -> Result { + self.transaction(|tx| async move { + let channel = self.get_channel_internal(channel_id, &tx).await?; + self.check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + + let mut rows = channel_chat_participant::Entity::find() + .filter(channel_chat_participant::Column::ChannelId.eq(channel_id)) + .stream(&*tx) + .await?; + + let mut is_participant = false; + let mut participant_connection_ids = Vec::new(); + let mut participant_user_ids = Vec::new(); + while let Some(row) = rows.next().await { + let row = row?; + if row.user_id == user_id { + is_participant = true; + } + participant_user_ids.push(row.user_id); + participant_connection_ids.push(row.connection()); + } + drop(rows); + + if !is_participant { + Err(anyhow!("not a chat participant"))?; + } + + let channel_message = channel_message::Entity::find_by_id(message_id) + .filter(channel_message::Column::SenderId.eq(user_id)) + .one(&*tx) + .await?; + + let Some(channel_message) = channel_message else { + Err(anyhow!("Channel message not found"))? 
+ }; + + let edited_at = edited_at.to_offset(time::UtcOffset::UTC); + let edited_at = time::PrimitiveDateTime::new(edited_at.date(), edited_at.time()); + + let updated_message = channel_message::ActiveModel { + body: ActiveValue::Set(body.to_string()), + edited_at: ActiveValue::Set(Some(edited_at)), + reply_to_message_id: ActiveValue::Unchanged(channel_message.reply_to_message_id), + id: ActiveValue::Unchanged(message_id), + channel_id: ActiveValue::Unchanged(channel_id), + sender_id: ActiveValue::Unchanged(user_id), + sent_at: ActiveValue::Unchanged(channel_message.sent_at), + nonce: ActiveValue::Unchanged(channel_message.nonce), + }; + + let result = channel_message::Entity::update_many() + .set(updated_message) + .filter(channel_message::Column::Id.eq(message_id)) + .filter(channel_message::Column::SenderId.eq(user_id)) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + return Err(anyhow!( + "Attempted to edit a message (id: {message_id}) which does not exist anymore." + ))?; + } + + // we have to fetch the old mentions, + // so we don't send a notification when the message has been edited that you are mentioned in + let old_mentions = channel_message_mention::Entity::find() + .filter(channel_message_mention::Column::MessageId.eq(message_id)) + .all(&*tx) + .await?; + + // remove all existing mentions + channel_message_mention::Entity::delete_many() + .filter(channel_message_mention::Column::MessageId.eq(message_id)) + .exec(&*tx) + .await?; + + let new_mentions = self.format_mentions_to_entities(message_id, body, mentions)?; + if !new_mentions.is_empty() { + // insert new mentions + channel_message_mention::Entity::insert_many(new_mentions) + .exec(&*tx) + .await?; + } + + let mut update_mention_user_ids = HashSet::default(); + let mut new_mention_user_ids = + mentions.iter().map(|m| m.user_id).collect::>(); + // Filter out users that were mentioned before + for mention in &old_mentions { + if new_mention_user_ids.contains(&mention.user_id.to_proto()) { + update_mention_user_ids.insert(mention.user_id.to_proto()); + } + + new_mention_user_ids.remove(&mention.user_id.to_proto()); + } + + let notification_kind_id = + self.get_notification_kind_id_by_name("ChannelMessageMention"); + + let existing_notifications = notification::Entity::find() + .filter(notification::Column::EntityId.eq(message_id)) + .filter(notification::Column::Kind.eq(notification_kind_id)) + .all(&*tx) + .await?; + + // determine which notifications should be updated or deleted + let mut deleted_notification_ids = HashSet::default(); + let mut updated_mention_notifications = Vec::new(); + for notification in existing_notifications { + if update_mention_user_ids.contains(¬ification.recipient_id.to_proto()) { + if let Some(notification) = + self::notifications::model_to_proto(self, notification).log_err() + { + updated_mention_notifications.push(notification); + } + } else { + deleted_notification_ids.insert(notification.id); + } + } + + let mut notifications = Vec::new(); + for mentioned_user in new_mention_user_ids { + notifications.extend( + self.create_notification( + UserId::from_proto(mentioned_user), + rpc::Notification::ChannelMessageMention { + message_id: message_id.to_proto(), + sender_id: user_id.to_proto(), + channel_id: channel_id.to_proto(), + }, + false, + &tx, + ) + .await?, + ); + } + + Ok(UpdatedChannelMessage { + message_id, + participant_connection_ids, + notifications, + reply_to_message_id: channel_message.reply_to_message_id, + timestamp: channel_message.sent_at, + 
deleted_mention_notification_ids: deleted_notification_ids + .into_iter() + .collect::>(), + updated_mention_notifications, + }) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/notifications.rs b/crates/collab/src/db/queries/notifications.rs new file mode 100644 index 0000000..e0993f0 --- /dev/null +++ b/crates/collab/src/db/queries/notifications.rs @@ -0,0 +1,269 @@ +use super::*; +use rpc::Notification; +use util::ResultExt; + +impl Database { + /// Initializes the different kinds of notifications by upserting records for them. + pub async fn initialize_notification_kinds(&mut self) -> Result<()> { + notification_kind::Entity::insert_many(Notification::all_variant_names().iter().map( + |kind| notification_kind::ActiveModel { + name: ActiveValue::Set(kind.to_string()), + ..Default::default() + }, + )) + .on_conflict(OnConflict::new().do_nothing().to_owned()) + .exec_without_returning(&self.pool) + .await?; + + let mut rows = notification_kind::Entity::find().stream(&self.pool).await?; + while let Some(row) = rows.next().await { + let row = row?; + self.notification_kinds_by_name.insert(row.name, row.id); + } + + for name in Notification::all_variant_names() { + if let Some(id) = self.notification_kinds_by_name.get(*name).copied() { + self.notification_kinds_by_id.insert(id, name); + } + } + + Ok(()) + } + + /// Returns the notifications for the given recipient. + pub async fn get_notifications( + &self, + recipient_id: UserId, + limit: usize, + before_id: Option, + ) -> Result> { + self.transaction(|tx| async move { + let mut result = Vec::new(); + let mut condition = + Condition::all().add(notification::Column::RecipientId.eq(recipient_id)); + + if let Some(before_id) = before_id { + condition = condition.add(notification::Column::Id.lt(before_id)); + } + + let mut rows = notification::Entity::find() + .filter(condition) + .order_by_desc(notification::Column::Id) + .limit(limit as u64) + .stream(&*tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + if let Some(proto) = model_to_proto(self, row).log_err() { + result.push(proto); + } + } + result.reverse(); + Ok(result) + }) + .await + } + + /// Creates a notification. If `avoid_duplicates` is set to true, then avoid + /// creating a new notification if the given recipient already has an + /// unread notification with the given kind and entity id. + pub async fn create_notification( + &self, + recipient_id: UserId, + notification: Notification, + avoid_duplicates: bool, + tx: &DatabaseTransaction, + ) -> Result> { + if avoid_duplicates { + if self + .find_notification(recipient_id, ¬ification, tx) + .await? + .is_some() + { + return Ok(None); + } + } + + let proto = notification.to_proto(); + let kind = notification_kind_from_proto(self, &proto)?; + let model = notification::ActiveModel { + recipient_id: ActiveValue::Set(recipient_id), + kind: ActiveValue::Set(kind), + entity_id: ActiveValue::Set(proto.entity_id.map(|id| id as i32)), + content: ActiveValue::Set(proto.content.clone()), + ..Default::default() + } + .save(tx) + .await?; + + Ok(Some(( + recipient_id, + proto::Notification { + id: model.id.as_ref().to_proto(), + kind: proto.kind, + timestamp: model.created_at.as_ref().assume_utc().unix_timestamp() as u64, + is_read: false, + response: None, + content: proto.content, + entity_id: proto.entity_id, + }, + ))) + } + + /// Remove an unread notification with the given recipient, kind and + /// entity id. 
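+    ///
+    /// A minimal usage sketch (illustrative only; runs inside an existing database
+    /// transaction `tx`, with `recipient_id` and a `notification: Notification` value
+    /// in scope):
+    ///
+    /// ```ignore
+    /// if let Some(notification_id) = db
+    ///     .remove_notification(recipient_id, notification, &tx)
+    ///     .await?
+    /// {
+    ///     // A matching unread notification existed and has been deleted.
+    /// }
+    /// ```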
+ pub async fn remove_notification( + &self, + recipient_id: UserId, + notification: Notification, + tx: &DatabaseTransaction, + ) -> Result> { + let id = self + .find_notification(recipient_id, ¬ification, tx) + .await?; + if let Some(id) = id { + notification::Entity::delete_by_id(id).exec(tx).await?; + } + Ok(id) + } + + /// Populate the response for the notification with the given kind and + /// entity id. + pub async fn mark_notification_as_read_with_response( + &self, + recipient_id: UserId, + notification: &Notification, + response: bool, + tx: &DatabaseTransaction, + ) -> Result> { + self.mark_notification_as_read_internal(recipient_id, notification, Some(response), tx) + .await + } + + /// Marks the given notification as read. + pub async fn mark_notification_as_read( + &self, + recipient_id: UserId, + notification: &Notification, + tx: &DatabaseTransaction, + ) -> Result> { + self.mark_notification_as_read_internal(recipient_id, notification, None, tx) + .await + } + + /// Marks the notification with the given ID as read. + pub async fn mark_notification_as_read_by_id( + &self, + recipient_id: UserId, + notification_id: NotificationId, + ) -> Result { + self.transaction(|tx| async move { + let row = notification::Entity::update(notification::ActiveModel { + id: ActiveValue::Unchanged(notification_id), + recipient_id: ActiveValue::Unchanged(recipient_id), + is_read: ActiveValue::Set(true), + ..Default::default() + }) + .exec(&*tx) + .await?; + Ok(model_to_proto(self, row) + .map(|notification| (recipient_id, notification)) + .into_iter() + .collect()) + }) + .await + } + + async fn mark_notification_as_read_internal( + &self, + recipient_id: UserId, + notification: &Notification, + response: Option, + tx: &DatabaseTransaction, + ) -> Result> { + if let Some(id) = self + .find_notification(recipient_id, notification, tx) + .await? + { + let row = notification::Entity::update(notification::ActiveModel { + id: ActiveValue::Unchanged(id), + recipient_id: ActiveValue::Unchanged(recipient_id), + is_read: ActiveValue::Set(true), + response: if let Some(response) = response { + ActiveValue::Set(Some(response)) + } else { + ActiveValue::NotSet + }, + ..Default::default() + }) + .exec(tx) + .await?; + Ok(model_to_proto(self, row) + .map(|notification| (recipient_id, notification)) + .ok()) + } else { + Ok(None) + } + } + + /// Find an unread notification by its recipient, kind and entity id. + async fn find_notification( + &self, + recipient_id: UserId, + notification: &Notification, + tx: &DatabaseTransaction, + ) -> Result> { + let proto = notification.to_proto(); + let kind = notification_kind_from_proto(self, &proto)?; + + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryIds { + Id, + } + + Ok(notification::Entity::find() + .select_only() + .column(notification::Column::Id) + .filter( + Condition::all() + .add(notification::Column::RecipientId.eq(recipient_id)) + .add(notification::Column::IsRead.eq(false)) + .add(notification::Column::Kind.eq(kind)) + .add(if proto.entity_id.is_some() { + notification::Column::EntityId.eq(proto.entity_id) + } else { + notification::Column::EntityId.is_null() + }), + ) + .into_values::<_, QueryIds>() + .one(tx) + .await?) 
+ } +} + +pub fn model_to_proto(this: &Database, row: notification::Model) -> Result { + let kind = this + .notification_kinds_by_id + .get(&row.kind) + .ok_or_else(|| anyhow!("Unknown notification kind"))?; + Ok(proto::Notification { + id: row.id.to_proto(), + kind: kind.to_string(), + timestamp: row.created_at.assume_utc().unix_timestamp() as u64, + is_read: row.is_read, + response: row.response, + content: row.content, + entity_id: row.entity_id.map(|id| id as u64), + }) +} + +fn notification_kind_from_proto( + this: &Database, + proto: &proto::Notification, +) -> Result { + Ok(this + .notification_kinds_by_name + .get(&proto.kind) + .copied() + .ok_or_else(|| anyhow!("invalid notification kind {:?}", proto.kind))?) +} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs new file mode 100644 index 0000000..2a7c3e1 --- /dev/null +++ b/crates/collab/src/db/queries/projects.rs @@ -0,0 +1,1307 @@ +use util::ResultExt; + +use super::*; + +impl Database { + /// Returns the count of all projects, excluding ones marked as admin. + pub async fn project_count_excluding_admins(&self) -> Result { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryAs { + Count, + } + + self.transaction(|tx| async move { + Ok(project::Entity::find() + .select_only() + .column_as(project::Column::Id.count(), QueryAs::Count) + .inner_join(user::Entity) + .filter(user::Column::Admin.eq(false)) + .into_values::<_, QueryAs>() + .one(&*tx) + .await? + .unwrap_or(0i64) as usize) + }) + .await + } + + /// Shares a project with the given room. + pub async fn share_project( + &self, + room_id: RoomId, + connection: ConnectionId, + worktrees: &[proto::WorktreeMetadata], + dev_server_project_id: Option, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + let participant = room_participant::Entity::find() + .filter( + Condition::all() + .add( + room_participant::Column::AnsweringConnectionId + .eq(connection.id as i32), + ) + .add( + room_participant::Column::AnsweringConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("could not find participant"))?; + if participant.room_id != room_id { + return Err(anyhow!("shared project on unexpected room"))?; + } + if !participant + .role + .unwrap_or(ChannelRole::Member) + .can_edit_projects() + { + return Err(anyhow!("guests cannot share projects"))?; + } + + if let Some(dev_server_project_id) = dev_server_project_id { + let project = project::Entity::find() + .filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id))) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no remote project"))?; + + if project.room_id.is_some() { + return Err(anyhow!("project already shared"))?; + }; + + let project = project::Entity::update(project::ActiveModel { + room_id: ActiveValue::Set(Some(room_id)), + ..project.into_active_model() + }) + .exec(&*tx) + .await?; + + // todo! 
check user is a project-collaborator + let room = self.get_room(room_id, &tx).await?; + return Ok((project.id, room)); + } + + let project = project::ActiveModel { + room_id: ActiveValue::set(Some(participant.room_id)), + host_user_id: ActiveValue::set(Some(participant.user_id)), + host_connection_id: ActiveValue::set(Some(connection.id as i32)), + host_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + id: ActiveValue::NotSet, + hosted_project_id: ActiveValue::Set(None), + dev_server_project_id: ActiveValue::Set(None), + } + .insert(&*tx) + .await?; + + if !worktrees.is_empty() { + worktree::Entity::insert_many(worktrees.iter().map(|worktree| { + worktree::ActiveModel { + id: ActiveValue::set(worktree.id as i64), + project_id: ActiveValue::set(project.id), + abs_path: ActiveValue::set(worktree.abs_path.clone()), + root_name: ActiveValue::set(worktree.root_name.clone()), + visible: ActiveValue::set(worktree.visible), + scan_id: ActiveValue::set(0), + completed_scan_id: ActiveValue::set(0), + } + })) + .exec(&*tx) + .await?; + } + + project_collaborator::ActiveModel { + project_id: ActiveValue::set(project.id), + connection_id: ActiveValue::set(connection.id as i32), + connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)), + user_id: ActiveValue::set(participant.user_id), + replica_id: ActiveValue::set(ReplicaId(0)), + is_host: ActiveValue::set(true), + ..Default::default() + } + .insert(&*tx) + .await?; + + let room = self.get_room(room_id, &tx).await?; + Ok((project.id, room)) + }) + .await + } + + pub async fn delete_project(&self, project_id: ProjectId) -> Result<()> { + self.weak_transaction(|tx| async move { + project::Entity::delete_by_id(project_id).exec(&*tx).await?; + Ok(()) + }) + .await + } + + /// Unshares the given project. + pub async fn unshare_project( + &self, + project_id: ProjectId, + connection: ConnectionId, + user_id: Option, + ) -> Result, Vec)>> { + self.project_transaction(project_id, |tx| async move { + let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("project not found"))?; + let room = if let Some(room_id) = project.room_id { + Some(self.get_room(room_id, &tx).await?) + } else { + None + }; + if project.host_connection()? == connection { + return Ok((true, room, guest_connection_ids)); + } + if let Some(dev_server_project_id) = project.dev_server_project_id { + if let Some(user_id) = user_id { + if user_id + != self + .owner_for_dev_server_project(dev_server_project_id, &tx) + .await? + { + Err(anyhow!("cannot unshare a project hosted by another user"))? + } + project::Entity::update(project::ActiveModel { + room_id: ActiveValue::Set(None), + ..project.into_active_model() + }) + .exec(&*tx) + .await?; + return Ok((false, room, guest_connection_ids)); + } + } + + Err(anyhow!("cannot unshare a project hosted by another user"))? + }) + .await + } + + /// Updates the worktrees associated with the given project. 
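+    ///
+    /// A minimal usage sketch (illustrative only; `db`, `project_id`, the host's
+    /// `connection`, and the latest `worktrees: Vec<proto::WorktreeMetadata>` are assumed
+    /// to be in scope; `into_inner` unwraps the transaction guard, as the other helpers
+    /// in this file do):
+    ///
+    /// ```ignore
+    /// let (room, guest_connection_ids) = db
+    ///     .update_project(project_id, connection, &worktrees)
+    ///     .await?
+    ///     .into_inner();
+    /// ```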
+ pub async fn update_project( + &self, + project_id: ProjectId, + connection: ConnectionId, + worktrees: &[proto::WorktreeMetadata], + ) -> Result, Vec)>> { + self.project_transaction(project_id, |tx| async move { + let project = project::Entity::find_by_id(project_id) + .filter( + Condition::all() + .add(project::Column::HostConnectionId.eq(connection.id as i32)) + .add( + project::Column::HostConnectionServerId.eq(connection.owner_id as i32), + ), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + + self.update_project_worktrees(project.id, worktrees, &tx) + .await?; + + let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?; + + let room = if let Some(room_id) = project.room_id { + Some(self.get_room(room_id, &tx).await?) + } else { + None + }; + + Ok((room, guest_connection_ids)) + }) + .await + } + + pub(in crate::db) async fn update_project_worktrees( + &self, + project_id: ProjectId, + worktrees: &[proto::WorktreeMetadata], + tx: &DatabaseTransaction, + ) -> Result<()> { + if !worktrees.is_empty() { + worktree::Entity::insert_many(worktrees.iter().map(|worktree| worktree::ActiveModel { + id: ActiveValue::set(worktree.id as i64), + project_id: ActiveValue::set(project_id), + abs_path: ActiveValue::set(worktree.abs_path.clone()), + root_name: ActiveValue::set(worktree.root_name.clone()), + visible: ActiveValue::set(worktree.visible), + scan_id: ActiveValue::set(0), + completed_scan_id: ActiveValue::set(0), + })) + .on_conflict( + OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id]) + .update_column(worktree::Column::RootName) + .to_owned(), + ) + .exec(tx) + .await?; + } + + worktree::Entity::delete_many() + .filter(worktree::Column::ProjectId.eq(project_id).and( + worktree::Column::Id.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)), + )) + .exec(tx) + .await?; + + Ok(()) + } + + pub async fn update_worktree( + &self, + update: &proto::UpdateWorktree, + connection: ConnectionId, + ) -> Result>> { + let project_id = ProjectId::from_proto(update.project_id); + let worktree_id = update.worktree_id as i64; + self.project_transaction(project_id, |tx| async move { + // Ensure the update comes from the host. + let _project = project::Entity::find_by_id(project_id) + .filter( + Condition::all() + .add(project::Column::HostConnectionId.eq(connection.id as i32)) + .add( + project::Column::HostConnectionServerId.eq(connection.owner_id as i32), + ), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + + // Update metadata. 
+ worktree::Entity::update(worktree::ActiveModel { + id: ActiveValue::set(worktree_id), + project_id: ActiveValue::set(project_id), + root_name: ActiveValue::set(update.root_name.clone()), + scan_id: ActiveValue::set(update.scan_id as i64), + completed_scan_id: if update.is_last_update { + ActiveValue::set(update.scan_id as i64) + } else { + ActiveValue::default() + }, + abs_path: ActiveValue::set(update.abs_path.clone()), + ..Default::default() + }) + .exec(&*tx) + .await?; + + if !update.updated_entries.is_empty() { + worktree_entry::Entity::insert_many(update.updated_entries.iter().map(|entry| { + let mtime = entry.mtime.clone().unwrap_or_default(); + worktree_entry::ActiveModel { + project_id: ActiveValue::set(project_id), + worktree_id: ActiveValue::set(worktree_id), + id: ActiveValue::set(entry.id as i64), + is_dir: ActiveValue::set(entry.is_dir), + path: ActiveValue::set(entry.path.clone()), + inode: ActiveValue::set(entry.inode as i64), + mtime_seconds: ActiveValue::set(mtime.seconds as i64), + mtime_nanos: ActiveValue::set(mtime.nanos as i32), + is_symlink: ActiveValue::set(entry.is_symlink), + is_ignored: ActiveValue::set(entry.is_ignored), + is_external: ActiveValue::set(entry.is_external), + git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)), + is_deleted: ActiveValue::set(false), + scan_id: ActiveValue::set(update.scan_id as i64), + } + })) + .on_conflict( + OnConflict::columns([ + worktree_entry::Column::ProjectId, + worktree_entry::Column::WorktreeId, + worktree_entry::Column::Id, + ]) + .update_columns([ + worktree_entry::Column::IsDir, + worktree_entry::Column::Path, + worktree_entry::Column::Inode, + worktree_entry::Column::MtimeSeconds, + worktree_entry::Column::MtimeNanos, + worktree_entry::Column::IsSymlink, + worktree_entry::Column::IsIgnored, + worktree_entry::Column::GitStatus, + worktree_entry::Column::ScanId, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + } + + if !update.removed_entries.is_empty() { + worktree_entry::Entity::update_many() + .filter( + worktree_entry::Column::ProjectId + .eq(project_id) + .and(worktree_entry::Column::WorktreeId.eq(worktree_id)) + .and( + worktree_entry::Column::Id + .is_in(update.removed_entries.iter().map(|id| *id as i64)), + ), + ) + .set(worktree_entry::ActiveModel { + is_deleted: ActiveValue::Set(true), + scan_id: ActiveValue::Set(update.scan_id as i64), + ..Default::default() + }) + .exec(&*tx) + .await?; + } + + if !update.updated_repositories.is_empty() { + worktree_repository::Entity::insert_many(update.updated_repositories.iter().map( + |repository| worktree_repository::ActiveModel { + project_id: ActiveValue::set(project_id), + worktree_id: ActiveValue::set(worktree_id), + work_directory_id: ActiveValue::set(repository.work_directory_id as i64), + scan_id: ActiveValue::set(update.scan_id as i64), + branch: ActiveValue::set(repository.branch.clone()), + is_deleted: ActiveValue::set(false), + }, + )) + .on_conflict( + OnConflict::columns([ + worktree_repository::Column::ProjectId, + worktree_repository::Column::WorktreeId, + worktree_repository::Column::WorkDirectoryId, + ]) + .update_columns([ + worktree_repository::Column::ScanId, + worktree_repository::Column::Branch, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + } + + if !update.removed_repositories.is_empty() { + worktree_repository::Entity::update_many() + .filter( + worktree_repository::Column::ProjectId + .eq(project_id) + .and(worktree_repository::Column::WorktreeId.eq(worktree_id)) + .and( + 
worktree_repository::Column::WorkDirectoryId + .is_in(update.removed_repositories.iter().map(|id| *id as i64)), + ), + ) + .set(worktree_repository::ActiveModel { + is_deleted: ActiveValue::Set(true), + scan_id: ActiveValue::Set(update.scan_id as i64), + ..Default::default() + }) + .exec(&*tx) + .await?; + } + + let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; + Ok(connection_ids) + }) + .await + } + + /// Updates the diagnostic summary for the given connection. + pub async fn update_diagnostic_summary( + &self, + update: &proto::UpdateDiagnosticSummary, + connection: ConnectionId, + ) -> Result>> { + let project_id = ProjectId::from_proto(update.project_id); + let worktree_id = update.worktree_id as i64; + self.project_transaction(project_id, |tx| async move { + let summary = update + .summary + .as_ref() + .ok_or_else(|| anyhow!("invalid summary"))?; + + // Ensure the update comes from the host. + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + if project.host_connection()? != connection { + return Err(anyhow!("can't update a project hosted by someone else"))?; + } + + // Update summary. + worktree_diagnostic_summary::Entity::insert(worktree_diagnostic_summary::ActiveModel { + project_id: ActiveValue::set(project_id), + worktree_id: ActiveValue::set(worktree_id), + path: ActiveValue::set(summary.path.clone()), + language_server_id: ActiveValue::set(summary.language_server_id as i64), + error_count: ActiveValue::set(summary.error_count as i32), + warning_count: ActiveValue::set(summary.warning_count as i32), + }) + .on_conflict( + OnConflict::columns([ + worktree_diagnostic_summary::Column::ProjectId, + worktree_diagnostic_summary::Column::WorktreeId, + worktree_diagnostic_summary::Column::Path, + ]) + .update_columns([ + worktree_diagnostic_summary::Column::LanguageServerId, + worktree_diagnostic_summary::Column::ErrorCount, + worktree_diagnostic_summary::Column::WarningCount, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + + let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; + Ok(connection_ids) + }) + .await + } + + /// Starts the language server for the given connection. + pub async fn start_language_server( + &self, + update: &proto::StartLanguageServer, + connection: ConnectionId, + ) -> Result>> { + let project_id = ProjectId::from_proto(update.project_id); + self.project_transaction(project_id, |tx| async move { + let server = update + .server + .as_ref() + .ok_or_else(|| anyhow!("invalid language server"))?; + + // Ensure the update comes from the host. + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + if project.host_connection()? != connection { + return Err(anyhow!("can't update a project hosted by someone else"))?; + } + + // Add the newly-started language server. + language_server::Entity::insert(language_server::ActiveModel { + project_id: ActiveValue::set(project_id), + id: ActiveValue::set(server.id as i64), + name: ActiveValue::set(server.name.clone()), + }) + .on_conflict( + OnConflict::columns([ + language_server::Column::ProjectId, + language_server::Column::Id, + ]) + .update_column(language_server::Column::Name) + .to_owned(), + ) + .exec(&*tx) + .await?; + + let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; + Ok(connection_ids) + }) + .await + } + + /// Updates the worktree settings for the given connection. 
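+    ///
+    /// A minimal usage sketch (illustrative only; `db`, an incoming
+    /// `update: proto::UpdateWorktreeSettings`, and the host's `connection` are assumed
+    /// to be in scope; `into_inner` unwraps the transaction guard, mirroring the other
+    /// helpers in this file):
+    ///
+    /// ```ignore
+    /// let guest_connection_ids = db
+    ///     .update_worktree_settings(&update, connection)
+    ///     .await?
+    ///     .into_inner();
+    /// ```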
+ pub async fn update_worktree_settings( + &self, + update: &proto::UpdateWorktreeSettings, + connection: ConnectionId, + ) -> Result>> { + let project_id = ProjectId::from_proto(update.project_id); + self.project_transaction(project_id, |tx| async move { + // Ensure the update comes from the host. + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + if project.host_connection()? != connection { + return Err(anyhow!("can't update a project hosted by someone else"))?; + } + + if let Some(content) = &update.content { + worktree_settings_file::Entity::insert(worktree_settings_file::ActiveModel { + project_id: ActiveValue::Set(project_id), + worktree_id: ActiveValue::Set(update.worktree_id as i64), + path: ActiveValue::Set(update.path.clone()), + content: ActiveValue::Set(content.clone()), + }) + .on_conflict( + OnConflict::columns([ + worktree_settings_file::Column::ProjectId, + worktree_settings_file::Column::WorktreeId, + worktree_settings_file::Column::Path, + ]) + .update_column(worktree_settings_file::Column::Content) + .to_owned(), + ) + .exec(&*tx) + .await?; + } else { + worktree_settings_file::Entity::delete(worktree_settings_file::ActiveModel { + project_id: ActiveValue::Set(project_id), + worktree_id: ActiveValue::Set(update.worktree_id as i64), + path: ActiveValue::Set(update.path.clone()), + ..Default::default() + }) + .exec(&*tx) + .await?; + } + + let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; + Ok(connection_ids) + }) + .await + } + + /// Adds the given connection to the specified hosted project + pub async fn join_hosted_project( + &self, + id: ProjectId, + user_id: UserId, + connection: ConnectionId, + ) -> Result<(Project, ReplicaId)> { + self.transaction(|tx| async move { + let (project, hosted_project) = project::Entity::find_by_id(id) + .find_also_related(hosted_project::Entity) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("hosted project is no longer shared"))?; + + let Some(hosted_project) = hosted_project else { + return Err(anyhow!("project is not hosted"))?; + }; + + let channel = channel::Entity::find_by_id(hosted_project.channel_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such channel"))?; + + let role = self + .check_user_is_channel_participant(&channel, user_id, &tx) + .await?; + + self.join_project_internal(project, user_id, connection, role, &tx) + .await + }) + .await + } + + pub async fn get_project(&self, id: ProjectId) -> Result { + self.transaction(|tx| async move { + Ok(project::Entity::find_by_id(id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?) + }) + .await + } + + pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result { + self.transaction(|tx| async move { + Ok(project::Entity::find() + .filter(project::Column::DevServerProjectId.eq(id)) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?) + }) + .await + } + + /// Adds the given connection to the specified project + /// in the current room. 
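+    ///
+    /// A minimal usage sketch (illustrative only; `db`, `project_id`, the joining
+    /// user's `connection`, and `user_id` are assumed to be in scope):
+    ///
+    /// ```ignore
+    /// let (project, replica_id) = db
+    ///     .join_project(project_id, connection, user_id)
+    ///     .await?
+    ///     .into_inner();
+    /// // `replica_id` identifies this collaborator within the shared project.
+    /// ```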
+ pub async fn join_project( + &self, + project_id: ProjectId, + connection: ConnectionId, + user_id: UserId, + ) -> Result> { + self.project_transaction(project_id, |tx| async move { + let (project, role) = self + .access_project( + project_id, + connection, + PrincipalId::UserId(user_id), + Capability::ReadOnly, + &tx, + ) + .await?; + self.join_project_internal(project, user_id, connection, role, &tx) + .await + }) + .await + } + + async fn join_project_internal( + &self, + project: project::Model, + user_id: UserId, + connection: ConnectionId, + role: ChannelRole, + tx: &DatabaseTransaction, + ) -> Result<(Project, ReplicaId)> { + let mut collaborators = project + .find_related(project_collaborator::Entity) + .all(tx) + .await?; + let replica_ids = collaborators + .iter() + .map(|c| c.replica_id) + .collect::>(); + let mut replica_id = ReplicaId(1); + while replica_ids.contains(&replica_id) { + replica_id.0 += 1; + } + let new_collaborator = project_collaborator::ActiveModel { + project_id: ActiveValue::set(project.id), + connection_id: ActiveValue::set(connection.id as i32), + connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)), + user_id: ActiveValue::set(user_id), + replica_id: ActiveValue::set(replica_id), + is_host: ActiveValue::set(false), + ..Default::default() + } + .insert(tx) + .await?; + collaborators.push(new_collaborator); + + let db_worktrees = project.find_related(worktree::Entity).all(tx).await?; + let mut worktrees = db_worktrees + .into_iter() + .map(|db_worktree| { + ( + db_worktree.id as u64, + Worktree { + id: db_worktree.id as u64, + abs_path: db_worktree.abs_path, + root_name: db_worktree.root_name, + visible: db_worktree.visible, + entries: Default::default(), + repository_entries: Default::default(), + diagnostic_summaries: Default::default(), + settings_files: Default::default(), + scan_id: db_worktree.scan_id as u64, + completed_scan_id: db_worktree.completed_scan_id as u64, + }, + ) + }) + .collect::>(); + + // Populate worktree entries. + { + let mut db_entries = worktree_entry::Entity::find() + .filter( + Condition::all() + .add(worktree_entry::Column::ProjectId.eq(project.id)) + .add(worktree_entry::Column::IsDeleted.eq(false)), + ) + .stream(tx) + .await?; + while let Some(db_entry) = db_entries.next().await { + let db_entry = db_entry?; + if let Some(worktree) = worktrees.get_mut(&(db_entry.worktree_id as u64)) { + worktree.entries.push(proto::Entry { + id: db_entry.id as u64, + is_dir: db_entry.is_dir, + path: db_entry.path, + inode: db_entry.inode as u64, + mtime: Some(proto::Timestamp { + seconds: db_entry.mtime_seconds as u64, + nanos: db_entry.mtime_nanos as u32, + }), + is_symlink: db_entry.is_symlink, + is_ignored: db_entry.is_ignored, + is_external: db_entry.is_external, + git_status: db_entry.git_status.map(|status| status as i32), + }); + } + } + } + + // Populate repository entries. 
+ { + let mut db_repository_entries = worktree_repository::Entity::find() + .filter( + Condition::all() + .add(worktree_repository::Column::ProjectId.eq(project.id)) + .add(worktree_repository::Column::IsDeleted.eq(false)), + ) + .stream(tx) + .await?; + while let Some(db_repository_entry) = db_repository_entries.next().await { + let db_repository_entry = db_repository_entry?; + if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64)) + { + worktree.repository_entries.insert( + db_repository_entry.work_directory_id as u64, + proto::RepositoryEntry { + work_directory_id: db_repository_entry.work_directory_id as u64, + branch: db_repository_entry.branch, + }, + ); + } + } + } + + // Populate worktree diagnostic summaries. + { + let mut db_summaries = worktree_diagnostic_summary::Entity::find() + .filter(worktree_diagnostic_summary::Column::ProjectId.eq(project.id)) + .stream(tx) + .await?; + while let Some(db_summary) = db_summaries.next().await { + let db_summary = db_summary?; + if let Some(worktree) = worktrees.get_mut(&(db_summary.worktree_id as u64)) { + worktree + .diagnostic_summaries + .push(proto::DiagnosticSummary { + path: db_summary.path, + language_server_id: db_summary.language_server_id as u64, + error_count: db_summary.error_count as u32, + warning_count: db_summary.warning_count as u32, + }); + } + } + } + + // Populate worktree settings files + { + let mut db_settings_files = worktree_settings_file::Entity::find() + .filter(worktree_settings_file::Column::ProjectId.eq(project.id)) + .stream(tx) + .await?; + while let Some(db_settings_file) = db_settings_files.next().await { + let db_settings_file = db_settings_file?; + if let Some(worktree) = worktrees.get_mut(&(db_settings_file.worktree_id as u64)) { + worktree.settings_files.push(WorktreeSettingsFile { + path: db_settings_file.path, + content: db_settings_file.content, + }); + } + } + } + + // Populate language servers. + let language_servers = project + .find_related(language_server::Entity) + .all(tx) + .await?; + + let project = Project { + id: project.id, + role, + collaborators: collaborators + .into_iter() + .map(|collaborator| ProjectCollaborator { + connection_id: collaborator.connection(), + user_id: collaborator.user_id, + replica_id: collaborator.replica_id, + is_host: collaborator.is_host, + }) + .collect(), + worktrees, + language_servers: language_servers + .into_iter() + .map(|language_server| proto::LanguageServer { + id: language_server.id as u64, + name: language_server.name, + }) + .collect(), + dev_server_project_id: project.dev_server_project_id, + }; + Ok((project, replica_id as ReplicaId)) + } + + pub async fn leave_hosted_project( + &self, + project_id: ProjectId, + connection: ConnectionId, + ) -> Result { + self.transaction(|tx| async move { + let result = project_collaborator::Entity::delete_many() + .filter( + Condition::all() + .add(project_collaborator::Column::ProjectId.eq(project_id)) + .add(project_collaborator::Column::ConnectionId.eq(connection.id as i32)) + .add( + project_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + return Err(anyhow!("not in the project"))?; + } + + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("no such project"))?; + let collaborators = project + .find_related(project_collaborator::Entity) + .all(&*tx) + .await?; + let connection_ids = collaborators + .into_iter() + .map(|collaborator| collaborator.connection()) + .collect(); + Ok(LeftProject { + id: project.id, + connection_ids, + should_unshare: false, + }) + }) + .await + } + + /// Removes the given connection from the specified project. + pub async fn leave_project( + &self, + project_id: ProjectId, + connection: ConnectionId, + ) -> Result, LeftProject)>> { + self.project_transaction(project_id, |tx| async move { + let result = project_collaborator::Entity::delete_many() + .filter( + Condition::all() + .add(project_collaborator::Column::ProjectId.eq(project_id)) + .add(project_collaborator::Column::ConnectionId.eq(connection.id as i32)) + .add( + project_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("not a collaborator on this project"))?; + } + + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + let collaborators = project + .find_related(project_collaborator::Entity) + .all(&*tx) + .await?; + let connection_ids: Vec = collaborators + .into_iter() + .map(|collaborator| collaborator.connection()) + .collect(); + + follower::Entity::delete_many() + .filter( + Condition::any() + .add( + Condition::all() + .add(follower::Column::ProjectId.eq(Some(project_id))) + .add( + follower::Column::LeaderConnectionServerId + .eq(connection.owner_id), + ) + .add(follower::Column::LeaderConnectionId.eq(connection.id)), + ) + .add( + Condition::all() + .add(follower::Column::ProjectId.eq(Some(project_id))) + .add( + follower::Column::FollowerConnectionServerId + .eq(connection.owner_id), + ) + .add(follower::Column::FollowerConnectionId.eq(connection.id)), + ), + ) + .exec(&*tx) + .await?; + + let room = if let Some(room_id) = project.room_id { + Some(self.get_room(room_id, &tx).await?) + } else { + None + }; + + let left_project = LeftProject { + id: project_id, + should_unshare: connection == project.host_connection()?, + connection_ids, + }; + Ok((room, left_project)) + }) + .await + } + + pub async fn check_user_is_project_host( + &self, + project_id: ProjectId, + connection_id: ConnectionId, + ) -> Result<()> { + self.project_transaction(project_id, |tx| async move { + project::Entity::find() + .filter( + Condition::all() + .add(project::Column::Id.eq(project_id)) + .add(project::Column::HostConnectionId.eq(Some(connection_id.id as i32))) + .add( + project::Column::HostConnectionServerId + .eq(Some(connection_id.owner_id as i32)), + ), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("failed to read project host"))?; + + Ok(()) + }) + .await + .map(|guard| guard.into_inner()) + } + + /// Returns the current project if the given user is authorized to access it with the specified capability. + pub async fn access_project( + &self, + project_id: ProjectId, + connection_id: ConnectionId, + principal_id: PrincipalId, + capability: Capability, + tx: &DatabaseTransaction, + ) -> Result<(project::Model, ChannelRole)> { + let (mut project, dev_server_project) = project::Entity::find_by_id(project_id) + .find_also_related(dev_server_project::Entity) + .one(tx) + .await? 
+ .ok_or_else(|| anyhow!("no such project"))?; + + let user_id = match principal_id { + PrincipalId::DevServerId(_) => { + if project + .host_connection() + .is_ok_and(|connection| connection == connection_id) + { + return Ok((project, ChannelRole::Admin)); + } + return Err(anyhow!("not the project host"))?; + } + PrincipalId::UserId(user_id) => user_id, + }; + + let role_from_room = if let Some(room_id) = project.room_id { + room_participant::Entity::find() + .filter(room_participant::Column::RoomId.eq(room_id)) + .filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id)) + .one(tx) + .await? + .and_then(|participant| participant.role) + } else { + None + }; + let role_from_dev_server = if let Some(dev_server_project) = dev_server_project { + let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id) + .one(tx) + .await? + .ok_or_else(|| anyhow!("no such channel"))?; + if user_id == dev_server.user_id { + // If the user left the room "uncleanly" they may rejoin the + // remote project before leave_room runs. IN that case kick + // the project out of the room pre-emptively. + if role_from_room.is_none() { + project = project::Entity::update(project::ActiveModel { + room_id: ActiveValue::Set(None), + ..project.into_active_model() + }) + .exec(tx) + .await?; + } + Some(ChannelRole::Admin) + } else { + None + } + } else { + None + }; + + let role = role_from_dev_server + .or(role_from_room) + .unwrap_or(ChannelRole::Banned); + + match capability { + Capability::ReadWrite => { + if !role.can_edit_projects() { + return Err(anyhow!("not authorized to edit projects"))?; + } + } + Capability::ReadOnly => { + if !role.can_read_projects() { + return Err(anyhow!("not authorized to read projects"))?; + } + } + } + + Ok((project, role)) + } + + /// Returns the host connection for a read-only request to join a shared project. + pub async fn host_for_read_only_project_request( + &self, + project_id: ProjectId, + connection_id: ConnectionId, + user_id: UserId, + ) -> Result { + self.project_transaction(project_id, |tx| async move { + let (project, _) = self + .access_project( + project_id, + connection_id, + PrincipalId::UserId(user_id), + Capability::ReadOnly, + &tx, + ) + .await?; + project.host_connection() + }) + .await + .map(|guard| guard.into_inner()) + } + + /// Returns the host connection for a request to join a shared project. 
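+    // The capability check in `access_project` above boils down to a small
+    // role/permission table. Below is a minimal, self-contained sketch of that
+    // logic with simplified stand-ins: the real `ChannelRole` has more variants
+    // (e.g. `Talker`) and its `can_read_projects`/`can_edit_projects` helpers
+    // live outside this file, so the exact role semantics here are assumptions.
+    #[derive(Clone, Copy)]
+    enum Capability { ReadOnly, ReadWrite }
+
+    #[derive(Clone, Copy)]
+    enum ChannelRole { Admin, Member, Guest, Banned }
+
+    impl ChannelRole {
+        fn can_edit_projects(self) -> bool {
+            matches!(self, ChannelRole::Admin | ChannelRole::Member)
+        }
+        fn can_read_projects(self) -> bool {
+            !matches!(self, ChannelRole::Banned)
+        }
+    }
+
+    fn authorize(role: ChannelRole, capability: Capability) -> Result<(), &'static str> {
+        match capability {
+            Capability::ReadWrite if !role.can_edit_projects() => Err("not authorized to edit projects"),
+            Capability::ReadOnly if !role.can_read_projects() => Err("not authorized to read projects"),
+            _ => Ok(()),
+        }
+    }
+
+    fn main() {
+        assert!(authorize(ChannelRole::Admin, Capability::ReadWrite).is_ok());
+        assert!(authorize(ChannelRole::Member, Capability::ReadWrite).is_ok());
+        assert!(authorize(ChannelRole::Guest, Capability::ReadWrite).is_err());
+        assert!(authorize(ChannelRole::Banned, Capability::ReadOnly).is_err());
+    }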
+ pub async fn host_for_mutating_project_request( + &self, + project_id: ProjectId, + connection_id: ConnectionId, + user_id: UserId, + ) -> Result { + self.project_transaction(project_id, |tx| async move { + let (project, _) = self + .access_project( + project_id, + connection_id, + PrincipalId::UserId(user_id), + Capability::ReadWrite, + &tx, + ) + .await?; + project.host_connection() + }) + .await + .map(|guard| guard.into_inner()) + } + + pub async fn connections_for_buffer_update( + &self, + project_id: ProjectId, + principal_id: PrincipalId, + connection_id: ConnectionId, + capability: Capability, + ) -> Result)>> { + self.project_transaction(project_id, |tx| async move { + // Authorize + let (project, _) = self + .access_project(project_id, connection_id, principal_id, capability, &tx) + .await?; + + let host_connection_id = project.host_connection()?; + + let collaborators = project_collaborator::Entity::find() + .filter(project_collaborator::Column::ProjectId.eq(project_id)) + .all(&*tx) + .await?; + + let guest_connection_ids = collaborators + .into_iter() + .filter_map(|collaborator| { + if collaborator.is_host { + None + } else { + Some(collaborator.connection()) + } + }) + .collect(); + + Ok((host_connection_id, guest_connection_ids)) + }) + .await + } + + /// Returns the connection IDs in the given project. + /// + /// The provided `connection_id` must also be a collaborator in the project, + /// otherwise an error will be returned. + pub async fn project_connection_ids( + &self, + project_id: ProjectId, + connection_id: ConnectionId, + exclude_dev_server: bool, + ) -> Result>> { + self.project_transaction(project_id, |tx| async move { + let project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such project"))?; + + let mut collaborators = project_collaborator::Entity::find() + .filter(project_collaborator::Column::ProjectId.eq(project_id)) + .stream(&*tx) + .await?; + + let mut connection_ids = HashSet::default(); + if let Some(host_connection) = project.host_connection().log_err() { + if !exclude_dev_server { + connection_ids.insert(host_connection); + } + } + + while let Some(collaborator) = collaborators.next().await { + let collaborator = collaborator?; + connection_ids.insert(collaborator.connection()); + } + + if connection_ids.contains(&connection_id) + || Some(connection_id) == project.host_connection().ok() + { + Ok(connection_ids) + } else { + Err(anyhow!( + "can only send project updates to a project you're in" + ))? + } + }) + .await + } + + async fn project_guest_connection_ids( + &self, + project_id: ProjectId, + tx: &DatabaseTransaction, + ) -> Result> { + let mut collaborators = project_collaborator::Entity::find() + .filter( + project_collaborator::Column::ProjectId + .eq(project_id) + .and(project_collaborator::Column::IsHost.eq(false)), + ) + .stream(tx) + .await?; + + let mut guest_connection_ids = Vec::new(); + while let Some(collaborator) = collaborators.next().await { + let collaborator = collaborator?; + guest_connection_ids.push(collaborator.connection()); + } + Ok(guest_connection_ids) + } + + /// Returns the [`RoomId`] for the given project. + pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result> { + self.transaction(|tx| async move { + Ok(project::Entity::find_by_id(project_id) + .one(&*tx) + .await? 
+ .and_then(|project| project.room_id)) + }) + .await + } + + pub async fn check_room_participants( + &self, + room_id: RoomId, + leader_id: ConnectionId, + follower_id: ConnectionId, + ) -> Result<()> { + self.transaction(|tx| async move { + use room_participant::Column; + + let count = room_participant::Entity::find() + .filter( + Condition::all().add(Column::RoomId.eq(room_id)).add( + Condition::any() + .add(Column::AnsweringConnectionId.eq(leader_id.id as i32).and( + Column::AnsweringConnectionServerId.eq(leader_id.owner_id as i32), + )) + .add(Column::AnsweringConnectionId.eq(follower_id.id as i32).and( + Column::AnsweringConnectionServerId.eq(follower_id.owner_id as i32), + )), + ), + ) + .count(&*tx) + .await?; + + if count < 2 { + Err(anyhow!("not room participants"))?; + } + + Ok(()) + }) + .await + } + + /// Adds the given follower connection as a follower of the given leader connection. + pub async fn follow( + &self, + room_id: RoomId, + project_id: ProjectId, + leader_connection: ConnectionId, + follower_connection: ConnectionId, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + follower::ActiveModel { + room_id: ActiveValue::set(room_id), + project_id: ActiveValue::set(project_id), + leader_connection_server_id: ActiveValue::set(ServerId( + leader_connection.owner_id as i32, + )), + leader_connection_id: ActiveValue::set(leader_connection.id as i32), + follower_connection_server_id: ActiveValue::set(ServerId( + follower_connection.owner_id as i32, + )), + follower_connection_id: ActiveValue::set(follower_connection.id as i32), + ..Default::default() + } + .insert(&*tx) + .await?; + + let room = self.get_room(room_id, &tx).await?; + Ok(room) + }) + .await + } + + /// Removes the given follower connection as a follower of the given leader connection. + pub async fn unfollow( + &self, + room_id: RoomId, + project_id: ProjectId, + leader_connection: ConnectionId, + follower_connection: ConnectionId, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + follower::Entity::delete_many() + .filter( + Condition::all() + .add(follower::Column::RoomId.eq(room_id)) + .add(follower::Column::ProjectId.eq(project_id)) + .add( + follower::Column::LeaderConnectionServerId + .eq(leader_connection.owner_id), + ) + .add(follower::Column::LeaderConnectionId.eq(leader_connection.id)) + .add( + follower::Column::FollowerConnectionServerId + .eq(follower_connection.owner_id), + ) + .add(follower::Column::FollowerConnectionId.eq(follower_connection.id)), + ) + .exec(&*tx) + .await?; + + let room = self.get_room(room_id, &tx).await?; + Ok(room) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/rate_buckets.rs b/crates/collab/src/db/queries/rate_buckets.rs new file mode 100644 index 0000000..58b6217 --- /dev/null +++ b/crates/collab/src/db/queries/rate_buckets.rs @@ -0,0 +1,58 @@ +use super::*; +use crate::db::tables::rate_buckets; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; + +impl Database { + /// Saves the rate limit for the given user and rate limit name if the last_refill is later + /// than the currently saved timestamp. 
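+    // The write below is a single bulk upsert: rows are keyed on
+    // (user_id, rate_limit_name), and on conflict the stored token_count and
+    // last_refill are replaced. That lets a caller (presumably an in-memory
+    // rate limiter) flush a whole batch of buckets in one round trip without
+    // first checking which buckets already exist.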
+ pub async fn save_rate_buckets(&self, buckets: &[rate_buckets::Model]) -> Result<()> { + if buckets.is_empty() { + return Ok(()); + } + + self.transaction(|tx| async move { + rate_buckets::Entity::insert_many(buckets.iter().map(|bucket| { + rate_buckets::ActiveModel { + user_id: ActiveValue::Set(bucket.user_id), + rate_limit_name: ActiveValue::Set(bucket.rate_limit_name.clone()), + token_count: ActiveValue::Set(bucket.token_count), + last_refill: ActiveValue::Set(bucket.last_refill), + } + })) + .on_conflict( + OnConflict::columns([ + rate_buckets::Column::UserId, + rate_buckets::Column::RateLimitName, + ]) + .update_columns([ + rate_buckets::Column::TokenCount, + rate_buckets::Column::LastRefill, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + + Ok(()) + }) + .await + } + + /// Retrieves the rate limit for the given user and rate limit name. + pub async fn get_rate_bucket( + &self, + user_id: UserId, + rate_limit_name: &str, + ) -> Result> { + self.transaction(|tx| async move { + let rate_limit = rate_buckets::Entity::find() + .filter(rate_buckets::Column::UserId.eq(user_id)) + .filter(rate_buckets::Column::RateLimitName.eq(rate_limit_name)) + .one(&*tx) + .await?; + + Ok(rate_limit) + }) + .await + } +} diff --git a/crates/collab/src/db/queries/remote_projects.rs b/crates/collab/src/db/queries/remote_projects.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/crates/collab/src/db/queries/remote_projects.rs @@ -0,0 +1 @@ + diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs new file mode 100644 index 0000000..185bcf7 --- /dev/null +++ b/crates/collab/src/db/queries/rooms.rs @@ -0,0 +1,1371 @@ +use super::*; + +impl Database { + /// Clears all room participants in rooms attached to a stale server. + pub async fn clear_stale_room_participants( + &self, + room_id: RoomId, + new_server_id: ServerId, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + let stale_participant_filter = Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add(room_participant::Column::AnsweringConnectionId.is_not_null()) + .add(room_participant::Column::AnsweringConnectionServerId.ne(new_server_id)); + + let stale_participant_user_ids = room_participant::Entity::find() + .filter(stale_participant_filter.clone()) + .all(&*tx) + .await? + .into_iter() + .map(|participant| participant.user_id) + .collect::>(); + + // Delete participants who failed to reconnect and cancel their calls. + let mut canceled_calls_to_user_ids = Vec::new(); + room_participant::Entity::delete_many() + .filter(stale_participant_filter) + .exec(&*tx) + .await?; + let called_participants = room_participant::Entity::find() + .filter( + Condition::all() + .add( + room_participant::Column::CallingUserId + .is_in(stale_participant_user_ids.iter().copied()), + ) + .add(room_participant::Column::AnsweringConnectionId.is_null()), + ) + .all(&*tx) + .await?; + room_participant::Entity::delete_many() + .filter( + room_participant::Column::Id + .is_in(called_participants.iter().map(|participant| participant.id)), + ) + .exec(&*tx) + .await?; + canceled_calls_to_user_ids.extend( + called_participants + .into_iter() + .map(|participant| participant.user_id), + ); + + let (channel, room) = self.get_channel_room(room_id, &tx).await?; + if channel.is_none() { + // Delete the room if it becomes empty. 
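+                // "Empty" here means no participants remain after the stale ones were
+                // pruned above; any projects still attached to the room are removed
+                // first so the room row itself can be deleted cleanly.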
+ if room.participants.is_empty() { + project::Entity::delete_many() + .filter(project::Column::RoomId.eq(room_id)) + .exec(&*tx) + .await?; + room::Entity::delete_by_id(room_id).exec(&*tx).await?; + } + }; + + Ok(RefreshedRoom { + room, + channel, + stale_participant_user_ids, + canceled_calls_to_user_ids, + }) + }) + .await + } + + /// Returns the incoming calls for user with the given ID. + pub async fn incoming_call_for_user( + &self, + user_id: UserId, + ) -> Result> { + self.transaction(|tx| async move { + let pending_participant = room_participant::Entity::find() + .filter( + room_participant::Column::UserId + .eq(user_id) + .and(room_participant::Column::AnsweringConnectionId.is_null()), + ) + .one(&*tx) + .await?; + + if let Some(pending_participant) = pending_participant { + let room = self.get_room(pending_participant.room_id, &tx).await?; + Ok(Self::build_incoming_call(&room, user_id)) + } else { + Ok(None) + } + }) + .await + } + + /// Creates a new room. + pub async fn create_room( + &self, + user_id: UserId, + connection: ConnectionId, + live_kit_room: &str, + ) -> Result { + self.transaction(|tx| async move { + let room = room::ActiveModel { + live_kit_room: ActiveValue::set(live_kit_room.into()), + ..Default::default() + } + .insert(&*tx) + .await?; + room_participant::ActiveModel { + room_id: ActiveValue::set(room.id), + user_id: ActiveValue::set(user_id), + answering_connection_id: ActiveValue::set(Some(connection.id as i32)), + answering_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + answering_connection_lost: ActiveValue::set(false), + calling_user_id: ActiveValue::set(user_id), + calling_connection_id: ActiveValue::set(connection.id as i32), + calling_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + participant_index: ActiveValue::set(Some(0)), + role: ActiveValue::set(Some(ChannelRole::Admin)), + + id: ActiveValue::NotSet, + location_kind: ActiveValue::NotSet, + location_project_id: ActiveValue::NotSet, + initial_project_id: ActiveValue::NotSet, + } + .insert(&*tx) + .await?; + + let room = self.get_room(room.id, &tx).await?; + Ok(room) + }) + .await + } + + pub async fn call( + &self, + room_id: RoomId, + calling_user_id: UserId, + calling_connection: ConnectionId, + called_user_id: UserId, + initial_project_id: Option, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + let caller = room_participant::Entity::find() + .filter( + room_participant::Column::UserId + .eq(calling_user_id) + .and(room_participant::Column::RoomId.eq(room_id)), + ) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("user is not in the room"))?; + + let called_user_role = match caller.role.unwrap_or(ChannelRole::Member) { + ChannelRole::Admin | ChannelRole::Member => ChannelRole::Member, + ChannelRole::Guest | ChannelRole::Talker => ChannelRole::Guest, + ChannelRole::Banned => return Err(anyhow!("banned users cannot invite").into()), + }; + + room_participant::ActiveModel { + room_id: ActiveValue::set(room_id), + user_id: ActiveValue::set(called_user_id), + answering_connection_lost: ActiveValue::set(false), + participant_index: ActiveValue::NotSet, + calling_user_id: ActiveValue::set(calling_user_id), + calling_connection_id: ActiveValue::set(calling_connection.id as i32), + calling_connection_server_id: ActiveValue::set(Some(ServerId( + calling_connection.owner_id as i32, + ))), + initial_project_id: ActiveValue::set(initial_project_id), + role: ActiveValue::set(Some(called_user_role)), + + id: ActiveValue::NotSet, + answering_connection_id: ActiveValue::NotSet, + answering_connection_server_id: ActiveValue::NotSet, + location_kind: ActiveValue::NotSet, + location_project_id: ActiveValue::NotSet, + } + .insert(&*tx) + .await?; + + let room = self.get_room(room_id, &tx).await?; + let incoming_call = Self::build_incoming_call(&room, called_user_id) + .ok_or_else(|| anyhow!("failed to build incoming call"))?; + Ok((room, incoming_call)) + }) + .await + } + + pub async fn call_failed( + &self, + room_id: RoomId, + called_user_id: UserId, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + room_participant::Entity::delete_many() + .filter( + room_participant::Column::RoomId + .eq(room_id) + .and(room_participant::Column::UserId.eq(called_user_id)), + ) + .exec(&*tx) + .await?; + let room = self.get_room(room_id, &tx).await?; + Ok(room) + }) + .await + } + + pub async fn decline_call( + &self, + expected_room_id: Option, + user_id: UserId, + ) -> Result>> { + self.optional_room_transaction(|tx| async move { + let mut filter = Condition::all() + .add(room_participant::Column::UserId.eq(user_id)) + .add(room_participant::Column::AnsweringConnectionId.is_null()); + if let Some(room_id) = expected_room_id { + filter = filter.add(room_participant::Column::RoomId.eq(room_id)); + } + let participant = room_participant::Entity::find() + .filter(filter) + .one(&*tx) + .await?; + + let participant = if let Some(participant) = participant { + participant + } else if expected_room_id.is_some() { + return Err(anyhow!("could not find call to decline"))?; + } else { + return Ok(None); + }; + + let room_id = participant.room_id; + room_participant::Entity::delete(participant.into_active_model()) + .exec(&*tx) + .await?; + + let room = self.get_room(room_id, &tx).await?; + Ok(Some((room_id, room))) + }) + .await + } + + pub async fn cancel_call( + &self, + room_id: RoomId, + calling_connection: ConnectionId, + called_user_id: UserId, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + let participant = room_participant::Entity::find() + .filter( + Condition::all() + .add(room_participant::Column::UserId.eq(called_user_id)) + .add(room_participant::Column::RoomId.eq(room_id)) + .add( + room_participant::Column::CallingConnectionId + .eq(calling_connection.id as i32), + ) + .add( + room_participant::Column::CallingConnectionServerId + .eq(calling_connection.owner_id as i32), + ) + .add(room_participant::Column::AnsweringConnectionId.is_null()), + ) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("no call to cancel"))?; + + room_participant::Entity::delete(participant.into_active_model()) + .exec(&*tx) + .await?; + + let room = self.get_room(room_id, &tx).await?; + Ok(room) + }) + .await + } + + pub async fn join_room( + &self, + room_id: RoomId, + user_id: UserId, + connection: ConnectionId, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryChannelId { + ChannelId, + } + + let channel_id: Option = room::Entity::find() + .select_only() + .column(room::Column::ChannelId) + .filter(room::Column::Id.eq(room_id)) + .into_values::<_, QueryChannelId>() + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such room"))?; + + if channel_id.is_some() { + Err(anyhow!("tried to join channel call directly"))? + } + + let participant_index = self + .get_next_participant_index_internal(room_id, &tx) + .await?; + + let result = room_participant::Entity::update_many() + .filter( + Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add(room_participant::Column::UserId.eq(user_id)) + .add(room_participant::Column::AnsweringConnectionId.is_null()), + ) + .set(room_participant::ActiveModel { + participant_index: ActiveValue::Set(Some(participant_index)), + answering_connection_id: ActiveValue::set(Some(connection.id as i32)), + answering_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + answering_connection_lost: ActiveValue::set(false), + ..Default::default() + }) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("room does not exist or was already joined"))?; + } + + let room = self.get_room(room_id, &tx).await?; + Ok(JoinRoom { + room, + channel: None, + }) + }) + .await + } + + pub async fn stale_room_connection(&self, user_id: UserId) -> Result> { + self.transaction(|tx| async move { + let participant = room_participant::Entity::find() + .filter(room_participant::Column::UserId.eq(user_id)) + .one(&*tx) + .await?; + Ok(participant.and_then(|p| p.answering_connection())) + }) + .await + } + + async fn get_next_participant_index_internal( + &self, + room_id: RoomId, + tx: &DatabaseTransaction, + ) -> Result { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryParticipantIndices { + ParticipantIndex, + } + let existing_participant_indices: Vec = room_participant::Entity::find() + .filter( + room_participant::Column::RoomId + .eq(room_id) + .and(room_participant::Column::ParticipantIndex.is_not_null()), + ) + .select_only() + .column(room_participant::Column::ParticipantIndex) + .into_values::<_, QueryParticipantIndices>() + .all(tx) + .await?; + + let mut participant_index = 0; + while existing_participant_indices.contains(&participant_index) { + participant_index += 1; + } + + Ok(participant_index) + } + + /// Returns the channel ID for the given room, if it has one. 
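+    // `get_next_participant_index_internal` above assigns each joiner the smallest
+    // non-negative index not already taken in the room, so indices freed by departing
+    // participants can be reused. A self-contained restatement of that loop, as a
+    // sketch for exposition rather than the crate's code:
+    fn next_participant_index(existing: &[i32]) -> i32 {
+        let mut index = 0;
+        while existing.contains(&index) {
+            index += 1;
+        }
+        index
+    }
+
+    fn main() {
+        assert_eq!(next_participant_index(&[]), 0);
+        assert_eq!(next_participant_index(&[0, 1, 3]), 2);
+    }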
+ pub async fn channel_id_for_room(&self, room_id: RoomId) -> Result> { + self.transaction(|tx| async move { + let room: Option = room::Entity::find() + .filter(room::Column::Id.eq(room_id)) + .one(&*tx) + .await?; + + Ok(room.and_then(|room| room.channel_id)) + }) + .await + } + + pub(crate) async fn join_channel_room_internal( + &self, + room_id: RoomId, + user_id: UserId, + connection: ConnectionId, + role: ChannelRole, + tx: &DatabaseTransaction, + ) -> Result { + let participant_index = self + .get_next_participant_index_internal(room_id, tx) + .await?; + + // If someone has been invited into the room, accept the invite instead of inserting + let result = room_participant::Entity::update_many() + .filter( + Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add(room_participant::Column::UserId.eq(user_id)) + .add(room_participant::Column::AnsweringConnectionId.is_null()), + ) + .set(room_participant::ActiveModel { + participant_index: ActiveValue::Set(Some(participant_index)), + answering_connection_id: ActiveValue::set(Some(connection.id as i32)), + answering_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + answering_connection_lost: ActiveValue::set(false), + ..Default::default() + }) + .exec(tx) + .await?; + + if result.rows_affected == 0 { + room_participant::Entity::insert(room_participant::ActiveModel { + room_id: ActiveValue::set(room_id), + user_id: ActiveValue::set(user_id), + answering_connection_id: ActiveValue::set(Some(connection.id as i32)), + answering_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + answering_connection_lost: ActiveValue::set(false), + calling_user_id: ActiveValue::set(user_id), + calling_connection_id: ActiveValue::set(connection.id as i32), + calling_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + participant_index: ActiveValue::Set(Some(participant_index)), + role: ActiveValue::set(Some(role)), + id: ActiveValue::NotSet, + location_kind: ActiveValue::NotSet, + location_project_id: ActiveValue::NotSet, + initial_project_id: ActiveValue::NotSet, + }) + .exec(tx) + .await?; + } + + let (channel, room) = self.get_channel_room(room_id, &tx).await?; + let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?; + Ok(JoinRoom { + room, + channel: Some(channel), + }) + } + + pub async fn rejoin_room( + &self, + rejoin_room: proto::RejoinRoom, + user_id: UserId, + connection: ConnectionId, + ) -> Result> { + let room_id = RoomId::from_proto(rejoin_room.id); + self.room_transaction(room_id, |tx| async { + let tx = tx; + let participant_update = room_participant::Entity::update_many() + .filter( + Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add(room_participant::Column::UserId.eq(user_id)) + .add(room_participant::Column::AnsweringConnectionId.is_not_null()), + ) + .set(room_participant::ActiveModel { + answering_connection_id: ActiveValue::set(Some(connection.id as i32)), + answering_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + answering_connection_lost: ActiveValue::set(false), + ..Default::default() + }) + .exec(&*tx) + .await?; + if participant_update.rows_affected == 0 { + return Err(anyhow!("room does not exist or was already joined"))?; + } + + let mut reshared_projects = Vec::new(); + for reshared_project in &rejoin_room.reshared_projects { + let project_id = ProjectId::from_proto(reshared_project.project_id); + let 
project = project::Entity::find_by_id(project_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("project does not exist"))?; + if project.host_user_id != Some(user_id) { + return Err(anyhow!("no such project"))?; + } + + let mut collaborators = project + .find_related(project_collaborator::Entity) + .all(&*tx) + .await?; + let host_ix = collaborators + .iter() + .position(|collaborator| { + collaborator.user_id == user_id && collaborator.is_host + }) + .ok_or_else(|| anyhow!("host not found among collaborators"))?; + let host = collaborators.swap_remove(host_ix); + let old_connection_id = host.connection(); + + project::Entity::update(project::ActiveModel { + host_connection_id: ActiveValue::set(Some(connection.id as i32)), + host_connection_server_id: ActiveValue::set(Some(ServerId( + connection.owner_id as i32, + ))), + ..project.into_active_model() + }) + .exec(&*tx) + .await?; + project_collaborator::Entity::update(project_collaborator::ActiveModel { + connection_id: ActiveValue::set(connection.id as i32), + connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)), + ..host.into_active_model() + }) + .exec(&*tx) + .await?; + + self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx) + .await?; + + reshared_projects.push(ResharedProject { + id: project_id, + old_connection_id, + collaborators: collaborators + .iter() + .map(|collaborator| ProjectCollaborator { + connection_id: collaborator.connection(), + user_id: collaborator.user_id, + replica_id: collaborator.replica_id, + is_host: collaborator.is_host, + }) + .collect(), + worktrees: reshared_project.worktrees.clone(), + }); + } + + project::Entity::delete_many() + .filter( + Condition::all() + .add(project::Column::RoomId.eq(room_id)) + .add(project::Column::HostUserId.eq(user_id)) + .add( + project::Column::Id + .is_not_in(reshared_projects.iter().map(|project| project.id)), + ), + ) + .exec(&*tx) + .await?; + + let mut rejoined_projects = Vec::new(); + for rejoined_project in &rejoin_room.rejoined_projects { + if let Some(rejoined_project) = self + .rejoin_project_internal(&tx, rejoined_project, user_id, connection) + .await? + { + rejoined_projects.push(rejoined_project); + } + } + + let (channel, room) = self.get_channel_room(room_id, &tx).await?; + + Ok(RejoinedRoom { + room, + channel, + rejoined_projects, + reshared_projects, + }) + }) + .await + } + + pub async fn rejoin_project_internal( + &self, + tx: &DatabaseTransaction, + rejoined_project: &proto::RejoinProject, + user_id: UserId, + connection: ConnectionId, + ) -> Result> { + let project_id = ProjectId::from_proto(rejoined_project.id); + let Some(project) = project::Entity::find_by_id(project_id).one(tx).await? 
else { + return Ok(None); + }; + + let mut worktrees = Vec::new(); + let db_worktrees = project.find_related(worktree::Entity).all(tx).await?; + for db_worktree in db_worktrees { + let mut worktree = RejoinedWorktree { + id: db_worktree.id as u64, + abs_path: db_worktree.abs_path, + root_name: db_worktree.root_name, + visible: db_worktree.visible, + updated_entries: Default::default(), + removed_entries: Default::default(), + updated_repositories: Default::default(), + removed_repositories: Default::default(), + diagnostic_summaries: Default::default(), + settings_files: Default::default(), + scan_id: db_worktree.scan_id as u64, + completed_scan_id: db_worktree.completed_scan_id as u64, + }; + + let rejoined_worktree = rejoined_project + .worktrees + .iter() + .find(|worktree| worktree.id == db_worktree.id as u64); + + // File entries + { + let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree { + worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id) + } else { + worktree_entry::Column::IsDeleted.eq(false) + }; + + let mut db_entries = worktree_entry::Entity::find() + .filter( + Condition::all() + .add(worktree_entry::Column::ProjectId.eq(project.id)) + .add(worktree_entry::Column::WorktreeId.eq(worktree.id)) + .add(entry_filter), + ) + .stream(tx) + .await?; + + while let Some(db_entry) = db_entries.next().await { + let db_entry = db_entry?; + if db_entry.is_deleted { + worktree.removed_entries.push(db_entry.id as u64); + } else { + worktree.updated_entries.push(proto::Entry { + id: db_entry.id as u64, + is_dir: db_entry.is_dir, + path: db_entry.path, + inode: db_entry.inode as u64, + mtime: Some(proto::Timestamp { + seconds: db_entry.mtime_seconds as u64, + nanos: db_entry.mtime_nanos as u32, + }), + is_symlink: db_entry.is_symlink, + is_ignored: db_entry.is_ignored, + is_external: db_entry.is_external, + git_status: db_entry.git_status.map(|status| status as i32), + }); + } + } + } + + // Repository Entries + { + let repository_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree { + worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id) + } else { + worktree_repository::Column::IsDeleted.eq(false) + }; + + let mut db_repositories = worktree_repository::Entity::find() + .filter( + Condition::all() + .add(worktree_repository::Column::ProjectId.eq(project.id)) + .add(worktree_repository::Column::WorktreeId.eq(worktree.id)) + .add(repository_entry_filter), + ) + .stream(tx) + .await?; + + while let Some(db_repository) = db_repositories.next().await { + let db_repository = db_repository?; + if db_repository.is_deleted { + worktree + .removed_repositories + .push(db_repository.work_directory_id as u64); + } else { + worktree.updated_repositories.push(proto::RepositoryEntry { + work_directory_id: db_repository.work_directory_id as u64, + branch: db_repository.branch, + }); + } + } + } + + worktrees.push(worktree); + } + + let language_servers = project + .find_related(language_server::Entity) + .all(tx) + .await? 
+ .into_iter() + .map(|language_server| proto::LanguageServer { + id: language_server.id as u64, + name: language_server.name, + }) + .collect::>(); + + { + let mut db_settings_files = worktree_settings_file::Entity::find() + .filter(worktree_settings_file::Column::ProjectId.eq(project_id)) + .stream(tx) + .await?; + while let Some(db_settings_file) = db_settings_files.next().await { + let db_settings_file = db_settings_file?; + if let Some(worktree) = worktrees + .iter_mut() + .find(|w| w.id == db_settings_file.worktree_id as u64) + { + worktree.settings_files.push(WorktreeSettingsFile { + path: db_settings_file.path, + content: db_settings_file.content, + }); + } + } + } + + let mut collaborators = project + .find_related(project_collaborator::Entity) + .all(tx) + .await?; + let self_collaborator = if let Some(self_collaborator_ix) = collaborators + .iter() + .position(|collaborator| collaborator.user_id == user_id) + { + collaborators.swap_remove(self_collaborator_ix) + } else { + return Ok(None); + }; + let old_connection_id = self_collaborator.connection(); + project_collaborator::Entity::update(project_collaborator::ActiveModel { + connection_id: ActiveValue::set(connection.id as i32), + connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)), + ..self_collaborator.into_active_model() + }) + .exec(tx) + .await?; + + let collaborators = collaborators + .into_iter() + .map(|collaborator| ProjectCollaborator { + connection_id: collaborator.connection(), + user_id: collaborator.user_id, + replica_id: collaborator.replica_id, + is_host: collaborator.is_host, + }) + .collect::>(); + + return Ok(Some(RejoinedProject { + id: project_id, + old_connection_id, + collaborators, + worktrees, + language_servers, + })); + } + + pub async fn leave_room( + &self, + connection: ConnectionId, + ) -> Result>> { + self.optional_room_transaction(|tx| async move { + let leaving_participant = room_participant::Entity::find() + .filter( + Condition::all() + .add( + room_participant::Column::AnsweringConnectionId + .eq(connection.id as i32), + ) + .add( + room_participant::Column::AnsweringConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .one(&*tx) + .await?; + + if let Some(leaving_participant) = leaving_participant { + // Leave room. + let room_id = leaving_participant.room_id; + room_participant::Entity::delete_by_id(leaving_participant.id) + .exec(&*tx) + .await?; + + // Cancel pending calls initiated by the leaving user. + let called_participants = room_participant::Entity::find() + .filter( + Condition::all() + .add( + room_participant::Column::CallingUserId + .eq(leaving_participant.user_id), + ) + .add(room_participant::Column::AnsweringConnectionId.is_null()), + ) + .all(&*tx) + .await?; + room_participant::Entity::delete_many() + .filter( + room_participant::Column::Id + .is_in(called_participants.iter().map(|participant| participant.id)), + ) + .exec(&*tx) + .await?; + let canceled_calls_to_user_ids = called_participants + .into_iter() + .map(|participant| participant.user_id) + .collect(); + + // Detect left projects. 
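+                // A "left project" is any project in which this connection was a
+                // collaborator. For each one we record the other collaborators'
+                // connection ids (so they can be notified) and whether the project
+                // should be unshared entirely: that is the case when the leaver was
+                // the host, or when the project is served by a dev server owned by
+                // the leaving user.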
+ #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryProjectIds { + ProjectId, + } + let project_ids: Vec = project_collaborator::Entity::find() + .select_only() + .column_as( + project_collaborator::Column::ProjectId, + QueryProjectIds::ProjectId, + ) + .filter( + Condition::all() + .add( + project_collaborator::Column::ConnectionId.eq(connection.id as i32), + ) + .add( + project_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .into_values::<_, QueryProjectIds>() + .all(&*tx) + .await?; + + // if any project in the room has a remote-project-id that belongs to a dev server that this user owns. + let dev_server_projects_for_user = self + .dev_server_project_ids_for_user(leaving_participant.user_id, &tx) + .await?; + + let dev_server_projects_to_unshare = project::Entity::find() + .filter( + Condition::all() + .add(project::Column::RoomId.eq(room_id)) + .add( + project::Column::DevServerProjectId + .is_in(dev_server_projects_for_user.clone()), + ), + ) + .all(&*tx) + .await? + .into_iter() + .map(|project| project.id) + .collect::>(); + let mut left_projects = HashMap::default(); + let mut collaborators = project_collaborator::Entity::find() + .filter(project_collaborator::Column::ProjectId.is_in(project_ids)) + .stream(&*tx) + .await?; + + while let Some(collaborator) = collaborators.next().await { + let collaborator = collaborator?; + let left_project = + left_projects + .entry(collaborator.project_id) + .or_insert(LeftProject { + id: collaborator.project_id, + connection_ids: Default::default(), + should_unshare: false, + }); + + let collaborator_connection_id = collaborator.connection(); + if collaborator_connection_id != connection { + left_project.connection_ids.push(collaborator_connection_id); + } + + if (collaborator.is_host && collaborator.connection() == connection) + || dev_server_projects_to_unshare.contains(&collaborator.project_id) + { + left_project.should_unshare = true; + } + } + drop(collaborators); + + // Leave projects. + project_collaborator::Entity::delete_many() + .filter( + Condition::all() + .add( + project_collaborator::Column::ConnectionId.eq(connection.id as i32), + ) + .add( + project_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(&*tx) + .await?; + + follower::Entity::delete_many() + .filter( + Condition::all() + .add(follower::Column::FollowerConnectionId.eq(connection.id as i32)), + ) + .exec(&*tx) + .await?; + + // Unshare projects. 
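+                // Projects hosted over the leaving connection are deleted outright,
+                // while projects backed by the user's dev servers are kept and only
+                // detached from the room (their room_id is cleared below).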
+ project::Entity::delete_many() + .filter( + Condition::all() + .add(project::Column::RoomId.eq(room_id)) + .add(project::Column::HostConnectionId.eq(connection.id as i32)) + .add( + project::Column::HostConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(&*tx) + .await?; + + if !dev_server_projects_to_unshare.is_empty() { + project::Entity::update_many() + .filter(project::Column::Id.is_in(dev_server_projects_to_unshare)) + .set(project::ActiveModel { + room_id: ActiveValue::Set(None), + ..Default::default() + }) + .exec(&*tx) + .await?; + } + + let (channel, room) = self.get_channel_room(room_id, &tx).await?; + let deleted = if room.participants.is_empty() { + let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?; + result.rows_affected > 0 + } else { + false + }; + + let left_room = LeftRoom { + room, + channel, + left_projects, + canceled_calls_to_user_ids, + deleted, + }; + + if left_room.room.participants.is_empty() { + self.rooms.remove(&room_id); + } + + Ok(Some((room_id, left_room))) + } else { + Ok(None) + } + }) + .await + } + + /// Updates the location of a participant in the given room. + pub async fn update_room_participant_location( + &self, + room_id: RoomId, + connection: ConnectionId, + location: proto::ParticipantLocation, + ) -> Result> { + self.room_transaction(room_id, |tx| async { + let tx = tx; + let location_kind; + let location_project_id; + match location + .variant + .as_ref() + .ok_or_else(|| anyhow!("invalid location"))? + { + proto::participant_location::Variant::SharedProject(project) => { + location_kind = 0; + location_project_id = Some(ProjectId::from_proto(project.id)); + } + proto::participant_location::Variant::UnsharedProject(_) => { + location_kind = 1; + location_project_id = None; + } + proto::participant_location::Variant::External(_) => { + location_kind = 2; + location_project_id = None; + } + } + + let result = room_participant::Entity::update_many() + .filter( + Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add( + room_participant::Column::AnsweringConnectionId + .eq(connection.id as i32), + ) + .add( + room_participant::Column::AnsweringConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .set(room_participant::ActiveModel { + location_kind: ActiveValue::set(Some(location_kind)), + location_project_id: ActiveValue::set(location_project_id), + ..Default::default() + }) + .exec(&*tx) + .await?; + + if result.rows_affected == 1 { + let room = self.get_room(room_id, &tx).await?; + Ok(room) + } else { + Err(anyhow!("could not update room participant location"))? + } + }) + .await + } + + /// Sets the role of a participant in the given room. + pub async fn set_room_participant_role( + &self, + admin_id: UserId, + room_id: RoomId, + user_id: UserId, + role: ChannelRole, + ) -> Result> { + self.room_transaction(room_id, |tx| async move { + room_participant::Entity::find() + .filter( + Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add(room_participant::Column::UserId.eq(admin_id)) + .add(room_participant::Column::Role.eq(ChannelRole::Admin)), + ) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("only admins can set participant role"))?; + + if role.requires_cla() { + self.check_user_has_signed_cla(user_id, room_id, &tx) + .await?; + } + + let result = room_participant::Entity::update_many() + .filter( + Condition::all() + .add(room_participant::Column::RoomId.eq(room_id)) + .add(room_participant::Column::UserId.eq(user_id)), + ) + .set(room_participant::ActiveModel { + role: ActiveValue::set(Some(role)), + ..Default::default() + }) + .exec(&*tx) + .await?; + + if result.rows_affected != 1 { + Err(anyhow!("could not update room participant role"))?; + } + self.get_room(room_id, &tx).await + }) + .await + } + + async fn check_user_has_signed_cla( + &self, + user_id: UserId, + room_id: RoomId, + tx: &DatabaseTransaction, + ) -> Result<()> { + let channel = room::Entity::find_by_id(room_id) + .one(tx) + .await? + .ok_or_else(|| anyhow!("could not find room"))? + .find_related(channel::Entity) + .one(tx) + .await?; + + if let Some(channel) = channel { + let requires_zed_cla = channel.requires_zed_cla + || channel::Entity::find() + .filter( + channel::Column::Id + .is_in(channel.ancestors()) + .and(channel::Column::RequiresZedCla.eq(true)), + ) + .count(tx) + .await? + > 0; + if requires_zed_cla { + if contributor::Entity::find() + .filter(contributor::Column::UserId.eq(user_id)) + .one(tx) + .await? + .is_none() + { + Err(anyhow!("user has not signed the Zed CLA"))?; + } + } + } + Ok(()) + } + + pub async fn connection_lost(&self, connection: ConnectionId) -> Result<()> { + self.transaction(|tx| async move { + self.room_connection_lost(connection, &tx).await?; + self.channel_buffer_connection_lost(connection, &tx).await?; + self.channel_chat_connection_lost(connection, &tx).await?; + Ok(()) + }) + .await + } + + pub async fn room_connection_lost( + &self, + connection: ConnectionId, + tx: &DatabaseTransaction, + ) -> Result<()> { + let participant = room_participant::Entity::find() + .filter( + Condition::all() + .add(room_participant::Column::AnsweringConnectionId.eq(connection.id as i32)) + .add( + room_participant::Column::AnsweringConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .one(tx) + .await?; + + if let Some(participant) = participant { + room_participant::Entity::update(room_participant::ActiveModel { + answering_connection_lost: ActiveValue::set(true), + ..participant.into_active_model() + }) + .exec(tx) + .await?; + } + Ok(()) + } + + fn build_incoming_call( + room: &proto::Room, + called_user_id: UserId, + ) -> Option { + let pending_participant = room + .pending_participants + .iter() + .find(|participant| participant.user_id == called_user_id.to_proto())?; + + Some(proto::IncomingCall { + room_id: room.id, + calling_user_id: pending_participant.calling_user_id, + participant_user_ids: room + .participants + .iter() + .map(|participant| participant.user_id) + .collect(), + initial_project: room.participants.iter().find_map(|participant| { + let initial_project_id = pending_participant.initial_project_id?; + participant + .projects + .iter() + .find(|project| project.id == initial_project_id) + .cloned() + }), + }) + } + + pub async fn get_room(&self, room_id: RoomId, tx: &DatabaseTransaction) -> Result { + let (_, room) = self.get_channel_room(room_id, tx).await?; + Ok(room) + } + + pub async fn room_connection_ids( + &self, + room_id: RoomId, + connection_id: ConnectionId, + ) -> Result>> { + self.room_transaction(room_id, |tx| async move { + let mut participants = room_participant::Entity::find() + 
.filter(room_participant::Column::RoomId.eq(room_id)) + .stream(&*tx) + .await?; + + let mut is_participant = false; + let mut connection_ids = HashSet::default(); + while let Some(participant) = participants.next().await { + let participant = participant?; + if let Some(answering_connection) = participant.answering_connection() { + if answering_connection == connection_id { + is_participant = true; + } else { + connection_ids.insert(answering_connection); + } + } + } + + if !is_participant { + Err(anyhow!("not a room participant"))?; + } + + Ok(connection_ids) + }) + .await + } + + async fn get_channel_room( + &self, + room_id: RoomId, + tx: &DatabaseTransaction, + ) -> Result<(Option, proto::Room)> { + let db_room = room::Entity::find_by_id(room_id) + .one(tx) + .await? + .ok_or_else(|| anyhow!("could not find room"))?; + + let mut db_participants = db_room + .find_related(room_participant::Entity) + .stream(tx) + .await?; + let mut participants = HashMap::default(); + let mut pending_participants = Vec::new(); + while let Some(db_participant) = db_participants.next().await { + let db_participant = db_participant?; + if let ( + Some(answering_connection_id), + Some(answering_connection_server_id), + Some(participant_index), + ) = ( + db_participant.answering_connection_id, + db_participant.answering_connection_server_id, + db_participant.participant_index, + ) { + let location = match ( + db_participant.location_kind, + db_participant.location_project_id, + ) { + (Some(0), Some(project_id)) => { + Some(proto::participant_location::Variant::SharedProject( + proto::participant_location::SharedProject { + id: project_id.to_proto(), + }, + )) + } + (Some(1), _) => Some(proto::participant_location::Variant::UnsharedProject( + Default::default(), + )), + _ => Some(proto::participant_location::Variant::External( + Default::default(), + )), + }; + + let answering_connection = ConnectionId { + owner_id: answering_connection_server_id.0 as u32, + id: answering_connection_id as u32, + }; + participants.insert( + answering_connection, + proto::Participant { + user_id: db_participant.user_id.to_proto(), + peer_id: Some(answering_connection.into()), + projects: Default::default(), + location: Some(proto::ParticipantLocation { variant: location }), + participant_index: participant_index as u32, + role: db_participant.role.unwrap_or(ChannelRole::Member).into(), + }, + ); + } else { + pending_participants.push(proto::PendingParticipant { + user_id: db_participant.user_id.to_proto(), + calling_user_id: db_participant.calling_user_id.to_proto(), + initial_project_id: db_participant.initial_project_id.map(|id| id.to_proto()), + }); + } + } + drop(db_participants); + + let db_projects = db_room + .find_related(project::Entity) + .find_with_related(worktree::Entity) + .all(tx) + .await?; + + for (db_project, db_worktrees) in db_projects { + let host_connection = db_project.host_connection()?; + if let Some(participant) = participants.get_mut(&host_connection) { + participant.projects.push(proto::ParticipantProject { + id: db_project.id.to_proto(), + worktree_root_names: Default::default(), + }); + let project = participant.projects.last_mut().unwrap(); + + for db_worktree in db_worktrees { + if db_worktree.visible { + project.worktree_root_names.push(db_worktree.root_name); + } + } + } else if let Some(dev_server_project_id) = db_project.dev_server_project_id { + let host = self + .owner_for_dev_server_project(dev_server_project_id, tx) + .await?; + if let Some((_, participant)) = participants + .iter_mut() + 
.find(|(_, v)| v.user_id == host.to_proto()) + { + participant.projects.push(proto::ParticipantProject { + id: db_project.id.to_proto(), + worktree_root_names: Default::default(), + }); + let project = participant.projects.last_mut().unwrap(); + + for db_worktree in db_worktrees { + if db_worktree.visible { + project.worktree_root_names.push(db_worktree.root_name); + } + } + } + } + } + + let mut db_followers = db_room.find_related(follower::Entity).stream(tx).await?; + let mut followers = Vec::new(); + while let Some(db_follower) = db_followers.next().await { + let db_follower = db_follower?; + followers.push(proto::Follower { + leader_id: Some(db_follower.leader_connection().into()), + follower_id: Some(db_follower.follower_connection().into()), + project_id: db_follower.project_id.to_proto(), + }); + } + drop(db_followers); + + let channel = if let Some(channel_id) = db_room.channel_id { + Some(self.get_channel_internal(channel_id, tx).await?) + } else { + None + }; + + Ok(( + channel, + proto::Room { + id: db_room.id.to_proto(), + live_kit_room: db_room.live_kit_room, + participants: participants.into_values().collect(), + pending_participants, + followers, + }, + )) + } +} diff --git a/crates/collab/src/db/queries/servers.rs b/crates/collab/src/db/queries/servers.rs new file mode 100644 index 0000000..f4e01be --- /dev/null +++ b/crates/collab/src/db/queries/servers.rs @@ -0,0 +1,105 @@ +use super::*; + +impl Database { + /// Creates a new server in the given environment. + pub async fn create_server(&self, environment: &str) -> Result { + self.transaction(|tx| async move { + let server = server::ActiveModel { + environment: ActiveValue::set(environment.into()), + ..Default::default() + } + .insert(&*tx) + .await?; + Ok(server.id) + }) + .await + } + + /// Returns the IDs of resources associated with stale servers. + /// + /// A server is stale if it is in the specified `environment` and does not + /// match the provided `new_server_id`. + pub async fn stale_server_resource_ids( + &self, + environment: &str, + new_server_id: ServerId, + ) -> Result<(Vec, Vec)> { + self.transaction(|tx| async move { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryRoomIds { + RoomId, + } + + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryChannelIds { + ChannelId, + } + + let stale_server_epochs = self + .stale_server_ids(environment, new_server_id, &tx) + .await?; + let room_ids = room_participant::Entity::find() + .select_only() + .column(room_participant::Column::RoomId) + .distinct() + .filter( + room_participant::Column::AnsweringConnectionServerId + .is_in(stale_server_epochs.iter().copied()), + ) + .into_values::<_, QueryRoomIds>() + .all(&*tx) + .await?; + let channel_ids = channel_buffer_collaborator::Entity::find() + .select_only() + .column(channel_buffer_collaborator::Column::ChannelId) + .distinct() + .filter( + channel_buffer_collaborator::Column::ConnectionServerId + .is_in(stale_server_epochs.iter().copied()), + ) + .into_values::<_, QueryChannelIds>() + .all(&*tx) + .await?; + + Ok((room_ids, channel_ids)) + }) + .await + } + + /// Deletes any stale servers in the environment that don't match the `new_server_id`. 
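+    // A usage sketch (an assumption, not part of this diff): the sequence a collab
+    // server is expected to run on startup, where `db` is a `Database` handle,
+    // `create_server` yields the new `ServerId`, and `stale_server_resource_ids`
+    // yields the affected room and channel ids, as the bodies here suggest.
+    //
+    //     let server_id = db.create_server(environment).await?;
+    //     let (room_ids, _channel_ids) = db
+    //         .stale_server_resource_ids(environment, server_id)
+    //         .await?;
+    //     for room_id in room_ids {
+    //         // Drop participants still attached to a previous server epoch.
+    //         db.clear_stale_room_participants(room_id, server_id).await?;
+    //     }
+    //     db.delete_stale_servers(environment, server_id).await?;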
+ pub async fn delete_stale_servers( + &self, + environment: &str, + new_server_id: ServerId, + ) -> Result<()> { + self.transaction(|tx| async move { + server::Entity::delete_many() + .filter( + Condition::all() + .add(server::Column::Environment.eq(environment)) + .add(server::Column::Id.ne(new_server_id)), + ) + .exec(&*tx) + .await?; + Ok(()) + }) + .await + } + + async fn stale_server_ids( + &self, + environment: &str, + new_server_id: ServerId, + tx: &DatabaseTransaction, + ) -> Result> { + let stale_servers = server::Entity::find() + .filter( + Condition::all() + .add(server::Column::Environment.eq(environment)) + .add(server::Column::Id.ne(new_server_id)), + ) + .all(tx) + .await?; + Ok(stale_servers.into_iter().map(|server| server.id).collect()) + } +} diff --git a/crates/collab/src/db/queries/users.rs b/crates/collab/src/db/queries/users.rs new file mode 100644 index 0000000..ff5c4d5 --- /dev/null +++ b/crates/collab/src/db/queries/users.rs @@ -0,0 +1,312 @@ +use super::*; + +impl Database { + /// Creates a new user. + pub async fn create_user( + &self, + email_address: &str, + admin: bool, + params: NewUserParams, + ) -> Result { + self.transaction(|tx| async { + let tx = tx; + let user = user::Entity::insert(user::ActiveModel { + email_address: ActiveValue::set(Some(email_address.into())), + github_login: ActiveValue::set(params.github_login.clone()), + github_user_id: ActiveValue::set(Some(params.github_user_id)), + admin: ActiveValue::set(admin), + metrics_id: ActiveValue::set(Uuid::new_v4()), + ..Default::default() + }) + .on_conflict( + OnConflict::column(user::Column::GithubLogin) + .update_columns([ + user::Column::Admin, + user::Column::EmailAddress, + user::Column::GithubUserId, + ]) + .to_owned(), + ) + .exec_with_returning(&*tx) + .await?; + + Ok(NewUserResult { + user_id: user.id, + metrics_id: user.metrics_id.to_string(), + signup_device_id: None, + inviting_user_id: None, + }) + }) + .await + } + + /// Returns a user by ID. There are no access checks here, so this should only be used internally. + pub async fn get_user_by_id(&self, id: UserId) -> Result> { + self.transaction(|tx| async move { Ok(user::Entity::find_by_id(id).one(&*tx).await?) }) + .await + } + + /// Returns all users by ID. There are no access checks here, so this should only be used internally. + pub async fn get_users_by_ids(&self, ids: Vec) -> Result> { + if ids.len() >= 10000_usize { + return Err(anyhow!("too many users"))?; + } + self.transaction(|tx| async { + let tx = tx; + Ok(user::Entity::find() + .filter(user::Column::Id.is_in(ids.iter().copied())) + .all(&*tx) + .await?) + }) + .await + } + + /// Returns a user by GitHub login. There are no access checks here, so this should only be used internally. + pub async fn get_user_by_github_login(&self, github_login: &str) -> Result> { + self.transaction(|tx| async move { + Ok(user::Entity::find() + .filter(user::Column::GithubLogin.eq(github_login)) + .one(&*tx) + .await?) 
+ }) + .await + } + + pub async fn get_or_create_user_by_github_account( + &self, + github_login: &str, + github_user_id: Option, + github_email: Option<&str>, + initial_channel_id: Option, + ) -> Result { + self.transaction(|tx| async move { + self.get_or_create_user_by_github_account_tx( + github_login, + github_user_id, + github_email, + initial_channel_id, + &tx, + ) + .await + }) + .await + } + + pub async fn get_or_create_user_by_github_account_tx( + &self, + github_login: &str, + github_user_id: Option, + github_email: Option<&str>, + initial_channel_id: Option, + tx: &DatabaseTransaction, + ) -> Result { + if let Some(github_user_id) = github_user_id { + if let Some(user_by_github_user_id) = user::Entity::find() + .filter(user::Column::GithubUserId.eq(github_user_id)) + .one(tx) + .await? + { + let mut user_by_github_user_id = user_by_github_user_id.into_active_model(); + user_by_github_user_id.github_login = ActiveValue::set(github_login.into()); + Ok(user_by_github_user_id.update(tx).await?) + } else if let Some(user_by_github_login) = user::Entity::find() + .filter(user::Column::GithubLogin.eq(github_login)) + .one(tx) + .await? + { + let mut user_by_github_login = user_by_github_login.into_active_model(); + user_by_github_login.github_user_id = ActiveValue::set(Some(github_user_id)); + Ok(user_by_github_login.update(tx).await?) + } else { + let user = user::Entity::insert(user::ActiveModel { + email_address: ActiveValue::set(github_email.map(|email| email.into())), + github_login: ActiveValue::set(github_login.into()), + github_user_id: ActiveValue::set(Some(github_user_id)), + admin: ActiveValue::set(false), + invite_count: ActiveValue::set(0), + invite_code: ActiveValue::set(None), + metrics_id: ActiveValue::set(Uuid::new_v4()), + ..Default::default() + }) + .exec_with_returning(tx) + .await?; + if let Some(channel_id) = initial_channel_id { + channel_member::Entity::insert(channel_member::ActiveModel { + id: ActiveValue::NotSet, + channel_id: ActiveValue::Set(channel_id), + user_id: ActiveValue::Set(user.id), + accepted: ActiveValue::Set(true), + role: ActiveValue::Set(ChannelRole::Guest), + }) + .exec(tx) + .await?; + } + Ok(user) + } + } else { + let user = user::Entity::find() + .filter(user::Column::GithubLogin.eq(github_login)) + .one(tx) + .await? + .ok_or_else(|| anyhow!("no such user {}", github_login))?; + Ok(user) + } + } + + /// get_all_users returns the next page of users. To get more call again with + /// the same limit and the page incremented by 1. + pub async fn get_all_users(&self, page: u32, limit: u32) -> Result> { + self.transaction(|tx| async move { + Ok(user::Entity::find() + .order_by_asc(user::Column::GithubLogin) + .limit(limit as u64) + .offset(page as u64 * limit as u64) + .all(&*tx) + .await?) + }) + .await + } + + /// Returns the metrics id for the user. + pub async fn get_user_metrics_id(&self, id: UserId) -> Result { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryAs { + MetricsId, + } + + self.transaction(|tx| async move { + let metrics_id: Uuid = user::Entity::find_by_id(id) + .select_only() + .column(user::Column::MetricsId) + .into_values::<_, QueryAs>() + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("could not find user"))?; + Ok(metrics_id.to_string()) + }) + .await + } + + /// Sets "connected_once" on the user for analytics. 
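+    // `get_or_create_user_by_github_account_tx` above resolves an account in a fixed
+    // order of precedence. The sketch below restates that decision as a self-contained
+    // function; the enum and flag names are hypothetical, and only the branching mirrors
+    // the real method (which queries the users table rather than taking booleans).
+    enum GithubAccountAction {
+        UpdateLoginOnExistingId, // matched on github_user_id: refresh the stored github_login
+        BackfillUserId,          // matched on github_login: store the numeric GitHub id
+        CreateUser,              // no match: insert a new user (and optional channel membership)
+        LookupByLoginOnly,       // caller supplied no github_user_id at all
+    }
+
+    fn resolve(has_user_id: bool, id_matches: bool, login_matches: bool) -> GithubAccountAction {
+        if !has_user_id {
+            GithubAccountAction::LookupByLoginOnly
+        } else if id_matches {
+            GithubAccountAction::UpdateLoginOnExistingId
+        } else if login_matches {
+            GithubAccountAction::BackfillUserId
+        } else {
+            GithubAccountAction::CreateUser
+        }
+    }
+
+    fn main() {
+        use GithubAccountAction::*;
+        assert!(matches!(resolve(true, true, false), UpdateLoginOnExistingId));
+        assert!(matches!(resolve(true, false, true), BackfillUserId));
+        assert!(matches!(resolve(true, false, false), CreateUser));
+        assert!(matches!(resolve(false, false, false), LookupByLoginOnly));
+    }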
+ pub async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()> { + self.transaction(|tx| async move { + user::Entity::update_many() + .filter(user::Column::Id.eq(id)) + .set(user::ActiveModel { + connected_once: ActiveValue::set(connected_once), + ..Default::default() + }) + .exec(&*tx) + .await?; + Ok(()) + }) + .await + } + + /// hard delete the user. + pub async fn destroy_user(&self, id: UserId) -> Result<()> { + self.transaction(|tx| async move { + access_token::Entity::delete_many() + .filter(access_token::Column::UserId.eq(id)) + .exec(&*tx) + .await?; + user::Entity::delete_by_id(id).exec(&*tx).await?; + Ok(()) + }) + .await + } + + /// Find users where github_login ILIKE name_query. + pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result> { + self.transaction(|tx| async { + let tx = tx; + let like_string = Self::fuzzy_like_string(name_query); + let query = " + SELECT users.* + FROM users + WHERE github_login ILIKE $1 + ORDER BY github_login <-> $2 + LIMIT $3 + "; + + Ok(user::Entity::find() + .from_raw_sql(Statement::from_sql_and_values( + self.pool.get_database_backend(), + query, + vec![like_string.into(), name_query.into(), limit.into()], + )) + .all(&*tx) + .await?) + }) + .await + } + + /// fuzzy_like_string creates a string for matching in-order using fuzzy_search_users. + /// e.g. "cir" would become "%c%i%r%" + pub fn fuzzy_like_string(string: &str) -> String { + let mut result = String::with_capacity(string.len() * 2 + 1); + for c in string.chars() { + if c.is_alphanumeric() { + result.push('%'); + result.push(c); + } + } + result.push('%'); + result + } + + /// Creates a new feature flag. + pub async fn create_user_flag(&self, flag: &str) -> Result { + self.transaction(|tx| async move { + let flag = feature_flag::Entity::insert(feature_flag::ActiveModel { + flag: ActiveValue::set(flag.to_string()), + ..Default::default() + }) + .exec(&*tx) + .await? + .last_insert_id; + + Ok(flag) + }) + .await + } + + /// Add the given user to the feature flag + pub async fn add_user_flag(&self, user: UserId, flag: FlagId) -> Result<()> { + self.transaction(|tx| async move { + user_feature::Entity::insert(user_feature::ActiveModel { + user_id: ActiveValue::set(user), + feature_id: ActiveValue::set(flag), + }) + .exec(&*tx) + .await?; + + Ok(()) + }) + .await + } + + /// Returns the active flags for the user. 
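+    // Worked example for the fuzzy matching helpers above: `fuzzy_like_string("c-i r!")`
+    // also yields "%c%i%r%", because non-alphanumeric characters are dropped before the
+    // wildcards are interleaved. That keeps the ILIKE match in-order-with-gaps and
+    // prevents callers from injecting their own `%`/`_` wildcards into the pattern,
+    // while the raw query's `ORDER BY github_login <-> $2` ranks closer matches first
+    // (a trigram-distance ordering, assuming the pg_trgm extension is enabled).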
+ pub async fn get_user_flags(&self, user: UserId) -> Result> { + self.transaction(|tx| async move { + #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] + enum QueryAs { + Flag, + } + + let flags = user::Model { + id: user, + ..Default::default() + } + .find_linked(user::UserFlags) + .select_only() + .column(feature_flag::Column::Flag) + .into_values::<_, QueryAs>() + .all(&*tx) + .await?; + + Ok(flags) + }) + .await + } +} diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs new file mode 100644 index 0000000..b9a3c5d --- /dev/null +++ b/crates/collab/src/db/tables.rs @@ -0,0 +1,40 @@ +pub mod access_token; +pub mod buffer; +pub mod buffer_operation; +pub mod buffer_snapshot; +pub mod channel; +pub mod channel_buffer_collaborator; +pub mod channel_chat_participant; +pub mod channel_member; +pub mod channel_message; +pub mod channel_message_mention; +pub mod contact; +pub mod contributor; +pub mod dev_server; +pub mod dev_server_project; +pub mod embedding; +pub mod extension; +pub mod extension_version; +pub mod feature_flag; +pub mod follower; +pub mod hosted_project; +pub mod language_server; +pub mod notification; +pub mod notification_kind; +pub mod observed_buffer_edits; +pub mod observed_channel_messages; +pub mod project; +pub mod project_collaborator; +pub mod rate_buckets; +pub mod room; +pub mod room_participant; +pub mod server; +pub mod signup; +pub mod user; +pub mod user_feature; +pub mod worktree; +pub mod worktree_diagnostic_summary; +pub mod worktree_entry; +pub mod worktree_repository; +pub mod worktree_repository_statuses; +pub mod worktree_settings_file; diff --git a/crates/collab/src/db/tables/access_token.rs b/crates/collab/src/db/tables/access_token.rs new file mode 100644 index 0000000..22635fb --- /dev/null +++ b/crates/collab/src/db/tables/access_token.rs @@ -0,0 +1,30 @@ +use crate::db::{AccessTokenId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "access_tokens")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: AccessTokenId, + pub user_id: UserId, + pub impersonated_user_id: Option, + pub hash: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/buffer.rs b/crates/collab/src/db/tables/buffer.rs new file mode 100644 index 0000000..e35746a --- /dev/null +++ b/crates/collab/src/db/tables/buffer.rs @@ -0,0 +1,48 @@ +use crate::db::{BufferId, ChannelId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "buffers")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: BufferId, + pub epoch: i32, + pub channel_id: ChannelId, + pub latest_operation_epoch: Option, + pub latest_operation_lamport_timestamp: Option, + pub latest_operation_replica_id: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::buffer_operation::Entity")] + Operations, + #[sea_orm(has_many = "super::buffer_snapshot::Entity")] + Snapshots, + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, +} + +impl Related for Entity { + fn 
to() -> RelationDef { + Relation::Operations.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Snapshots.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/buffer_operation.rs b/crates/collab/src/db/tables/buffer_operation.rs new file mode 100644 index 0000000..37bd4be --- /dev/null +++ b/crates/collab/src/db/tables/buffer_operation.rs @@ -0,0 +1,34 @@ +use crate::db::BufferId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "buffer_operations")] +pub struct Model { + #[sea_orm(primary_key)] + pub buffer_id: BufferId, + #[sea_orm(primary_key)] + pub epoch: i32, + #[sea_orm(primary_key)] + pub lamport_timestamp: i32, + #[sea_orm(primary_key)] + pub replica_id: i32, + pub value: Vec, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::buffer::Entity", + from = "Column::BufferId", + to = "super::buffer::Column::Id" + )] + Buffer, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/buffer_snapshot.rs b/crates/collab/src/db/tables/buffer_snapshot.rs new file mode 100644 index 0000000..c9de665 --- /dev/null +++ b/crates/collab/src/db/tables/buffer_snapshot.rs @@ -0,0 +1,31 @@ +use crate::db::BufferId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "buffer_snapshots")] +pub struct Model { + #[sea_orm(primary_key)] + pub buffer_id: BufferId, + #[sea_orm(primary_key)] + pub epoch: i32, + pub text: String, + pub operation_serialization_version: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::buffer::Entity", + from = "Column::BufferId", + to = "super::buffer::Column::Id" + )] + Buffer, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/channel.rs b/crates/collab/src/db/tables/channel.rs new file mode 100644 index 0000000..7625e47 --- /dev/null +++ b/crates/collab/src/db/tables/channel.rs @@ -0,0 +1,92 @@ +use crate::db::{ChannelId, ChannelVisibility}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channels")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ChannelId, + pub name: String, + pub visibility: ChannelVisibility, + pub parent_path: String, + pub requires_zed_cla: bool, +} + +impl Model { + pub fn parent_id(&self) -> Option { + self.ancestors().last() + } + + pub fn is_root(&self) -> bool { + self.parent_path.is_empty() + } + + pub fn root_id(&self) -> ChannelId { + self.ancestors().next().unwrap_or(self.id) + } + + pub fn ancestors(&self) -> impl Iterator + '_ { + self.parent_path + .trim_end_matches('/') + .split('/') + .filter_map(|id| Some(ChannelId::from_proto(id.parse().ok()?))) + } + + pub fn ancestors_including_self(&self) -> impl Iterator + '_ { + self.ancestors().chain(Some(self.id)) + } + + pub fn path(&self) -> String { + format!("{}{}/", self.parent_path, self.id) + } + + pub fn descendant_path_filter(&self) -> String { + format!("{}{}/%", self.parent_path, self.id) + } +} + +impl 
ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_one = "super::room::Entity")] + Room, + #[sea_orm(has_one = "super::buffer::Entity")] + Buffer, + #[sea_orm(has_many = "super::channel_member::Entity")] + Member, + #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")] + BufferCollaborators, + #[sea_orm(has_many = "super::channel_chat_participant::Entity")] + ChatParticipants, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Member.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::BufferCollaborators.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ChatParticipants.def() + } +} diff --git a/crates/collab/src/db/tables/channel_buffer_collaborator.rs b/crates/collab/src/db/tables/channel_buffer_collaborator.rs new file mode 100644 index 0000000..ac2637b --- /dev/null +++ b/crates/collab/src/db/tables/channel_buffer_collaborator.rs @@ -0,0 +1,43 @@ +use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId}; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channel_buffer_collaborators")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ChannelBufferCollaboratorId, + pub channel_id: ChannelId, + pub connection_id: i32, + pub connection_server_id: ServerId, + pub connection_lost: bool, + pub user_id: UserId, + pub replica_id: ReplicaId, +} + +impl Model { + pub fn connection(&self) -> ConnectionId { + ConnectionId { + owner_id: self.connection_server_id.0 as u32, + id: self.connection_id as u32, + } + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/channel_chat_participant.rs b/crates/collab/src/db/tables/channel_chat_participant.rs new file mode 100644 index 0000000..f3ef36c --- /dev/null +++ b/crates/collab/src/db/tables/channel_chat_participant.rs @@ -0,0 +1,41 @@ +use crate::db::{ChannelChatParticipantId, ChannelId, ServerId, UserId}; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channel_chat_participants")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ChannelChatParticipantId, + pub channel_id: ChannelId, + pub user_id: UserId, + pub connection_id: i32, + pub connection_server_id: ServerId, +} + +impl Model { + pub fn connection(&self) -> ConnectionId { + ConnectionId { + owner_id: self.connection_server_id.0 as u32, + id: self.connection_id as u32, + } + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git 
a/crates/collab/src/db/tables/channel_member.rs b/crates/collab/src/db/tables/channel_member.rs new file mode 100644 index 0000000..5498a00 --- /dev/null +++ b/crates/collab/src/db/tables/channel_member.rs @@ -0,0 +1,59 @@ +use crate::db::{channel_member, ChannelId, ChannelMemberId, ChannelRole, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channel_members")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ChannelMemberId, + pub channel_id: ChannelId, + pub user_id: UserId, + pub accepted: bool, + pub role: ChannelRole, +} + +impl ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +#[derive(Debug)] +pub struct UserToChannel; + +impl Linked for UserToChannel { + type FromEntity = super::user::Entity; + + type ToEntity = super::channel::Entity; + + fn link(&self) -> Vec { + vec![ + channel_member::Relation::User.def().rev(), + channel_member::Relation::Channel.def(), + ] + } +} diff --git a/crates/collab/src/db/tables/channel_message.rs b/crates/collab/src/db/tables/channel_message.rs new file mode 100644 index 0000000..2ec776f --- /dev/null +++ b/crates/collab/src/db/tables/channel_message.rs @@ -0,0 +1,47 @@ +use crate::db::{ChannelId, MessageId, UserId}; +use sea_orm::entity::prelude::*; +use time::PrimitiveDateTime; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channel_messages")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: MessageId, + pub channel_id: ChannelId, + pub sender_id: UserId, + pub body: String, + pub sent_at: PrimitiveDateTime, + pub edited_at: Option, + pub nonce: Uuid, + pub reply_to_message_id: Option, +} + +impl ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::SenderId", + to = "super::user::Column::Id" + )] + Sender, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Sender.def() + } +} diff --git a/crates/collab/src/db/tables/channel_message_mention.rs b/crates/collab/src/db/tables/channel_message_mention.rs new file mode 100644 index 0000000..6155b05 --- /dev/null +++ b/crates/collab/src/db/tables/channel_message_mention.rs @@ -0,0 +1,43 @@ +use crate::db::{MessageId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channel_message_mentions")] +pub struct Model { + #[sea_orm(primary_key)] + pub message_id: MessageId, + #[sea_orm(primary_key)] + pub start_offset: i32, + pub end_offset: i32, + pub user_id: UserId, +} + +impl ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = 
"super::channel_message::Entity", + from = "Column::MessageId", + to = "super::channel_message::Column::Id" + )] + Message, + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + MentionedUser, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Message.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::MentionedUser.def() + } +} diff --git a/crates/collab/src/db/tables/contact.rs b/crates/collab/src/db/tables/contact.rs new file mode 100644 index 0000000..38af8b7 --- /dev/null +++ b/crates/collab/src/db/tables/contact.rs @@ -0,0 +1,32 @@ +use crate::db::{ContactId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "contacts")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ContactId, + pub user_id_a: UserId, + pub user_id_b: UserId, + pub a_to_b: bool, + pub should_notify: bool, + pub accepted: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::room_participant::Entity", + from = "Column::UserIdA", + to = "super::room_participant::Column::UserId" + )] + UserARoomParticipant, + #[sea_orm( + belongs_to = "super::room_participant::Entity", + from = "Column::UserIdB", + to = "super::room_participant::Column::UserId" + )] + UserBRoomParticipant, +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/contributor.rs b/crates/collab/src/db/tables/contributor.rs new file mode 100644 index 0000000..3ae96a6 --- /dev/null +++ b/crates/collab/src/db/tables/contributor.rs @@ -0,0 +1,30 @@ +use crate::db::UserId; +use sea_orm::entity::prelude::*; +use serde::Serialize; + +/// A user who has signed the CLA. 
+#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)] +#[sea_orm(table_name = "contributors")] +pub struct Model { + #[sea_orm(primary_key)] + pub user_id: UserId, + pub signed_at: DateTime, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl ActiveModelBehavior for ActiveModel {} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} diff --git a/crates/collab/src/db/tables/dev_server.rs b/crates/collab/src/db/tables/dev_server.rs new file mode 100644 index 0000000..a9615ca --- /dev/null +++ b/crates/collab/src/db/tables/dev_server.rs @@ -0,0 +1,39 @@ +use crate::db::{DevServerId, UserId}; +use rpc::proto; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "dev_servers")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: DevServerId, + pub name: String, + pub user_id: UserId, + pub hashed_token: String, + pub ssh_connection_string: Option, +} + +impl ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::dev_server_project::Entity")] + RemoteProject, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::RemoteProject.def() + } +} + +impl Model { + pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer { + proto::DevServer { + dev_server_id: self.id.to_proto(), + name: self.name.clone(), + status: status as i32, + ssh_connection_string: self.ssh_connection_string.clone(), + } + } +} diff --git a/crates/collab/src/db/tables/dev_server_project.rs b/crates/collab/src/db/tables/dev_server_project.rs new file mode 100644 index 0000000..bf90d70 --- /dev/null +++ b/crates/collab/src/db/tables/dev_server_project.rs @@ -0,0 +1,50 @@ +use super::project; +use crate::db::{DevServerId, DevServerProjectId}; +use rpc::proto; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "dev_server_projects")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: DevServerProjectId, + pub dev_server_id: DevServerId, + pub path: String, +} + +impl ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_one = "super::project::Entity")] + Project, + #[sea_orm( + belongs_to = "super::dev_server::Entity", + from = "Column::DevServerId", + to = "super::dev_server::Column::Id" + )] + DevServer, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Project.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::DevServer.def() + } +} + +impl Model { + pub fn to_proto(&self, project: Option) -> proto::DevServerProject { + proto::DevServerProject { + id: self.id.to_proto(), + project_id: project.map(|p| p.id.to_proto()), + dev_server_id: self.dev_server_id.to_proto(), + path: self.path.clone(), + } + } +} diff --git a/crates/collab/src/db/tables/embedding.rs b/crates/collab/src/db/tables/embedding.rs new file mode 100644 index 0000000..8743b4b --- /dev/null +++ b/crates/collab/src/db/tables/embedding.rs @@ -0,0 +1,18 @@ +use sea_orm::entity::prelude::*; +use time::PrimitiveDateTime; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] +#[sea_orm(table_name = "embeddings")] +pub struct Model { + 
#[sea_orm(primary_key)] + pub model: String, + #[sea_orm(primary_key)] + pub digest: Vec, + pub dimensions: Vec, + pub retrieved_at: PrimitiveDateTime, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/extension.rs b/crates/collab/src/db/tables/extension.rs new file mode 100644 index 0000000..5a1462c --- /dev/null +++ b/crates/collab/src/db/tables/extension.rs @@ -0,0 +1,27 @@ +use crate::db::ExtensionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "extensions")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ExtensionId, + pub external_id: String, + pub name: String, + pub latest_version: String, + pub total_download_count: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_one = "super::extension_version::Entity")] + LatestVersion, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::LatestVersion.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/extension_version.rs b/crates/collab/src/db/tables/extension_version.rs new file mode 100644 index 0000000..60e3e5c --- /dev/null +++ b/crates/collab/src/db/tables/extension_version.rs @@ -0,0 +1,38 @@ +use crate::db::ExtensionId; +use sea_orm::entity::prelude::*; +use time::PrimitiveDateTime; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "extension_versions")] +pub struct Model { + #[sea_orm(primary_key)] + pub extension_id: ExtensionId, + #[sea_orm(primary_key)] + pub version: String, + pub published_at: PrimitiveDateTime, + pub authors: String, + pub repository: String, + pub description: String, + pub schema_version: i32, + pub wasm_api_version: Option, + pub download_count: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::extension::Entity", + from = "Column::ExtensionId", + to = "super::extension::Column::Id" + on_condition = r#"super::extension::Column::LatestVersion.into_expr().eq(Column::Version.into_expr())"# + )] + Extension, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Extension.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/feature_flag.rs b/crates/collab/src/db/tables/feature_flag.rs new file mode 100644 index 0000000..41c1451 --- /dev/null +++ b/crates/collab/src/db/tables/feature_flag.rs @@ -0,0 +1,40 @@ +use sea_orm::entity::prelude::*; + +use crate::db::FlagId; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "feature_flags")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: FlagId, + pub flag: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::user_feature::Entity")] + UserFeature, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::UserFeature.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} + +pub struct FlaggedUsers; + +impl Linked for FlaggedUsers { + type FromEntity = Entity; + + type ToEntity = super::user::Entity; + + fn link(&self) -> Vec { + vec![ + super::user_feature::Relation::Flag.def().rev(), + super::user_feature::Relation::User.def(), + ] + } +} diff --git a/crates/collab/src/db/tables/follower.rs b/crates/collab/src/db/tables/follower.rs new file mode 
100644 index 0000000..ffd4543 --- /dev/null +++ b/crates/collab/src/db/tables/follower.rs @@ -0,0 +1,50 @@ +use crate::db::{FollowerId, ProjectId, RoomId, ServerId}; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "followers")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: FollowerId, + pub room_id: RoomId, + pub project_id: ProjectId, + pub leader_connection_server_id: ServerId, + pub leader_connection_id: i32, + pub follower_connection_server_id: ServerId, + pub follower_connection_id: i32, +} + +impl Model { + pub fn leader_connection(&self) -> ConnectionId { + ConnectionId { + owner_id: self.leader_connection_server_id.0 as u32, + id: self.leader_connection_id as u32, + } + } + + pub fn follower_connection(&self) -> ConnectionId { + ConnectionId { + owner_id: self.follower_connection_server_id.0 as u32, + id: self.follower_connection_id as u32, + } + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::RoomId", + to = "super::room::Column::Id" + )] + Room, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/hosted_project.rs b/crates/collab/src/db/tables/hosted_project.rs new file mode 100644 index 0000000..dd7cb1b --- /dev/null +++ b/crates/collab/src/db/tables/hosted_project.rs @@ -0,0 +1,27 @@ +use crate::db::{ChannelId, ChannelVisibility, HostedProjectId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "hosted_projects")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: HostedProjectId, + pub channel_id: ChannelId, + pub name: String, + pub visibility: ChannelVisibility, + pub deleted_at: Option, +} + +impl ActiveModelBehavior for ActiveModel {} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_one = "super::project::Entity")] + Project, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Project.def() + } +} diff --git a/crates/collab/src/db/tables/language_server.rs b/crates/collab/src/db/tables/language_server.rs new file mode 100644 index 0000000..9ff8c75 --- /dev/null +++ b/crates/collab/src/db/tables/language_server.rs @@ -0,0 +1,30 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "language_servers")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: ProjectId, + #[sea_orm(primary_key)] + pub id: i64, + pub name: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::project::Entity", + from = "Column::ProjectId", + to = "super::project::Column::Id" + )] + Project, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Project.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/notification.rs b/crates/collab/src/db/tables/notification.rs new file mode 100644 index 0000000..3105198 --- /dev/null +++ b/crates/collab/src/db/tables/notification.rs @@ -0,0 +1,29 @@ +use crate::db::{NotificationId, NotificationKindId, UserId}; +use sea_orm::entity::prelude::*; +use time::PrimitiveDateTime; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] 
+#[sea_orm(table_name = "notifications")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: NotificationId, + pub created_at: PrimitiveDateTime, + pub recipient_id: UserId, + pub kind: NotificationKindId, + pub entity_id: Option, + pub content: String, + pub is_read: bool, + pub response: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::RecipientId", + to = "super::user::Column::Id" + )] + Recipient, +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/notification_kind.rs b/crates/collab/src/db/tables/notification_kind.rs new file mode 100644 index 0000000..865b5da --- /dev/null +++ b/crates/collab/src/db/tables/notification_kind.rs @@ -0,0 +1,15 @@ +use crate::db::NotificationKindId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "notification_kinds")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: NotificationKindId, + pub name: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/observed_buffer_edits.rs b/crates/collab/src/db/tables/observed_buffer_edits.rs new file mode 100644 index 0000000..e8e7aaf --- /dev/null +++ b/crates/collab/src/db/tables/observed_buffer_edits.rs @@ -0,0 +1,43 @@ +use crate::db::{BufferId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "observed_buffer_edits")] +pub struct Model { + #[sea_orm(primary_key)] + pub user_id: UserId, + pub buffer_id: BufferId, + pub epoch: i32, + pub lamport_timestamp: i32, + pub replica_id: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::buffer::Entity", + from = "Column::BufferId", + to = "super::buffer::Column::Id" + )] + Buffer, + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/observed_channel_messages.rs b/crates/collab/src/db/tables/observed_channel_messages.rs new file mode 100644 index 0000000..18259f8 --- /dev/null +++ b/crates/collab/src/db/tables/observed_channel_messages.rs @@ -0,0 +1,41 @@ +use crate::db::{ChannelId, MessageId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "observed_channel_messages")] +pub struct Model { + #[sea_orm(primary_key)] + pub user_id: UserId, + pub channel_id: ChannelId, + pub channel_message_id: MessageId, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} 
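Most of these entity files only define their columns plus `Relation`/`Named` wiring; the payoff is that SeaORM's join helpers become available on them. As a rough, hypothetical sketch of how such an impl is consumed (this helper is not part of the diff; the function name and placement are invented for illustration), a channel's buffer row could be loaded together with the channel itself through the `Related<channel::Entity>` impl defined in buffer.rs above:

use sea_orm::{ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter};

use crate::db::tables::{buffer, channel};
use crate::db::ChannelId;

/// Hypothetical helper (not part of this diff): fetch a channel's buffer row
/// and the owning channel in a single query, relying on the
/// `Related<channel::Entity>` impl from buffer.rs.
async fn buffer_with_channel(
    tx: &DatabaseTransaction,
    channel_id: ChannelId,
) -> Result<Option<(buffer::Model, Option<channel::Model>)>, sea_orm::DbErr> {
    buffer::Entity::find()
        .filter(buffer::Column::ChannelId.eq(channel_id))
        .find_also_related(channel::Entity)
        .one(tx)
        .await
}

The same pattern applies to any pair of entities in this directory that declare a `Related` impl; `find_also_related` is a standard SeaORM query builder method rather than anything specific to this crate.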
diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs new file mode 100644 index 0000000..6858af0 --- /dev/null +++ b/crates/collab/src/db/tables/project.rs @@ -0,0 +1,110 @@ +use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId}; +use anyhow::anyhow; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "projects")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ProjectId, + pub room_id: Option, + pub host_user_id: Option, + pub host_connection_id: Option, + pub host_connection_server_id: Option, + pub hosted_project_id: Option, + pub dev_server_project_id: Option, +} + +impl Model { + pub fn host_connection(&self) -> Result { + let host_connection_server_id = self + .host_connection_server_id + .ok_or_else(|| anyhow!("empty host_connection_server_id"))?; + let host_connection_id = self + .host_connection_id + .ok_or_else(|| anyhow!("empty host_connection_id"))?; + Ok(ConnectionId { + owner_id: host_connection_server_id.0 as u32, + id: host_connection_id as u32, + }) + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::HostUserId", + to = "super::user::Column::Id" + )] + HostUser, + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::RoomId", + to = "super::room::Column::Id" + )] + Room, + #[sea_orm(has_many = "super::worktree::Entity")] + Worktrees, + #[sea_orm(has_many = "super::project_collaborator::Entity")] + Collaborators, + #[sea_orm(has_many = "super::language_server::Entity")] + LanguageServers, + #[sea_orm( + belongs_to = "super::hosted_project::Entity", + from = "Column::HostedProjectId", + to = "super::hosted_project::Column::Id" + )] + HostedProject, + #[sea_orm( + belongs_to = "super::dev_server_project::Entity", + from = "Column::DevServerProjectId", + to = "super::dev_server_project::Column::Id" + )] + RemoteProject, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::HostUser.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Worktrees.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Collaborators.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::LanguageServers.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::HostedProject.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::RemoteProject.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/project_collaborator.rs b/crates/collab/src/db/tables/project_collaborator.rs new file mode 100644 index 0000000..ac57bef --- /dev/null +++ b/crates/collab/src/db/tables/project_collaborator.rs @@ -0,0 +1,43 @@ +use crate::db::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId}; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "project_collaborators")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ProjectCollaboratorId, + pub project_id: ProjectId, + pub connection_id: i32, + pub connection_server_id: ServerId, + pub user_id: UserId, + pub replica_id: ReplicaId, + pub is_host: bool, +} + +impl Model { + pub fn connection(&self) 
-> ConnectionId { + ConnectionId { + owner_id: self.connection_server_id.0 as u32, + id: self.connection_id as u32, + } + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::project::Entity", + from = "Column::ProjectId", + to = "super::project::Column::Id" + )] + Project, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Project.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/rate_buckets.rs b/crates/collab/src/db/tables/rate_buckets.rs new file mode 100644 index 0000000..e16db36 --- /dev/null +++ b/crates/collab/src/db/tables/rate_buckets.rs @@ -0,0 +1,31 @@ +use crate::db::UserId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "rate_buckets")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub user_id: UserId, + #[sea_orm(primary_key, auto_increment = false)] + pub rate_limit_name: String, + pub token_count: i32, + pub last_refill: DateTime, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/room.rs b/crates/collab/src/db/tables/room.rs new file mode 100644 index 0000000..f72f700 --- /dev/null +++ b/crates/collab/src/db/tables/room.rs @@ -0,0 +1,53 @@ +use crate::db::{ChannelId, RoomId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Default, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "rooms")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: RoomId, + pub live_kit_room: String, + pub channel_id: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::room_participant::Entity")] + RoomParticipant, + #[sea_orm(has_many = "super::project::Entity")] + Project, + #[sea_orm(has_many = "super::follower::Entity")] + Follower, + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::RoomParticipant.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Project.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Follower.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/room_participant.rs b/crates/collab/src/db/tables/room_participant.rs new file mode 100644 index 0000000..c562111 --- /dev/null +++ b/crates/collab/src/db/tables/room_participant.rs @@ -0,0 +1,62 @@ +use crate::db::{ChannelRole, ProjectId, RoomId, RoomParticipantId, ServerId, UserId}; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "room_participants")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: RoomParticipantId, + pub room_id: RoomId, + pub user_id: UserId, + pub answering_connection_id: Option, + pub answering_connection_server_id: Option, + pub answering_connection_lost: bool, + pub location_kind: Option, + pub 
location_project_id: Option, + pub initial_project_id: Option, + pub calling_user_id: UserId, + pub calling_connection_id: i32, + pub calling_connection_server_id: Option, + pub participant_index: Option, + pub role: Option, +} + +impl Model { + pub fn answering_connection(&self) -> Option { + Some(ConnectionId { + owner_id: self.answering_connection_server_id?.0 as u32, + id: self.answering_connection_id? as u32, + }) + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, + #[sea_orm( + belongs_to = "super::room::Entity", + from = "Column::RoomId", + to = "super::room::Column::Id" + )] + Room, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Room.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/server.rs b/crates/collab/src/db/tables/server.rs new file mode 100644 index 0000000..ea847bd --- /dev/null +++ b/crates/collab/src/db/tables/server.rs @@ -0,0 +1,15 @@ +use crate::db::ServerId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "servers")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ServerId, + pub environment: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/signup.rs b/crates/collab/src/db/tables/signup.rs new file mode 100644 index 0000000..79d9f05 --- /dev/null +++ b/crates/collab/src/db/tables/signup.rs @@ -0,0 +1,28 @@ +use crate::db::{SignupId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "signups")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: SignupId, + pub email_address: String, + pub email_confirmation_code: String, + pub email_confirmation_sent: bool, + pub created_at: DateTime, + pub device_id: Option, + pub user_id: Option, + pub inviting_user_id: Option, + pub platform_mac: bool, + pub platform_linux: bool, + pub platform_windows: bool, + pub platform_unknown: bool, + pub editor_features: Option>, + pub programming_languages: Option>, + pub added_to_mailing_list: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/user.rs b/crates/collab/src/db/tables/user.rs new file mode 100644 index 0000000..5c9166a --- /dev/null +++ b/crates/collab/src/db/tables/user.rs @@ -0,0 +1,84 @@ +use crate::db::UserId; +use sea_orm::entity::prelude::*; +use serde::Serialize; + +/// A user model. 
+#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)] +#[sea_orm(table_name = "users")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: UserId, + pub github_login: String, + pub github_user_id: Option, + pub email_address: Option, + pub admin: bool, + pub invite_code: Option, + pub invite_count: i32, + pub inviter_id: Option, + pub connected_once: bool, + pub metrics_id: Uuid, + pub created_at: DateTime, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::access_token::Entity")] + AccessToken, + #[sea_orm(has_one = "super::room_participant::Entity")] + RoomParticipant, + #[sea_orm(has_many = "super::project::Entity")] + HostedProjects, + #[sea_orm(has_many = "super::channel_member::Entity")] + ChannelMemberships, + #[sea_orm(has_many = "super::user_feature::Entity")] + UserFeatures, + #[sea_orm(has_one = "super::contributor::Entity")] + Contributor, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::AccessToken.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::RoomParticipant.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::HostedProjects.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::ChannelMemberships.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::UserFeatures.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} + +pub struct UserFlags; + +impl Linked for UserFlags { + type FromEntity = Entity; + + type ToEntity = super::feature_flag::Entity; + + fn link(&self) -> Vec { + vec![ + super::user_feature::Relation::User.def().rev(), + super::user_feature::Relation::Flag.def(), + ] + } +} diff --git a/crates/collab/src/db/tables/user_feature.rs b/crates/collab/src/db/tables/user_feature.rs new file mode 100644 index 0000000..cc24b5e --- /dev/null +++ b/crates/collab/src/db/tables/user_feature.rs @@ -0,0 +1,42 @@ +use sea_orm::entity::prelude::*; + +use crate::db::{FlagId, UserId}; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "user_features")] +pub struct Model { + #[sea_orm(primary_key)] + pub user_id: UserId, + #[sea_orm(primary_key)] + pub feature_id: FlagId, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::feature_flag::Entity", + from = "Column::FeatureId", + to = "super::feature_flag::Column::Id" + )] + Flag, + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Flag.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree.rs b/crates/collab/src/db/tables/worktree.rs new file mode 100644 index 0000000..46d9877 --- /dev/null +++ b/crates/collab/src/db/tables/worktree.rs @@ -0,0 +1,36 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktrees")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i64, + #[sea_orm(primary_key)] + pub project_id: ProjectId, + pub abs_path: String, + pub root_name: String, + pub visible: bool, + /// The last scan for which we've observed entries. It may be in progress. 
+ pub scan_id: i64, + /// The last scan that fully completed. + pub completed_scan_id: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::project::Entity", + from = "Column::ProjectId", + to = "super::project::Column::Id" + )] + Project, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Project.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree_diagnostic_summary.rs b/crates/collab/src/db/tables/worktree_diagnostic_summary.rs new file mode 100644 index 0000000..5620ed2 --- /dev/null +++ b/crates/collab/src/db/tables/worktree_diagnostic_summary.rs @@ -0,0 +1,21 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktree_diagnostic_summaries")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: ProjectId, + #[sea_orm(primary_key)] + pub worktree_id: i64, + #[sea_orm(primary_key)] + pub path: String, + pub language_server_id: i64, + pub error_count: i32, + pub warning_count: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree_entry.rs b/crates/collab/src/db/tables/worktree_entry.rs new file mode 100644 index 0000000..81bf6e2 --- /dev/null +++ b/crates/collab/src/db/tables/worktree_entry.rs @@ -0,0 +1,29 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktree_entries")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: ProjectId, + #[sea_orm(primary_key)] + pub worktree_id: i64, + #[sea_orm(primary_key)] + pub id: i64, + pub is_dir: bool, + pub path: String, + pub inode: i64, + pub mtime_seconds: i64, + pub mtime_nanos: i32, + pub git_status: Option, + pub is_symlink: bool, + pub is_ignored: bool, + pub is_external: bool, + pub is_deleted: bool, + pub scan_id: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree_repository.rs b/crates/collab/src/db/tables/worktree_repository.rs new file mode 100644 index 0000000..6f86ff0 --- /dev/null +++ b/crates/collab/src/db/tables/worktree_repository.rs @@ -0,0 +1,21 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktree_repositories")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: ProjectId, + #[sea_orm(primary_key)] + pub worktree_id: i64, + #[sea_orm(primary_key)] + pub work_directory_id: i64, + pub scan_id: i64, + pub branch: Option, + pub is_deleted: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree_repository_statuses.rs b/crates/collab/src/db/tables/worktree_repository_statuses.rs new file mode 100644 index 0000000..cab0167 --- /dev/null +++ b/crates/collab/src/db/tables/worktree_repository_statuses.rs @@ -0,0 +1,23 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktree_repository_statuses")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: 
ProjectId, + #[sea_orm(primary_key)] + pub worktree_id: i64, + #[sea_orm(primary_key)] + pub work_directory_id: i64, + #[sea_orm(primary_key)] + pub repo_path: String, + pub status: i64, + pub scan_id: i64, + pub is_deleted: bool, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree_settings_file.rs b/crates/collab/src/db/tables/worktree_settings_file.rs new file mode 100644 index 0000000..92348c1 --- /dev/null +++ b/crates/collab/src/db/tables/worktree_settings_file.rs @@ -0,0 +1,19 @@ +use crate::db::ProjectId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "worktree_settings_files")] +pub struct Model { + #[sea_orm(primary_key)] + pub project_id: ProjectId, + #[sea_orm(primary_key)] + pub worktree_id: i64, + #[sea_orm(primary_key)] + pub path: String, + pub content: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs new file mode 100644 index 0000000..e3ce834 --- /dev/null +++ b/crates/collab/src/db/tests.rs @@ -0,0 +1,187 @@ +mod buffer_tests; +mod channel_tests; +mod contributor_tests; +mod db_tests; +mod embedding_tests; +mod extension_tests; +mod feature_flag_tests; +mod message_tests; + +use super::*; +use gpui::BackgroundExecutor; +use parking_lot::Mutex; +use sea_orm::ConnectionTrait; +use sqlx::migrate::MigrateDatabase; +use std::sync::{ + atomic::{AtomicI32, AtomicU32, Ordering::SeqCst}, + Arc, +}; + +pub struct TestDb { + pub db: Option>, + pub connection: Option, +} + +impl TestDb { + pub fn sqlite(background: BackgroundExecutor) -> Self { + let url = "sqlite::memory:"; + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_io() + .enable_time() + .build() + .unwrap(); + + let mut db = runtime.block_on(async { + let mut options = ConnectOptions::new(url); + options.max_connections(5); + let mut db = Database::new(options, Executor::Deterministic(background)) + .await + .unwrap(); + let sql = include_str!(concat!( + env!("CARGO_MANIFEST_DIR"), + "/migrations.sqlite/20221109000000_test_schema.sql" + )); + db.pool + .execute(sea_orm::Statement::from_string( + db.pool.get_database_backend(), + sql, + )) + .await + .unwrap(); + db.initialize_notification_kinds().await.unwrap(); + db + }); + + db.runtime = Some(runtime); + + Self { + db: Some(Arc::new(db)), + connection: None, + } + } + + pub fn postgres(background: BackgroundExecutor) -> Self { + static LOCK: Mutex<()> = Mutex::new(()); + + let _guard = LOCK.lock(); + let mut rng = StdRng::from_entropy(); + let url = format!( + "postgres://postgres@localhost/zed-test-{}", + rng.gen::() + ); + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_io() + .enable_time() + .build() + .unwrap(); + + let mut db = runtime.block_on(async { + sqlx::Postgres::create_database(&url) + .await + .expect("failed to create test db"); + let mut options = ConnectOptions::new(url); + options + .max_connections(5) + .idle_timeout(Duration::from_secs(0)); + let mut db = Database::new(options, Executor::Deterministic(background)) + .await + .unwrap(); + let migrations_path = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations"); + db.migrate(Path::new(migrations_path), false).await.unwrap(); + db.initialize_notification_kinds().await.unwrap(); + db + }); + + 
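+        // Keep the runtime on the Database: the connection was created on this
+        // runtime, and TestDb's Drop impl below blocks on it to tear down the
+        // test database.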
db.runtime = Some(runtime); + + Self { + db: Some(Arc::new(db)), + connection: None, + } + } + + pub fn db(&self) -> &Arc { + self.db.as_ref().unwrap() + } +} + +#[macro_export] +macro_rules! test_both_dbs { + ($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => { + #[gpui::test] + async fn $postgres_test_name(cx: &mut gpui::TestAppContext) { + let test_db = $crate::db::TestDb::postgres(cx.executor().clone()); + $test_name(test_db.db()).await; + } + + #[gpui::test] + async fn $sqlite_test_name(cx: &mut gpui::TestAppContext) { + let test_db = $crate::db::TestDb::sqlite(cx.executor().clone()); + $test_name(test_db.db()).await; + } + }; +} + +impl Drop for TestDb { + fn drop(&mut self) { + let db = self.db.take().unwrap(); + if let sea_orm::DatabaseBackend::Postgres = db.pool.get_database_backend() { + db.runtime.as_ref().unwrap().block_on(async { + use util::ResultExt; + let query = " + SELECT pg_terminate_backend(pg_stat_activity.pid) + FROM pg_stat_activity + WHERE + pg_stat_activity.datname = current_database() AND + pid <> pg_backend_pid(); + "; + db.pool + .execute(sea_orm::Statement::from_string( + db.pool.get_database_backend(), + query, + )) + .await + .log_err(); + sqlx::Postgres::drop_database(db.options.get_url()) + .await + .log_err(); + }) + } + } +} + +fn channel_tree(channels: &[(ChannelId, &[ChannelId], &'static str)]) -> Vec { + channels + .iter() + .map(|(id, parent_path, name)| Channel { + id: *id, + name: name.to_string(), + visibility: ChannelVisibility::Members, + parent_path: parent_path.to_vec(), + }) + .collect() +} + +static GITHUB_USER_ID: AtomicI32 = AtomicI32::new(5); + +async fn new_test_user(db: &Arc, email: &str) -> UserId { + db.create_user( + email, + false, + NewUserParams { + github_login: email[0..email.find('@').unwrap()].to_string(), + github_user_id: GITHUB_USER_ID.fetch_add(1, SeqCst), + }, + ) + .await + .unwrap() + .user_id +} + +static TEST_CONNECTION_ID: AtomicU32 = AtomicU32::new(1); +fn new_test_connection(server: ServerId) -> ConnectionId { + ConnectionId { + id: TEST_CONNECTION_ID.fetch_add(1, SeqCst), + owner_id: server.0 as u32, + } +} diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs new file mode 100644 index 0000000..7084612 --- /dev/null +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -0,0 +1,329 @@ +use super::*; +use crate::test_both_dbs; +use language::proto::{self, serialize_version}; +use text::Buffer; + +test_both_dbs!( + test_channel_buffers, + test_channel_buffers_postgres, + test_channel_buffers_sqlite +); + +async fn test_channel_buffers(db: &Arc) { + let a_id = db + .create_user( + "user_a@example.com", + false, + NewUserParams { + github_login: "user_a".into(), + github_user_id: 101, + }, + ) + .await + .unwrap() + .user_id; + let b_id = db + .create_user( + "user_b@example.com", + false, + NewUserParams { + github_login: "user_b".into(), + github_user_id: 102, + }, + ) + .await + .unwrap() + .user_id; + + // This user will not be a part of the channel + let c_id = db + .create_user( + "user_c@example.com", + false, + NewUserParams { + github_login: "user_c".into(), + github_user_id: 102, + }, + ) + .await + .unwrap() + .user_id; + + let owner_id = db.create_server("production").await.unwrap().0 as u32; + + let zed_id = db.create_root_channel("zed", a_id).await.unwrap(); + + db.invite_channel_member(zed_id, b_id, a_id, ChannelRole::Member) + .await + .unwrap(); + + db.respond_to_channel_invite(zed_id, b_id, true) + .await + .unwrap(); + + let 
connection_id_a = ConnectionId { owner_id, id: 1 }; + let _ = db + .join_channel_buffer(zed_id, a_id, connection_id_a) + .await + .unwrap(); + + let mut buffer_a = Buffer::new(0, text::BufferId::new(1).unwrap(), "".to_string()); + let operations = vec![ + buffer_a.edit([(0..0, "hello world")]), + buffer_a.edit([(5..5, ", cruel")]), + buffer_a.edit([(0..5, "goodbye")]), + buffer_a.undo().unwrap().1, + ]; + assert_eq!(buffer_a.text(), "hello, cruel world"); + + let operations = operations + .into_iter() + .map(|op| proto::serialize_operation(&language::Operation::Buffer(op))) + .collect::>(); + + db.update_channel_buffer(zed_id, a_id, &operations) + .await + .unwrap(); + + let connection_id_b = ConnectionId { owner_id, id: 2 }; + let buffer_response_b = db + .join_channel_buffer(zed_id, b_id, connection_id_b) + .await + .unwrap(); + + let mut buffer_b = Buffer::new( + 0, + text::BufferId::new(1).unwrap(), + buffer_response_b.base_text, + ); + buffer_b + .apply_ops(buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })) + .unwrap(); + + assert_eq!(buffer_b.text(), "hello, cruel world"); + + // Ensure that C fails to open the buffer + assert!(db + .join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 }) + .await + .is_err()); + + // Ensure that both collaborators have shown up + assert_eq!( + buffer_response_b.collaborators, + &[ + rpc::proto::Collaborator { + user_id: a_id.to_proto(), + peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }), + replica_id: 0, + }, + rpc::proto::Collaborator { + user_id: b_id.to_proto(), + peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }), + replica_id: 1, + } + ] + ); + + // Ensure that get_channel_buffer_collaborators works + let zed_collaborats = db.get_channel_buffer_collaborators(zed_id).await.unwrap(); + assert_eq!(zed_collaborats, &[a_id, b_id]); + + let left_buffer = db + .leave_channel_buffer(zed_id, connection_id_b) + .await + .unwrap(); + + assert_eq!(left_buffer.connections, &[connection_id_a],); + + let cargo_id = db.create_root_channel("cargo", a_id).await.unwrap(); + let _ = db + .join_channel_buffer(cargo_id, a_id, connection_id_a) + .await + .unwrap(); + + db.leave_channel_buffers(connection_id_a).await.unwrap(); + + let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap(); + let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap(); + assert_eq!(zed_collaborators, &[]); + assert_eq!(cargo_collaborators, &[]); + + // When everyone has left the channel, the operations are collapsed into + // a new base text. 
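+    // so a collaborator who rejoins receives the full text as `base_text` and
+    // no further operations to replay (asserted below).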
+ let buffer_response_b = db + .join_channel_buffer(zed_id, b_id, connection_id_b) + .await + .unwrap(); + assert_eq!(buffer_response_b.base_text, "hello, cruel world"); + assert_eq!(buffer_response_b.operations, &[]); +} + +test_both_dbs!( + test_channel_buffers_last_operations, + test_channel_buffers_last_operations_postgres, + test_channel_buffers_last_operations_sqlite +); + +async fn test_channel_buffers_last_operations(db: &Database) { + let user_id = db + .create_user( + "user_a@example.com", + false, + NewUserParams { + github_login: "user_a".into(), + github_user_id: 101, + }, + ) + .await + .unwrap() + .user_id; + let observer_id = db + .create_user( + "user_b@example.com", + false, + NewUserParams { + github_login: "user_b".into(), + github_user_id: 102, + }, + ) + .await + .unwrap() + .user_id; + let owner_id = db.create_server("production").await.unwrap().0 as u32; + let connection_id = ConnectionId { + owner_id, + id: user_id.0 as u32, + }; + + let mut buffers = Vec::new(); + let mut text_buffers = Vec::new(); + for i in 0..3 { + let channel = db + .create_root_channel(&format!("channel-{i}"), user_id) + .await + .unwrap(); + + db.invite_channel_member(channel, observer_id, user_id, ChannelRole::Member) + .await + .unwrap(); + db.respond_to_channel_invite(channel, observer_id, true) + .await + .unwrap(); + + db.join_channel_buffer(channel, user_id, connection_id) + .await + .unwrap(); + + buffers.push( + db.transaction(|tx| async move { db.get_channel_buffer(channel, &tx).await }) + .await + .unwrap(), + ); + + text_buffers.push(Buffer::new( + 0, + text::BufferId::new(1).unwrap(), + "".to_string(), + )); + } + + update_buffer( + buffers[0].channel_id, + user_id, + db, + vec![ + text_buffers[0].edit([(0..0, "a")]), + text_buffers[0].edit([(0..0, "b")]), + text_buffers[0].edit([(0..0, "c")]), + ], + ) + .await; + + update_buffer( + buffers[1].channel_id, + user_id, + db, + vec![ + text_buffers[1].edit([(0..0, "d")]), + text_buffers[1].edit([(1..1, "e")]), + text_buffers[1].edit([(2..2, "f")]), + ], + ) + .await; + + // cause buffer 1's epoch to increment. 
+ db.leave_channel_buffer(buffers[1].channel_id, connection_id) + .await + .unwrap(); + db.join_channel_buffer(buffers[1].channel_id, user_id, connection_id) + .await + .unwrap(); + text_buffers[1] = Buffer::new(1, text::BufferId::new(1).unwrap(), "def".to_string()); + update_buffer( + buffers[1].channel_id, + user_id, + db, + vec![ + text_buffers[1].edit([(0..0, "g")]), + text_buffers[1].edit([(0..0, "h")]), + ], + ) + .await; + + update_buffer( + buffers[2].channel_id, + user_id, + db, + vec![text_buffers[2].edit([(0..0, "i")])], + ) + .await; + + let channels_for_user = db.get_channels_for_user(user_id).await.unwrap(); + + pretty_assertions::assert_eq!( + channels_for_user.latest_buffer_versions, + [ + rpc::proto::ChannelBufferVersion { + channel_id: buffers[0].channel_id.to_proto(), + epoch: 0, + version: serialize_version(&text_buffers[0].version()), + }, + rpc::proto::ChannelBufferVersion { + channel_id: buffers[1].channel_id.to_proto(), + epoch: 1, + version: serialize_version(&text_buffers[1].version()) + .into_iter() + .filter(|vector| vector.replica_id == text_buffers[1].replica_id() as u32) + .collect::<Vec<_>>(), + }, + rpc::proto::ChannelBufferVersion { + channel_id: buffers[2].channel_id.to_proto(), + epoch: 0, + version: serialize_version(&text_buffers[2].version()), + }, + ] + ); +} + +async fn update_buffer( + channel_id: ChannelId, + user_id: UserId, + db: &Database, + operations: Vec<text::Operation>, +) { + let operations = operations + .into_iter() + .map(|op| proto::serialize_operation(&language::Operation::Buffer(op))) + .collect::<Vec<_>>(); + db.update_channel_buffer(channel_id, user_id, &operations) + .await + .unwrap(); +} diff --git a/crates/collab/src/db/tests/channel_tests.rs b/crates/collab/src/db/tests/channel_tests.rs new file mode 100644 index 0000000..4482549 --- /dev/null +++ b/crates/collab/src/db/tests/channel_tests.rs @@ -0,0 +1,745 @@ +use crate::{ + db::{ + tests::{channel_tree, new_test_connection, new_test_user}, + Channel, ChannelId, ChannelRole, Database, NewUserParams, RoomId, UserId, + }, + test_both_dbs, +}; +use rpc::{ + proto::{self}, + ConnectionId, +}; +use std::sync::Arc; + +test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite); + +async fn test_channels(db: &Arc<Database>) { + let a_id = new_test_user(db, "user1@example.com").await; + let b_id = new_test_user(db, "user2@example.com").await; + + let zed_id = db.create_root_channel("zed", a_id).await.unwrap(); + + // Make sure that people cannot read channels they haven't been invited to + assert!(db.get_channel(zed_id, b_id).await.is_err()); + + db.invite_channel_member(zed_id, b_id, a_id, ChannelRole::Member) + .await + .unwrap(); + + db.respond_to_channel_invite(zed_id, b_id, true) + .await + .unwrap(); + + let crdb_id = db.create_sub_channel("crdb", zed_id, a_id).await.unwrap(); + let livestreaming_id = db + .create_sub_channel("livestreaming", zed_id, a_id) + .await + .unwrap(); + let replace_id = db + .create_sub_channel("replace", zed_id, a_id) + .await + .unwrap(); + + let (members, _) = db + .get_channel_participant_details(replace_id, "", 10, a_id) + .await + .unwrap(); + let ids = members + .into_iter() + .map(|m| UserId::from_proto(m.user_id)) + .collect::<Vec<_>>(); + assert_eq!(ids, &[a_id, b_id]); + + let rust_id = db.create_root_channel("rust", a_id).await.unwrap(); + let cargo_id = db.create_sub_channel("cargo", rust_id, a_id).await.unwrap(); + + let cargo_ra_id = db + .create_sub_channel("cargo-ra", cargo_id, a_id) + .await + .unwrap(); + + let result =
db.get_channels_for_user(a_id).await.unwrap(); + assert_eq!( + result.channels, + channel_tree(&[ + (zed_id, &[], "zed"), + (crdb_id, &[zed_id], "crdb"), + (livestreaming_id, &[zed_id], "livestreaming",), + (replace_id, &[zed_id], "replace"), + (rust_id, &[], "rust"), + (cargo_id, &[rust_id], "cargo"), + (cargo_ra_id, &[rust_id, cargo_id], "cargo-ra",) + ],) + ); + + let result = db.get_channels_for_user(b_id).await.unwrap(); + assert_eq!( + result.channels, + channel_tree(&[ + (zed_id, &[], "zed"), + (crdb_id, &[zed_id], "crdb"), + (livestreaming_id, &[zed_id], "livestreaming",), + (replace_id, &[zed_id], "replace") + ],) + ); + + // Update member permissions + let set_subchannel_admin = db + .set_channel_member_role(crdb_id, a_id, b_id, ChannelRole::Admin) + .await; + assert!(set_subchannel_admin.is_err()); + let set_channel_admin = db + .set_channel_member_role(zed_id, a_id, b_id, ChannelRole::Admin) + .await; + assert!(set_channel_admin.is_ok()); + + let result = db.get_channels_for_user(b_id).await.unwrap(); + assert_eq!( + result.channels, + channel_tree(&[ + (zed_id, &[], "zed"), + (crdb_id, &[zed_id], "crdb"), + (livestreaming_id, &[zed_id], "livestreaming",), + (replace_id, &[zed_id], "replace") + ],) + ); + + // Remove a single channel + db.delete_channel(crdb_id, a_id).await.unwrap(); + assert!(db.get_channel(crdb_id, a_id).await.is_err()); + + // Remove a channel tree + let (_, mut channel_ids) = db.delete_channel(rust_id, a_id).await.unwrap(); + channel_ids.sort(); + assert_eq!(channel_ids, &[rust_id, cargo_id, cargo_ra_id]); + + assert!(db.get_channel(rust_id, a_id).await.is_err()); + assert!(db.get_channel(cargo_id, a_id).await.is_err()); + assert!(db.get_channel(cargo_ra_id, a_id).await.is_err()); +} + +test_both_dbs!( + test_joining_channels, + test_joining_channels_postgres, + test_joining_channels_sqlite +); + +async fn test_joining_channels(db: &Arc) { + let owner_id = db.create_server("test").await.unwrap().0 as u32; + + let user_1 = new_test_user(db, "user1@example.com").await; + let user_2 = new_test_user(db, "user2@example.com").await; + + let channel_1 = db.create_root_channel("channel_1", user_1).await.unwrap(); + + // can join a room with membership to its channel + let (joined_room, _, _) = db + .join_channel(channel_1, user_1, ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + assert_eq!(joined_room.room.participants.len(), 1); + + let room_id = RoomId::from_proto(joined_room.room.id); + drop(joined_room); + // cannot join a room without membership to its channel + assert!(db + .join_room(room_id, user_2, ConnectionId { owner_id, id: 1 },) + .await + .is_err()); +} + +test_both_dbs!( + test_channel_invites, + test_channel_invites_postgres, + test_channel_invites_sqlite +); + +async fn test_channel_invites(db: &Arc) { + db.create_server("test").await.unwrap(); + + let user_1 = new_test_user(db, "user1@example.com").await; + let user_2 = new_test_user(db, "user2@example.com").await; + let user_3 = new_test_user(db, "user3@example.com").await; + + let channel_1_1 = db.create_root_channel("channel_1", user_1).await.unwrap(); + + let channel_1_2 = db.create_root_channel("channel_2", user_1).await.unwrap(); + + db.invite_channel_member(channel_1_1, user_2, user_1, ChannelRole::Member) + .await + .unwrap(); + db.invite_channel_member(channel_1_2, user_2, user_1, ChannelRole::Member) + .await + .unwrap(); + db.invite_channel_member(channel_1_1, user_3, user_1, ChannelRole::Admin) + .await + .unwrap(); + + let user_2_invites = db + 
.get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2] + .await + .unwrap() + .into_iter() + .map(|channel| channel.id) + .collect::>(); + + assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]); + + let user_3_invites = db + .get_channel_invites_for_user(user_3) // -> [channel_1_1] + .await + .unwrap() + .into_iter() + .map(|channel| channel.id) + .collect::>(); + + assert_eq!(user_3_invites, &[channel_1_1]); + + let (mut members, _) = db + .get_channel_participant_details(channel_1_1, "", 100, user_1) + .await + .unwrap(); + + members.sort_by_key(|member| member.user_id); + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: user_1.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: user_2.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + role: proto::ChannelRole::Member.into(), + }, + proto::ChannelMember { + user_id: user_3.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + role: proto::ChannelRole::Admin.into(), + }, + ] + ); + + db.respond_to_channel_invite(channel_1_1, user_2, true) + .await + .unwrap(); + + let channel_1_3 = db + .create_sub_channel("channel_3", channel_1_1, user_1) + .await + .unwrap(); + + let (members, _) = db + .get_channel_participant_details(channel_1_3, "", 100, user_1) + .await + .unwrap(); + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: user_1.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: user_3.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: user_2.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Member.into(), + }, + ] + ); +} + +test_both_dbs!( + test_channel_renames, + test_channel_renames_postgres, + test_channel_renames_sqlite +); + +async fn test_channel_renames(db: &Arc) { + db.create_server("test").await.unwrap(); + + let user_1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + }, + ) + .await + .unwrap() + .user_id; + + let user_2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "user2".into(), + github_user_id: 6, + }, + ) + .await + .unwrap() + .user_id; + + let zed_id = db.create_root_channel("zed", user_1).await.unwrap(); + + db.rename_channel(zed_id, user_1, "#zed-archive") + .await + .unwrap(); + + let channel = db.get_channel(zed_id, user_1).await.unwrap(); + assert_eq!(channel.name, "zed-archive"); + + let non_permissioned_rename = db.rename_channel(zed_id, user_2, "hacked-lol").await; + assert!(non_permissioned_rename.is_err()); + + let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await; + assert!(bad_name_rename.is_err()) +} + +test_both_dbs!( + test_db_channel_moving, + test_channels_moving_postgres, + test_channels_moving_sqlite +); + +async fn test_db_channel_moving(db: &Arc) { + let a_id = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + }, + ) + .await + .unwrap() + .user_id; + + let zed_id = db.create_root_channel("zed", a_id).await.unwrap(); + + let crdb_id = db.create_sub_channel("crdb", zed_id, a_id).await.unwrap(); + + let gpui2_id = db.create_sub_channel("gpui2", zed_id, a_id).await.unwrap(); + + let livestreaming_id 
= db + .create_sub_channel("livestreaming", crdb_id, a_id) + .await + .unwrap(); + + let livestreaming_dag_id = db + .create_sub_channel("livestreaming_dag", livestreaming_id, a_id) + .await + .unwrap(); + + // ======================================================================== + // sanity check + // Initial DAG: + // /- gpui2 + // zed -- crdb - livestreaming - livestreaming_dag + let result = db.get_channels_for_user(a_id).await.unwrap(); + assert_channel_tree( + result.channels, + &[ + (zed_id, &[]), + (crdb_id, &[zed_id]), + (livestreaming_id, &[zed_id, crdb_id]), + (livestreaming_dag_id, &[zed_id, crdb_id, livestreaming_id]), + (gpui2_id, &[zed_id]), + ], + ); +} + +test_both_dbs!( + test_db_channel_moving_bugs, + test_db_channel_moving_bugs_postgres, + test_db_channel_moving_bugs_sqlite +); + +async fn test_db_channel_moving_bugs(db: &Arc) { + let user_id = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + }, + ) + .await + .unwrap() + .user_id; + + let zed_id = db.create_root_channel("zed", user_id).await.unwrap(); + + let projects_id = db + .create_sub_channel("projects", zed_id, user_id) + .await + .unwrap(); + + let livestreaming_id = db + .create_sub_channel("livestreaming", projects_id, user_id) + .await + .unwrap(); + + let result = db.get_channels_for_user(user_id).await.unwrap(); + assert_channel_tree( + result.channels, + &[ + (zed_id, &[]), + (projects_id, &[zed_id]), + (livestreaming_id, &[zed_id, projects_id]), + ], + ); + + // Can't move a channel into its ancestor + db.move_channel(projects_id, livestreaming_id, user_id) + .await + .unwrap_err(); + let result = db.get_channels_for_user(user_id).await.unwrap(); + assert_channel_tree( + result.channels, + &[ + (zed_id, &[]), + (projects_id, &[zed_id]), + (livestreaming_id, &[zed_id, projects_id]), + ], + ); +} + +test_both_dbs!( + test_user_is_channel_participant, + test_user_is_channel_participant_postgres, + test_user_is_channel_participant_sqlite +); + +async fn test_user_is_channel_participant(db: &Arc) { + let admin = new_test_user(db, "admin@example.com").await; + let member = new_test_user(db, "member@example.com").await; + let guest = new_test_user(db, "guest@example.com").await; + + let zed_channel = db.create_root_channel("zed", admin).await.unwrap(); + let internal_channel_id = db + .create_sub_channel("active", zed_channel, admin) + .await + .unwrap(); + let public_channel_id = db + .create_sub_channel("vim", zed_channel, admin) + .await + .unwrap(); + + db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin) + .await + .unwrap(); + db.set_channel_visibility( + public_channel_id, + crate::db::ChannelVisibility::Public, + admin, + ) + .await + .unwrap(); + db.invite_channel_member(zed_channel, member, admin, ChannelRole::Member) + .await + .unwrap(); + db.invite_channel_member(zed_channel, guest, admin, ChannelRole::Guest) + .await + .unwrap(); + + db.respond_to_channel_invite(zed_channel, member, true) + .await + .unwrap(); + + db.transaction(|tx| async move { + db.check_user_is_channel_participant( + &db.get_channel_internal(public_channel_id, &tx).await?, + admin, + &tx, + ) + .await + }) + .await + .unwrap(); + db.transaction(|tx| async move { + db.check_user_is_channel_participant( + &db.get_channel_internal(public_channel_id, &tx).await?, + member, + &tx, + ) + .await + }) + .await + .unwrap(); + + let (mut members, _) = db + .get_channel_participant_details(public_channel_id, "", 100, admin) + 
.await + .unwrap(); + + members.sort_by_key(|member| member.user_id); + + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: admin.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: member.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Member.into(), + }, + proto::ChannelMember { + user_id: guest.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + role: proto::ChannelRole::Guest.into(), + }, + ] + ); + + db.respond_to_channel_invite(zed_channel, guest, true) + .await + .unwrap(); + + db.transaction(|tx| async move { + db.check_user_is_channel_participant( + &db.get_channel_internal(public_channel_id, &tx).await?, + guest, + &tx, + ) + .await + }) + .await + .unwrap(); + + let channels = db.get_channels_for_user(guest).await.unwrap().channels; + assert_channel_tree( + channels, + &[(zed_channel, &[]), (public_channel_id, &[zed_channel])], + ); + let channels = db.get_channels_for_user(member).await.unwrap().channels; + assert_channel_tree( + channels, + &[ + (zed_channel, &[]), + (internal_channel_id, &[zed_channel]), + (public_channel_id, &[zed_channel]), + ], + ); + + db.set_channel_member_role(zed_channel, admin, guest, ChannelRole::Banned) + .await + .unwrap(); + assert!(db + .transaction(|tx| async move { + db.check_user_is_channel_participant( + &db.get_channel_internal(public_channel_id, &tx) + .await + .unwrap(), + guest, + &tx, + ) + .await + }) + .await + .is_err()); + + let (mut members, _) = db + .get_channel_participant_details(public_channel_id, "", 100, admin) + .await + .unwrap(); + + members.sort_by_key(|member| member.user_id); + + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: admin.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: member.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Member.into(), + }, + proto::ChannelMember { + user_id: guest.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Banned.into(), + }, + ] + ); + + db.remove_channel_member(zed_channel, guest, admin) + .await + .unwrap(); + + db.invite_channel_member(zed_channel, guest, admin, ChannelRole::Guest) + .await + .unwrap(); + + // currently people invited to parent channels are not shown here + let (mut members, _) = db + .get_channel_participant_details(public_channel_id, "", 100, admin) + .await + .unwrap(); + + members.sort_by_key(|member| member.user_id); + + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: admin.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: member.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Member.into(), + }, + proto::ChannelMember { + user_id: guest.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + role: proto::ChannelRole::Guest.into(), + }, + ] + ); + + db.respond_to_channel_invite(zed_channel, guest, true) + .await + .unwrap(); + + db.transaction(|tx| async move { + db.check_user_is_channel_participant( + &db.get_channel_internal(zed_channel, &tx).await.unwrap(), + guest, + &tx, + ) + .await + }) + .await + .unwrap(); + assert!(db + .transaction(|tx| async move { + db.check_user_is_channel_participant( + 
&db.get_channel_internal(internal_channel_id, &tx) + .await + .unwrap(), + guest, + &tx, + ) + .await + }) + .await + .is_err(),); + + db.transaction(|tx| async move { + db.check_user_is_channel_participant( + &db.get_channel_internal(public_channel_id, &tx) + .await + .unwrap(), + guest, + &tx, + ) + .await + }) + .await + .unwrap(); + + let (mut members, _) = db + .get_channel_participant_details(public_channel_id, "", 100, admin) + .await + .unwrap(); + + members.sort_by_key(|member| member.user_id); + + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: admin.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Admin.into(), + }, + proto::ChannelMember { + user_id: member.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Member.into(), + }, + proto::ChannelMember { + user_id: guest.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + role: proto::ChannelRole::Guest.into(), + }, + ] + ); + + let channels = db.get_channels_for_user(guest).await.unwrap().channels; + assert_channel_tree( + channels, + &[(zed_channel, &[]), (public_channel_id, &[zed_channel])], + ) +} + +test_both_dbs!( + test_guest_access, + test_guest_access_postgres, + test_guest_access_sqlite +); + +async fn test_guest_access(db: &Arc<Database>) { + let server = db.create_server("test").await.unwrap(); + + let admin = new_test_user(db, "admin@example.com").await; + let guest = new_test_user(db, "guest@example.com").await; + let guest_connection = new_test_connection(server); + + let zed_channel = db.create_root_channel("zed", admin).await.unwrap(); + db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin) + .await + .unwrap(); + + assert!(db + .join_channel_chat(zed_channel, guest_connection, guest) + .await + .is_err()); + + db.join_channel(zed_channel, guest, guest_connection) + .await + .unwrap(); + + assert!(db + .join_channel_chat(zed_channel, guest_connection, guest) + .await + .is_ok()) +} + +#[track_caller] +fn assert_channel_tree(actual: Vec<Channel>, expected: &[(ChannelId, &[ChannelId])]) { + let actual = actual + .iter() + .map(|channel| (channel.id, channel.parent_path.as_slice())) + .collect::<Vec<_>>(); + pretty_assertions::assert_eq!( + actual, + expected.to_vec(), + "wrong channel ids and parent paths" + ); +} diff --git a/crates/collab/src/db/tests/contributor_tests.rs b/crates/collab/src/db/tests/contributor_tests.rs new file mode 100644 index 0000000..72fa5f9 --- /dev/null +++ b/crates/collab/src/db/tests/contributor_tests.rs @@ -0,0 +1,40 @@ +use super::Database; +use crate::{db::NewUserParams, test_both_dbs}; +use std::sync::Arc; + +test_both_dbs!( + test_contributors, + test_contributors_postgres, + test_contributors_sqlite +); + +async fn test_contributors(db: &Arc<Database>) { + db.create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".to_string(), + github_user_id: 1, + }, + ) + .await + .unwrap(); + + assert_eq!(db.get_contributors().await.unwrap(), Vec::<String>::new()); + + db.add_contributor("user1", Some(1), None, None) + .await + .unwrap(); + assert_eq!( + db.get_contributors().await.unwrap(), + vec!["user1".to_string()] + ); + + db.add_contributor("user2", Some(2), None, None) + .await + .unwrap(); + assert_eq!( + db.get_contributors().await.unwrap(), + vec!["user1".to_string(), "user2".to_string()] + ); +} diff --git a/crates/collab/src/db/tests/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs new file mode 100644 index 0000000..c78ba9e --- /dev/null +++
b/crates/collab/src/db/tests/db_tests.rs @@ -0,0 +1,612 @@ +use super::*; +use crate::test_both_dbs; +use gpui::TestAppContext; +use pretty_assertions::{assert_eq, assert_ne}; +use std::sync::Arc; +use tests::TestDb; + +test_both_dbs!( + test_get_users, + test_get_users_by_ids_postgres, + test_get_users_by_ids_sqlite +); + +async fn test_get_users(db: &Arc) { + let mut user_ids = Vec::new(); + let mut user_metric_ids = Vec::new(); + for i in 1..=4 { + let user = db + .create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + }, + ) + .await + .unwrap(); + user_ids.push(user.user_id); + user_metric_ids.push(user.metrics_id); + } + + assert_eq!( + db.get_users_by_ids(user_ids.clone()) + .await + .unwrap() + .into_iter() + .map(|user| ( + user.id, + user.github_login, + user.github_user_id, + user.email_address + )) + .collect::>(), + vec![ + ( + user_ids[0], + "user1".to_string(), + Some(1), + Some("user1@example.com".to_string()), + ), + ( + user_ids[1], + "user2".to_string(), + Some(2), + Some("user2@example.com".to_string()), + ), + ( + user_ids[2], + "user3".to_string(), + Some(3), + Some("user3@example.com".to_string()), + ), + ( + user_ids[3], + "user4".to_string(), + Some(4), + Some("user4@example.com".to_string()), + ) + ] + ); +} + +test_both_dbs!( + test_get_or_create_user_by_github_account, + test_get_or_create_user_by_github_account_postgres, + test_get_or_create_user_by_github_account_sqlite +); + +async fn test_get_or_create_user_by_github_account(db: &Arc) { + db.create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "login1".into(), + github_user_id: 101, + }, + ) + .await + .unwrap(); + let user_id2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "login2".into(), + github_user_id: 102, + }, + ) + .await + .unwrap() + .user_id; + + let user = db + .get_or_create_user_by_github_account("the-new-login2", Some(102), None, None) + .await + .unwrap(); + assert_eq!(user.id, user_id2); + assert_eq!(&user.github_login, "the-new-login2"); + assert_eq!(user.github_user_id, Some(102)); + + let user = db + .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com"), None) + .await + .unwrap(); + assert_eq!(&user.github_login, "login3"); + assert_eq!(user.github_user_id, Some(103)); + assert_eq!(user.email_address, Some("user3@example.com".into())); +} + +test_both_dbs!( + test_create_access_tokens, + test_create_access_tokens_postgres, + test_create_access_tokens_sqlite +); + +async fn test_create_access_tokens(db: &Arc) { + let user_1 = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 1, + }, + ) + .await + .unwrap() + .user_id; + let user_2 = db + .create_user( + "u2@example.com", + false, + NewUserParams { + github_login: "u2".into(), + github_user_id: 2, + }, + ) + .await + .unwrap() + .user_id; + + let token_1 = db.create_access_token(user_1, None, "h1", 2).await.unwrap(); + let token_2 = db.create_access_token(user_1, None, "h2", 2).await.unwrap(); + assert_eq!( + db.get_access_token(token_1).await.unwrap(), + access_token::Model { + id: token_1, + user_id: user_1, + impersonated_user_id: None, + hash: "h1".into(), + } + ); + assert_eq!( + db.get_access_token(token_2).await.unwrap(), + access_token::Model { + id: token_2, + user_id: user_1, + impersonated_user_id: None, + hash: "h2".into() + } + ); + + let token_3 = db.create_access_token(user_1, None, "h3", 
2).await.unwrap(); + assert_eq!( + db.get_access_token(token_3).await.unwrap(), + access_token::Model { + id: token_3, + user_id: user_1, + impersonated_user_id: None, + hash: "h3".into() + } + ); + assert_eq!( + db.get_access_token(token_2).await.unwrap(), + access_token::Model { + id: token_2, + user_id: user_1, + impersonated_user_id: None, + hash: "h2".into() + } + ); + assert!(db.get_access_token(token_1).await.is_err()); + + let token_4 = db.create_access_token(user_1, None, "h4", 2).await.unwrap(); + assert_eq!( + db.get_access_token(token_4).await.unwrap(), + access_token::Model { + id: token_4, + user_id: user_1, + impersonated_user_id: None, + hash: "h4".into() + } + ); + assert_eq!( + db.get_access_token(token_3).await.unwrap(), + access_token::Model { + id: token_3, + user_id: user_1, + impersonated_user_id: None, + hash: "h3".into() + } + ); + assert!(db.get_access_token(token_2).await.is_err()); + assert!(db.get_access_token(token_1).await.is_err()); + + // An access token for user 2 impersonating user 1 does not + // count against user 1's access token limit (of 2). + let token_5 = db + .create_access_token(user_2, Some(user_1), "h5", 2) + .await + .unwrap(); + assert_eq!( + db.get_access_token(token_5).await.unwrap(), + access_token::Model { + id: token_5, + user_id: user_2, + impersonated_user_id: Some(user_1), + hash: "h5".into() + } + ); + assert_eq!( + db.get_access_token(token_3).await.unwrap(), + access_token::Model { + id: token_3, + user_id: user_1, + impersonated_user_id: None, + hash: "h3".into() + } + ); + + // Only a limited number (2) of access tokens are stored for user 2 + // impersonating other users. + let token_6 = db + .create_access_token(user_2, Some(user_1), "h6", 2) + .await + .unwrap(); + let token_7 = db + .create_access_token(user_2, Some(user_1), "h7", 2) + .await + .unwrap(); + assert_eq!( + db.get_access_token(token_6).await.unwrap(), + access_token::Model { + id: token_6, + user_id: user_2, + impersonated_user_id: Some(user_1), + hash: "h6".into() + } + ); + assert_eq!( + db.get_access_token(token_7).await.unwrap(), + access_token::Model { + id: token_7, + user_id: user_2, + impersonated_user_id: Some(user_1), + hash: "h7".into() + } + ); + assert!(db.get_access_token(token_5).await.is_err()); + assert_eq!( + db.get_access_token(token_3).await.unwrap(), + access_token::Model { + id: token_3, + user_id: user_1, + impersonated_user_id: None, + hash: "h3".into() + } + ); +} + +test_both_dbs!( + test_add_contacts, + test_add_contacts_postgres, + test_add_contacts_sqlite +); + +async fn test_add_contacts(db: &Arc) { + let mut user_ids = Vec::new(); + for i in 0..3 { + user_ids.push( + db.create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + }, + ) + .await + .unwrap() + .user_id, + ); + } + + let user_1 = user_ids[0]; + let user_2 = user_ids[1]; + let user_3 = user_ids[2]; + + // User starts with no contacts + assert_eq!(db.get_contacts(user_1).await.unwrap(), &[]); + + // User requests a contact. Both users see the pending request. 
+ db.send_contact_request(user_1, user_2).await.unwrap(); + assert!(!db.has_contact(user_1, user_2).await.unwrap()); + assert!(!db.has_contact(user_2, user_1).await.unwrap()); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[Contact::Outgoing { user_id: user_2 }], + ); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[Contact::Incoming { user_id: user_1 }] + ); + + // User 2 dismisses the contact request notification without accepting or rejecting. + // We shouldn't notify them again. + db.dismiss_contact_notification(user_1, user_2) + .await + .unwrap_err(); + db.dismiss_contact_notification(user_2, user_1) + .await + .unwrap(); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[Contact::Incoming { user_id: user_1 }] + ); + + // User can't accept their own contact request + db.respond_to_contact_request(user_1, user_2, true) + .await + .unwrap_err(); + + // User accepts a contact request. Both users see the contact. + db.respond_to_contact_request(user_2, user_1, true) + .await + .unwrap(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[Contact::Accepted { + user_id: user_2, + busy: false, + }], + ); + assert!(db.has_contact(user_1, user_2).await.unwrap()); + assert!(db.has_contact(user_2, user_1).await.unwrap()); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[Contact::Accepted { + user_id: user_1, + busy: false, + }] + ); + + // Users cannot re-request existing contacts. + db.send_contact_request(user_1, user_2).await.unwrap_err(); + db.send_contact_request(user_2, user_1).await.unwrap_err(); + + // Users can't dismiss notifications of them accepting other users' requests. + db.dismiss_contact_notification(user_2, user_1) + .await + .unwrap_err(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[Contact::Accepted { + user_id: user_2, + busy: false, + }] + ); + + // Users can dismiss notifications of other users accepting their requests. + db.dismiss_contact_notification(user_1, user_2) + .await + .unwrap(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[Contact::Accepted { + user_id: user_2, + busy: false, + }] + ); + + // Users send each other concurrent contact requests and + // see that they are immediately accepted. + db.send_contact_request(user_1, user_3).await.unwrap(); + db.send_contact_request(user_3, user_1).await.unwrap(); + assert_eq!( + db.get_contacts(user_1).await.unwrap(), + &[ + Contact::Accepted { + user_id: user_2, + busy: false, + }, + Contact::Accepted { + user_id: user_3, + busy: false, + } + ] + ); + assert_eq!( + db.get_contacts(user_3).await.unwrap(), + &[Contact::Accepted { + user_id: user_1, + busy: false, + }], + ); + + // User declines a contact request. Both users see that it is gone. + db.send_contact_request(user_2, user_3).await.unwrap(); + db.respond_to_contact_request(user_3, user_2, false) + .await + .unwrap(); + assert!(!db.has_contact(user_2, user_3).await.unwrap()); + assert!(!db.has_contact(user_3, user_2).await.unwrap()); + assert_eq!( + db.get_contacts(user_2).await.unwrap(), + &[Contact::Accepted { + user_id: user_1, + busy: false, + }] + ); + assert_eq!( + db.get_contacts(user_3).await.unwrap(), + &[Contact::Accepted { + user_id: user_1, + busy: false, + }], + ); +} + +test_both_dbs!( + test_metrics_id, + test_metrics_id_postgres, + test_metrics_id_sqlite +); + +async fn test_metrics_id(db: &Arc) { + let NewUserResult { + user_id: user1, + metrics_id: metrics_id1, + .. 
+ } = db + .create_user( + "person1@example.com", + false, + NewUserParams { + github_login: "person1".into(), + github_user_id: 101, + }, + ) + .await + .unwrap(); + let NewUserResult { + user_id: user2, + metrics_id: metrics_id2, + .. + } = db + .create_user( + "person2@example.com", + false, + NewUserParams { + github_login: "person2".into(), + github_user_id: 102, + }, + ) + .await + .unwrap(); + + assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1); + assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2); + assert_eq!(metrics_id1.len(), 36); + assert_eq!(metrics_id2.len(), 36); + assert_ne!(metrics_id1, metrics_id2); +} + +test_both_dbs!( + test_project_count, + test_project_count_postgres, + test_project_count_sqlite +); + +async fn test_project_count(db: &Arc) { + let owner_id = db.create_server("test").await.unwrap().0 as u32; + + let user1 = db + .create_user( + "admin@example.com", + true, + NewUserParams { + github_login: "admin".into(), + github_user_id: 0, + }, + ) + .await + .unwrap(); + let user2 = db + .create_user( + "user@example.com", + false, + NewUserParams { + github_login: "user".into(), + github_user_id: 1, + }, + ) + .await + .unwrap(); + + let room_id = RoomId::from_proto( + db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "") + .await + .unwrap() + .id, + ); + db.call( + room_id, + user1.user_id, + ConnectionId { owner_id, id: 0 }, + user2.user_id, + None, + ) + .await + .unwrap(); + db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); + + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], None) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); + + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], None) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); + + // Projects shared by admins aren't counted. 
+ db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], None) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); + + db.leave_room(ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); +} + +#[test] +fn test_fuzzy_like_string() { + assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%"); + assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%"); + assert_eq!(Database::fuzzy_like_string(" z "), "%z%"); +} + +#[gpui::test] +async fn test_fuzzy_search_users(cx: &mut TestAppContext) { + let test_db = TestDb::postgres(cx.executor()); + let db = test_db.db(); + for (i, github_login) in [ + "California", + "colorado", + "oregon", + "washington", + "florida", + "delaware", + "rhode-island", + ] + .into_iter() + .enumerate() + { + db.create_user( + &format!("{github_login}@example.com"), + false, + NewUserParams { + github_login: github_login.into(), + github_user_id: i as i32, + }, + ) + .await + .unwrap(); + } + + assert_eq!( + fuzzy_search_user_names(db, "clr").await, + &["colorado", "California"] + ); + assert_eq!( + fuzzy_search_user_names(db, "ro").await, + &["rhode-island", "colorado", "oregon"], + ); + + async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec { + db.fuzzy_search_users(query, 10) + .await + .unwrap() + .into_iter() + .map(|user| user.github_login) + .collect::>() + } +} diff --git a/crates/collab/src/db/tests/embedding_tests.rs b/crates/collab/src/db/tests/embedding_tests.rs new file mode 100644 index 0000000..fcafac6 --- /dev/null +++ b/crates/collab/src/db/tests/embedding_tests.rs @@ -0,0 +1,84 @@ +use super::TestDb; +use crate::db::embedding; +use collections::HashMap; +use sea_orm::{sea_query::Expr, ColumnTrait, EntityTrait, QueryFilter}; +use std::ops::Sub; +use time::{Duration, OffsetDateTime, PrimitiveDateTime}; + +// SQLite does not support array arguments, so we only test this against a real postgres instance +#[gpui::test] +async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) { + let test_db = TestDb::postgres(cx.executor().clone()); + let db = test_db.db(); + + let provider = "test_model"; + let digest1 = vec![1, 2, 3]; + let digest2 = vec![4, 5, 6]; + let embeddings = HashMap::from_iter([ + (digest1.clone(), vec![0.1, 0.2, 0.3]), + (digest2.clone(), vec![0.4, 0.5, 0.6]), + ]); + + // Save embeddings + db.save_embeddings(provider, &embeddings).await.unwrap(); + + // Retrieve embeddings + let retrieved_embeddings = db + .get_embeddings(provider, &[digest1.clone(), digest2.clone()]) + .await + .unwrap(); + assert_eq!(retrieved_embeddings.len(), 2); + assert!(retrieved_embeddings.contains_key(&digest1)); + assert!(retrieved_embeddings.contains_key(&digest2)); + + // Check if the retrieved embeddings are correct + assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]); + assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]); +} + +#[gpui::test] +async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) { + let test_db = TestDb::postgres(cx.executor().clone()); + let db = test_db.db(); + + let model = "test_model"; + let digest = vec![7, 8, 9]; + let embeddings = HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]); + + // Save old embeddings + db.save_embeddings(model, &embeddings).await.unwrap(); + + // Reach into the DB and change the retrieved at to be > 60 days + db.weak_transaction(|tx| { + let digest = digest.clone(); + async move { + let sixty_days_ago = 
OffsetDateTime::now_utc().sub(Duration::days(61)); + let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time()); + + embedding::Entity::update_many() + .filter( + embedding::Column::Model + .eq(model) + .and(embedding::Column::Digest.eq(digest)), + ) + .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at)) + .exec(&*tx) + .await + .unwrap(); + + Ok(()) + } + }) + .await + .unwrap(); + + // Purge old embeddings + db.purge_old_embeddings().await.unwrap(); + + // Try to retrieve the purged embeddings + let retrieved_embeddings = db.get_embeddings(model, &[digest.clone()]).await.unwrap(); + assert!( + retrieved_embeddings.is_empty(), + "Old embeddings should have been purged" + ); +} diff --git a/crates/collab/src/db/tests/extension_tests.rs b/crates/collab/src/db/tests/extension_tests.rs new file mode 100644 index 0000000..b91570c --- /dev/null +++ b/crates/collab/src/db/tests/extension_tests.rs @@ -0,0 +1,386 @@ +use super::Database; +use crate::db::ExtensionVersionConstraints; +use crate::{ + db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion}, + test_both_dbs, +}; +use std::sync::Arc; + +test_both_dbs!( + test_extensions, + test_extensions_postgres, + test_extensions_sqlite +); + +async fn test_extensions(db: &Arc) { + let versions = db.get_known_extension_versions().await.unwrap(); + assert!(versions.is_empty()); + + let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + assert!(extensions.is_empty()); + + let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap(); + let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time()); + + let t0_chrono = convert_time_to_chrono(t0); + + db.insert_extension_versions( + &[ + ( + "ext1", + vec![ + NewExtensionVersion { + name: "Extension 1".into(), + version: semver::Version::parse("0.0.1").unwrap(), + description: "an extension".into(), + authors: vec!["max".into()], + repository: "ext1/repo".into(), + schema_version: 1, + wasm_api_version: None, + published_at: t0, + }, + NewExtensionVersion { + name: "Extension One".into(), + version: semver::Version::parse("0.0.2").unwrap(), + description: "a good extension".into(), + authors: vec!["max".into(), "marshall".into()], + repository: "ext1/repo".into(), + schema_version: 1, + wasm_api_version: None, + published_at: t0, + }, + ], + ), + ( + "ext2", + vec![NewExtensionVersion { + name: "Extension Two".into(), + version: semver::Version::parse("0.2.0").unwrap(), + description: "a great extension".into(), + authors: vec!["marshall".into()], + repository: "ext2/repo".into(), + schema_version: 0, + wasm_api_version: None, + published_at: t0, + }], + ), + ] + .into_iter() + .collect(), + ) + .await + .unwrap(); + + let versions = db.get_known_extension_versions().await.unwrap(); + assert_eq!( + versions, + [ + ("ext1".into(), vec!["0.0.1".into(), "0.0.2".into()]), + ("ext2".into(), vec!["0.2.0".into()]) + ] + .into_iter() + .collect() + ); + + // The latest version of each extension is returned. 
+ let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + assert_eq!( + extensions, + &[ + ExtensionMetadata { + id: "ext1".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension One".into(), + version: "0.0.2".into(), + authors: vec!["max".into(), "marshall".into()], + description: Some("a good extension".into()), + repository: "ext1/repo".into(), + schema_version: Some(1), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 0, + }, + ExtensionMetadata { + id: "ext2".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension Two".into(), + version: "0.2.0".into(), + authors: vec!["marshall".into()], + description: Some("a great extension".into()), + repository: "ext2/repo".into(), + schema_version: Some(0), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 0 + }, + ] + ); + + // Extensions with too new of a schema version are excluded. + let extensions = db.get_extensions(None, 0, 5).await.unwrap(); + assert_eq!( + extensions, + &[ExtensionMetadata { + id: "ext2".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension Two".into(), + version: "0.2.0".into(), + authors: vec!["marshall".into()], + description: Some("a great extension".into()), + repository: "ext2/repo".into(), + schema_version: Some(0), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 0 + },] + ); + + // Record extensions being downloaded. + for _ in 0..7 { + assert!(db.record_extension_download("ext2", "0.0.2").await.unwrap()); + } + + for _ in 0..3 { + assert!(db.record_extension_download("ext1", "0.0.1").await.unwrap()); + } + + for _ in 0..2 { + assert!(db.record_extension_download("ext1", "0.0.2").await.unwrap()); + } + + // Record download returns false if the extension does not exist. + assert!(!db + .record_extension_download("no-such-extension", "0.0.2") + .await + .unwrap()); + + // Extensions are returned in descending order of total downloads. + let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + assert_eq!( + extensions, + &[ + ExtensionMetadata { + id: "ext2".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension Two".into(), + version: "0.2.0".into(), + authors: vec!["marshall".into()], + description: Some("a great extension".into()), + repository: "ext2/repo".into(), + schema_version: Some(0), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 7 + }, + ExtensionMetadata { + id: "ext1".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension One".into(), + version: "0.0.2".into(), + authors: vec!["max".into(), "marshall".into()], + description: Some("a good extension".into()), + repository: "ext1/repo".into(), + schema_version: Some(1), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 5, + }, + ] + ); + + // Add more extensions, including a new version of `ext1`, and backfilling + // an older version of `ext2`. 
+ db.insert_extension_versions( + &[ + ( + "ext1", + vec![NewExtensionVersion { + name: "Extension One".into(), + version: semver::Version::parse("0.0.3").unwrap(), + description: "a real good extension".into(), + authors: vec!["max".into(), "marshall".into()], + repository: "ext1/repo".into(), + schema_version: 1, + wasm_api_version: None, + published_at: t0, + }], + ), + ( + "ext2", + vec![NewExtensionVersion { + name: "Extension Two".into(), + version: semver::Version::parse("0.1.0").unwrap(), + description: "an old extension".into(), + authors: vec!["marshall".into()], + repository: "ext2/repo".into(), + schema_version: 0, + wasm_api_version: None, + published_at: t0, + }], + ), + ] + .into_iter() + .collect(), + ) + .await + .unwrap(); + + let versions = db.get_known_extension_versions().await.unwrap(); + assert_eq!( + versions, + [ + ( + "ext1".into(), + vec!["0.0.1".into(), "0.0.2".into(), "0.0.3".into()] + ), + ("ext2".into(), vec!["0.1.0".into(), "0.2.0".into()]) + ] + .into_iter() + .collect() + ); + + let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + assert_eq!( + extensions, + &[ + ExtensionMetadata { + id: "ext2".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension Two".into(), + version: "0.2.0".into(), + authors: vec!["marshall".into()], + description: Some("a great extension".into()), + repository: "ext2/repo".into(), + schema_version: Some(0), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 7 + }, + ExtensionMetadata { + id: "ext1".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension One".into(), + version: "0.0.3".into(), + authors: vec!["max".into(), "marshall".into()], + description: Some("a real good extension".into()), + repository: "ext1/repo".into(), + schema_version: Some(1), + wasm_api_version: None, + }, + published_at: t0_chrono, + download_count: 5, + }, + ] + ); +} + +test_both_dbs!( + test_extensions_by_id, + test_extensions_by_id_postgres, + test_extensions_by_id_sqlite +); + +async fn test_extensions_by_id(db: &Arc) { + let versions = db.get_known_extension_versions().await.unwrap(); + assert!(versions.is_empty()); + + let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + assert!(extensions.is_empty()); + + let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap(); + let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time()); + + let t0_chrono = convert_time_to_chrono(t0); + + db.insert_extension_versions( + &[ + ( + "ext1", + vec![ + NewExtensionVersion { + name: "Extension 1".into(), + version: semver::Version::parse("0.0.1").unwrap(), + description: "an extension".into(), + authors: vec!["max".into()], + repository: "ext1/repo".into(), + schema_version: 1, + wasm_api_version: Some("0.0.4".into()), + published_at: t0, + }, + NewExtensionVersion { + name: "Extension 1".into(), + version: semver::Version::parse("0.0.2").unwrap(), + description: "a good extension".into(), + authors: vec!["max".into()], + repository: "ext1/repo".into(), + schema_version: 1, + wasm_api_version: Some("0.0.4".into()), + published_at: t0, + }, + NewExtensionVersion { + name: "Extension 1".into(), + version: semver::Version::parse("0.0.3").unwrap(), + description: "a real good extension".into(), + authors: vec!["max".into(), "marshall".into()], + repository: "ext1/repo".into(), + schema_version: 1, + wasm_api_version: Some("0.0.5".into()), + published_at: t0, + }, + ], + ), + ( + "ext2", + vec![NewExtensionVersion { + name: "Extension 2".into(), + version: 
semver::Version::parse("0.2.0").unwrap(), + description: "a great extension".into(), + authors: vec!["marshall".into()], + repository: "ext2/repo".into(), + schema_version: 0, + wasm_api_version: None, + published_at: t0, + }], + ), + ] + .into_iter() + .collect(), + ) + .await + .unwrap(); + + let extensions = db + .get_extensions_by_ids( + &["ext1"], + Some(&ExtensionVersionConstraints { + schema_versions: 1..=1, + wasm_api_versions: "0.0.1".parse().unwrap()..="0.0.4".parse().unwrap(), + }), + ) + .await + .unwrap(); + + assert_eq!( + extensions, + &[ExtensionMetadata { + id: "ext1".into(), + manifest: rpc::ExtensionApiManifest { + name: "Extension 1".into(), + version: "0.0.2".into(), + authors: vec!["max".into()], + description: Some("a good extension".into()), + repository: "ext1/repo".into(), + schema_version: Some(1), + wasm_api_version: Some("0.0.4".into()), + }, + published_at: t0_chrono, + download_count: 0, + }] + ); +} diff --git a/crates/collab/src/db/tests/feature_flag_tests.rs b/crates/collab/src/db/tests/feature_flag_tests.rs new file mode 100644 index 0000000..5269d53 --- /dev/null +++ b/crates/collab/src/db/tests/feature_flag_tests.rs @@ -0,0 +1,58 @@ +use crate::{ + db::{Database, NewUserParams}, + test_both_dbs, +}; +use std::sync::Arc; + +test_both_dbs!( + test_get_user_flags, + test_get_user_flags_postgres, + test_get_user_flags_sqlite +); + +async fn test_get_user_flags(db: &Arc) { + let user_1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".to_string(), + github_user_id: 1, + }, + ) + .await + .unwrap() + .user_id; + + let user_2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "user2".to_string(), + github_user_id: 2, + }, + ) + .await + .unwrap() + .user_id; + + const CHANNELS_ALPHA: &str = "channels-alpha"; + const NEW_SEARCH: &str = "new-search"; + + let channels_flag = db.create_user_flag(CHANNELS_ALPHA).await.unwrap(); + let search_flag = db.create_user_flag(NEW_SEARCH).await.unwrap(); + + db.add_user_flag(user_1, channels_flag).await.unwrap(); + db.add_user_flag(user_1, search_flag).await.unwrap(); + + db.add_user_flag(user_2, channels_flag).await.unwrap(); + + let mut user_1_flags = db.get_user_flags(user_1).await.unwrap(); + user_1_flags.sort(); + assert_eq!(user_1_flags, &[CHANNELS_ALPHA, NEW_SEARCH]); + + let mut user_2_flags = db.get_user_flags(user_2).await.unwrap(); + user_2_flags.sort(); + assert_eq!(user_2_flags, &[CHANNELS_ALPHA]); +} diff --git a/crates/collab/src/db/tests/message_tests.rs b/crates/collab/src/db/tests/message_tests.rs new file mode 100644 index 0000000..e20473d --- /dev/null +++ b/crates/collab/src/db/tests/message_tests.rs @@ -0,0 +1,421 @@ +use super::new_test_user; +use crate::{ + db::{ChannelRole, Database, MessageId}, + test_both_dbs, +}; +use channel::mentions_to_proto; +use std::sync::Arc; +use time::OffsetDateTime; + +test_both_dbs!( + test_channel_message_retrieval, + test_channel_message_retrieval_postgres, + test_channel_message_retrieval_sqlite +); + +async fn test_channel_message_retrieval(db: &Arc) { + let user = new_test_user(db, "user@example.com").await; + let channel = db.create_channel("channel", None, user).await.unwrap().0; + + let owner_id = db.create_server("test").await.unwrap().0 as u32; + db.join_channel_chat(channel.id, rpc::ConnectionId { owner_id, id: 0 }, user) + .await + .unwrap(); + + let mut all_messages = Vec::new(); + for i in 0..10 { + all_messages.push( + db.create_channel_message( + channel.id, + user, + 
&i.to_string(), + &[], + OffsetDateTime::now_utc(), + i, + None, + ) + .await + .unwrap() + .message_id + .to_proto(), + ); + } + + let messages = db + .get_channel_messages(channel.id, user, 3, None) + .await + .unwrap() + .into_iter() + .map(|message| message.id) + .collect::>(); + assert_eq!(messages, &all_messages[7..10]); + + let messages = db + .get_channel_messages( + channel.id, + user, + 4, + Some(MessageId::from_proto(all_messages[6])), + ) + .await + .unwrap() + .into_iter() + .map(|message| message.id) + .collect::>(); + assert_eq!(messages, &all_messages[2..6]); +} + +test_both_dbs!( + test_channel_message_nonces, + test_channel_message_nonces_postgres, + test_channel_message_nonces_sqlite +); + +async fn test_channel_message_nonces(db: &Arc) { + let user_a = new_test_user(db, "user_a@example.com").await; + let user_b = new_test_user(db, "user_b@example.com").await; + let user_c = new_test_user(db, "user_c@example.com").await; + let channel = db.create_root_channel("channel", user_a).await.unwrap(); + db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member) + .await + .unwrap(); + db.invite_channel_member(channel, user_c, user_a, ChannelRole::Member) + .await + .unwrap(); + db.respond_to_channel_invite(channel, user_b, true) + .await + .unwrap(); + db.respond_to_channel_invite(channel, user_c, true) + .await + .unwrap(); + + let owner_id = db.create_server("test").await.unwrap().0 as u32; + db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 0 }, user_a) + .await + .unwrap(); + db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 1 }, user_b) + .await + .unwrap(); + + // As user A, create messages that reuse the same nonces. The requests + // succeed, but return the same ids. + let id1 = db + .create_channel_message( + channel, + user_a, + "hi @user_b", + &mentions_to_proto(&[(3..10, user_b.to_proto())]), + OffsetDateTime::now_utc(), + 100, + None, + ) + .await + .unwrap() + .message_id; + let id2 = db + .create_channel_message( + channel, + user_a, + "hello, fellow users", + &mentions_to_proto(&[]), + OffsetDateTime::now_utc(), + 200, + None, + ) + .await + .unwrap() + .message_id; + let id3 = db + .create_channel_message( + channel, + user_a, + "bye @user_c (same nonce as first message)", + &mentions_to_proto(&[(4..11, user_c.to_proto())]), + OffsetDateTime::now_utc(), + 100, + None, + ) + .await + .unwrap() + .message_id; + let id4 = db + .create_channel_message( + channel, + user_a, + "omg (same nonce as second message)", + &mentions_to_proto(&[]), + OffsetDateTime::now_utc(), + 200, + None, + ) + .await + .unwrap() + .message_id; + + // As a different user, reuse one of the same nonces. This request succeeds + // and returns a different id. 
+ let id5 = db + .create_channel_message( + channel, + user_b, + "omg @user_a (same nonce as user_a's first message)", + &mentions_to_proto(&[(4..11, user_a.to_proto())]), + OffsetDateTime::now_utc(), + 100, + None, + ) + .await + .unwrap() + .message_id; + + assert_ne!(id1, id2); + assert_eq!(id1, id3); + assert_eq!(id2, id4); + assert_ne!(id5, id1); + + let messages = db + .get_channel_messages(channel, user_a, 5, None) + .await + .unwrap() + .into_iter() + .map(|m| (m.id, m.body, m.mentions)) + .collect::>(); + assert_eq!( + messages, + &[ + ( + id1.to_proto(), + "hi @user_b".into(), + mentions_to_proto(&[(3..10, user_b.to_proto())]), + ), + ( + id2.to_proto(), + "hello, fellow users".into(), + mentions_to_proto(&[]) + ), + ( + id5.to_proto(), + "omg @user_a (same nonce as user_a's first message)".into(), + mentions_to_proto(&[(4..11, user_a.to_proto())]), + ), + ] + ); +} + +test_both_dbs!( + test_unseen_channel_messages, + test_unseen_channel_messages_postgres, + test_unseen_channel_messages_sqlite +); + +async fn test_unseen_channel_messages(db: &Arc) { + let user = new_test_user(db, "user_a@example.com").await; + let observer = new_test_user(db, "user_b@example.com").await; + + let channel_1 = db.create_root_channel("channel", user).await.unwrap(); + let channel_2 = db.create_root_channel("channel-2", user).await.unwrap(); + + db.invite_channel_member(channel_1, observer, user, ChannelRole::Member) + .await + .unwrap(); + db.invite_channel_member(channel_2, observer, user, ChannelRole::Member) + .await + .unwrap(); + + db.respond_to_channel_invite(channel_1, observer, true) + .await + .unwrap(); + db.respond_to_channel_invite(channel_2, observer, true) + .await + .unwrap(); + + let owner_id = db.create_server("test").await.unwrap().0 as u32; + let user_connection_id = rpc::ConnectionId { owner_id, id: 0 }; + + db.join_channel_chat(channel_1, user_connection_id, user) + .await + .unwrap(); + + let _ = db + .create_channel_message( + channel_1, + user, + "1_1", + &[], + OffsetDateTime::now_utc(), + 1, + None, + ) + .await + .unwrap(); + + let _ = db + .create_channel_message( + channel_1, + user, + "1_2", + &[], + OffsetDateTime::now_utc(), + 2, + None, + ) + .await + .unwrap(); + + let third_message = db + .create_channel_message( + channel_1, + user, + "1_3", + &[], + OffsetDateTime::now_utc(), + 3, + None, + ) + .await + .unwrap() + .message_id; + + db.join_channel_chat(channel_2, user_connection_id, user) + .await + .unwrap(); + + let fourth_message = db + .create_channel_message( + channel_2, + user, + "2_1", + &[], + OffsetDateTime::now_utc(), + 4, + None, + ) + .await + .unwrap() + .message_id; + + // Check that observer has new messages + let latest_messages = db + .transaction(|tx| async move { + db.latest_channel_messages(&[channel_1, channel_2], &tx) + .await + }) + .await + .unwrap(); + + assert_eq!( + latest_messages, + [ + rpc::proto::ChannelMessageId { + channel_id: channel_1.to_proto(), + message_id: third_message.to_proto(), + }, + rpc::proto::ChannelMessageId { + channel_id: channel_2.to_proto(), + message_id: fourth_message.to_proto(), + }, + ] + ); +} + +test_both_dbs!( + test_channel_message_mentions, + test_channel_message_mentions_postgres, + test_channel_message_mentions_sqlite +); + +async fn test_channel_message_mentions(db: &Arc) { + let user_a = new_test_user(db, "user_a@example.com").await; + let user_b = new_test_user(db, "user_b@example.com").await; + let user_c = new_test_user(db, "user_c@example.com").await; + + let channel = db + 
.create_channel("channel", None, user_a) + .await + .unwrap() + .0 + .id; + db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member) + .await + .unwrap(); + db.respond_to_channel_invite(channel, user_b, true) + .await + .unwrap(); + + let owner_id = db.create_server("test").await.unwrap().0 as u32; + let connection_id = rpc::ConnectionId { owner_id, id: 0 }; + db.join_channel_chat(channel, connection_id, user_a) + .await + .unwrap(); + + db.create_channel_message( + channel, + user_a, + "hi @user_b and @user_c", + &mentions_to_proto(&[(3..10, user_b.to_proto()), (15..22, user_c.to_proto())]), + OffsetDateTime::now_utc(), + 1, + None, + ) + .await + .unwrap(); + db.create_channel_message( + channel, + user_a, + "bye @user_c", + &mentions_to_proto(&[(4..11, user_c.to_proto())]), + OffsetDateTime::now_utc(), + 2, + None, + ) + .await + .unwrap(); + db.create_channel_message( + channel, + user_a, + "umm", + &mentions_to_proto(&[]), + OffsetDateTime::now_utc(), + 3, + None, + ) + .await + .unwrap(); + db.create_channel_message( + channel, + user_a, + "@user_b, stop.", + &mentions_to_proto(&[(0..7, user_b.to_proto())]), + OffsetDateTime::now_utc(), + 4, + None, + ) + .await + .unwrap(); + + let messages = db + .get_channel_messages(channel, user_b, 5, None) + .await + .unwrap() + .into_iter() + .map(|m| (m.body, m.mentions)) + .collect::>(); + assert_eq!( + &messages, + &[ + ( + "hi @user_b and @user_c".into(), + mentions_to_proto(&[(3..10, user_b.to_proto()), (15..22, user_c.to_proto())]), + ), + ( + "bye @user_c".into(), + mentions_to_proto(&[(4..11, user_c.to_proto())]), + ), + ("umm".into(), mentions_to_proto(&[]),), + ( + "@user_b, stop.".into(), + mentions_to_proto(&[(0..7, user_b.to_proto())]), + ), + ] + ); +} diff --git a/crates/collab/src/env.rs b/crates/collab/src/env.rs new file mode 100644 index 0000000..4e6fe3b --- /dev/null +++ b/crates/collab/src/env.rs @@ -0,0 +1,21 @@ +use anyhow::anyhow; +use std::fs; + +pub fn load_dotenv() -> anyhow::Result<()> { + let env: toml::map::Map = toml::de::from_str( + &fs::read_to_string("./crates/collab/.env.toml") + .map_err(|_| anyhow!("no .env.toml file found"))?, + )?; + + for (key, value) in env { + let value = match value { + toml::Value::String(value) => value, + toml::Value::Integer(value) => value.to_string(), + toml::Value::Float(value) => value.to_string(), + _ => panic!("unsupported TOML value in .env.toml for key {}", key), + }; + std::env::set_var(key, value); + } + + Ok(()) +} diff --git a/crates/collab/src/errors.rs b/crates/collab/src/errors.rs new file mode 100644 index 0000000..93e4684 --- /dev/null +++ b/crates/collab/src/errors.rs @@ -0,0 +1,29 @@ +// Allow tide Results to accept context like other Results do when +// using anyhow. 
+pub trait TideResultExt { + fn context(self, cx: C) -> Self + where + C: std::fmt::Display + Send + Sync + 'static; + + fn with_context(self, f: F) -> Self + where + C: std::fmt::Display + Send + Sync + 'static, + F: FnOnce() -> C; +} + +impl TideResultExt for tide::Result { + fn context(self, cx: C) -> Self + where + C: std::fmt::Display + Send + Sync + 'static, + { + self.map_err(|e| tide::Error::new(e.status(), e.into_inner().context(cx))) + } + + fn with_context(self, f: F) -> Self + where + C: std::fmt::Display + Send + Sync + 'static, + F: FnOnce() -> C, + { + self.map_err(|e| tide::Error::new(e.status(), e.into_inner().context(f()))) + } +} diff --git a/crates/collab/src/executor.rs b/crates/collab/src/executor.rs new file mode 100644 index 0000000..81d5e97 --- /dev/null +++ b/crates/collab/src/executor.rs @@ -0,0 +1,39 @@ +use std::{future::Future, time::Duration}; + +#[cfg(test)] +use gpui::BackgroundExecutor; + +#[derive(Clone)] +pub enum Executor { + Production, + #[cfg(test)] + Deterministic(BackgroundExecutor), +} + +impl Executor { + pub fn spawn_detached(&self, future: F) + where + F: 'static + Send + Future, + { + match self { + Executor::Production => { + tokio::spawn(future); + } + #[cfg(test)] + Executor::Deterministic(background) => { + background.spawn(future).detach(); + } + } + } + + pub fn sleep(&self, duration: Duration) -> impl Future { + let this = self.clone(); + async move { + match this { + Executor::Production => tokio::time::sleep(duration).await, + #[cfg(test)] + Executor::Deterministic(background) => background.timer(duration).await, + } + } + } +} diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs new file mode 100644 index 0000000..ae83fcc --- /dev/null +++ b/crates/collab/src/lib.rs @@ -0,0 +1,260 @@ +pub mod ai; +pub mod api; +pub mod auth; +pub mod db; +pub mod env; +pub mod executor; +mod rate_limiter; +pub mod rpc; +pub mod seed; + +#[cfg(test)] +mod tests; + +use anyhow::anyhow; +use aws_config::{BehaviorVersion, Region}; +use axum::{http::StatusCode, response::IntoResponse}; +use db::{ChannelId, Database}; +use executor::Executor; +pub use rate_limiter::*; +use serde::Deserialize; +use std::{path::PathBuf, sync::Arc}; +use util::ResultExt; + +pub type Result = std::result::Result; + +pub enum Error { + Http(StatusCode, String), + Database(sea_orm::error::DbErr), + Internal(anyhow::Error), +} + +impl From for Error { + fn from(error: anyhow::Error) -> Self { + Self::Internal(error) + } +} + +impl From for Error { + fn from(error: sea_orm::error::DbErr) -> Self { + Self::Database(error) + } +} + +impl From for Error { + fn from(error: axum::Error) -> Self { + Self::Internal(error.into()) + } +} + +impl From for Error { + fn from(error: axum::http::Error) -> Self { + Self::Internal(error.into()) + } +} + +impl From for Error { + fn from(error: serde_json::Error) -> Self { + Self::Internal(error.into()) + } +} + +impl IntoResponse for Error { + fn into_response(self) -> axum::response::Response { + match self { + Error::Http(code, message) => { + log::error!("HTTP error {}: {}", code, &message); + (code, message).into_response() + } + Error::Database(error) => { + log::error!( + "HTTP error {}: {:?}", + StatusCode::INTERNAL_SERVER_ERROR, + &error + ); + (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response() + } + Error::Internal(error) => { + log::error!( + "HTTP error {}: {:?}", + StatusCode::INTERNAL_SERVER_ERROR, + &error + ); + (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response() + } + } + } 
+} + +impl std::fmt::Debug for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Error::Http(code, message) => (code, message).fmt(f), + Error::Database(error) => error.fmt(f), + Error::Internal(error) => error.fmt(f), + } + } +} + +impl std::fmt::Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Error::Http(code, message) => write!(f, "{code}: {message}"), + Error::Database(error) => error.fmt(f), + Error::Internal(error) => error.fmt(f), + } + } +} + +impl std::error::Error for Error {} + +#[derive(Deserialize)] +pub struct Config { + pub http_port: u16, + pub database_url: String, + pub migrations_path: Option, + pub seed_path: Option, + pub database_max_connections: u32, + pub api_token: String, + pub clickhouse_url: Option, + pub clickhouse_user: Option, + pub clickhouse_password: Option, + pub clickhouse_database: Option, + pub invite_link_prefix: String, + pub live_kit_server: Option, + pub live_kit_key: Option, + pub live_kit_secret: Option, + pub rust_log: Option, + pub log_json: Option, + pub blob_store_url: Option, + pub blob_store_region: Option, + pub blob_store_access_key: Option, + pub blob_store_secret_key: Option, + pub blob_store_bucket: Option, + pub zed_environment: Arc, + pub openai_api_key: Option>, + pub google_ai_api_key: Option>, + pub anthropic_api_key: Option>, + pub zed_client_checksum_seed: Option, + pub slack_panics_webhook: Option, + pub auto_join_channel_id: Option, + pub supermaven_admin_api_key: Option>, +} + +impl Config { + pub fn is_development(&self) -> bool { + self.zed_environment == "development".into() + } +} + +pub struct AppState { + pub db: Arc, + pub live_kit_client: Option>, + pub blob_store_client: Option, + pub rate_limiter: Arc, + pub executor: Executor, + pub clickhouse_client: Option, + pub config: Config, +} + +impl AppState { + pub async fn new(config: Config, executor: Executor) -> Result> { + let mut db_options = db::ConnectOptions::new(config.database_url.clone()); + db_options.max_connections(config.database_max_connections); + let mut db = Database::new(db_options, Executor::Production).await?; + db.initialize_notification_kinds().await?; + + let live_kit_client = if let Some(((server, key), secret)) = config + .live_kit_server + .as_ref() + .zip(config.live_kit_key.as_ref()) + .zip(config.live_kit_secret.as_ref()) + { + Some(Arc::new(live_kit_server::api::LiveKitClient::new( + server.clone(), + key.clone(), + secret.clone(), + )) as Arc) + } else { + None + }; + + let db = Arc::new(db); + let this = Self { + db: db.clone(), + live_kit_client, + blob_store_client: build_blob_store_client(&config).await.log_err(), + rate_limiter: Arc::new(RateLimiter::new(db)), + executor, + clickhouse_client: config + .clickhouse_url + .as_ref() + .and_then(|_| build_clickhouse_client(&config).log_err()), + config, + }; + Ok(Arc::new(this)) + } +} + +async fn build_blob_store_client(config: &Config) -> anyhow::Result { + let keys = aws_sdk_s3::config::Credentials::new( + config + .blob_store_access_key + .clone() + .ok_or_else(|| anyhow!("missing blob_store_access_key"))?, + config + .blob_store_secret_key + .clone() + .ok_or_else(|| anyhow!("missing blob_store_secret_key"))?, + None, + None, + "env", + ); + + let s3_config = aws_config::defaults(BehaviorVersion::latest()) + .endpoint_url( + config + .blob_store_url + .as_ref() + .ok_or_else(|| anyhow!("missing blob_store_url"))?, + ) + .region(Region::new( + config + .blob_store_region + 
.clone() + .ok_or_else(|| anyhow!("missing blob_store_region"))?, + )) + .credentials_provider(keys) + .load() + .await; + + Ok(aws_sdk_s3::Client::new(&s3_config)) +} + +fn build_clickhouse_client(config: &Config) -> anyhow::Result { + Ok(clickhouse::Client::default() + .with_url( + config + .clickhouse_url + .as_ref() + .ok_or_else(|| anyhow!("missing clickhouse_url"))?, + ) + .with_user( + config + .clickhouse_user + .as_ref() + .ok_or_else(|| anyhow!("missing clickhouse_user"))?, + ) + .with_password( + config + .clickhouse_password + .as_ref() + .ok_or_else(|| anyhow!("missing clickhouse_password"))?, + ) + .with_database( + config + .clickhouse_database + .as_ref() + .ok_or_else(|| anyhow!("missing clickhouse_database"))?, + )) +} diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs new file mode 100644 index 0000000..b85d378 --- /dev/null +++ b/crates/collab/src/main.rs @@ -0,0 +1,261 @@ +use anyhow::anyhow; +use axum::{ + extract::MatchedPath, + http::{Request, Response}, + routing::get, + Extension, Router, +}; +use collab::{ + api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor, + rpc::ResultExt, AppState, Config, RateLimiter, Result, +}; +use db::Database; +use std::{ + env::args, + net::{SocketAddr, TcpListener}, + path::Path, + sync::Arc, + time::Duration, +}; +#[cfg(unix)] +use tokio::signal::unix::SignalKind; +use tower_http::trace::TraceLayer; +use tracing_subscriber::{ + filter::EnvFilter, fmt::format::JsonFields, util::SubscriberInitExt, Layer, +}; +use util::ResultExt as _; + +const VERSION: &str = env!("CARGO_PKG_VERSION"); +const REVISION: Option<&'static str> = option_env!("GITHUB_SHA"); + +#[tokio::main] +async fn main() -> Result<()> { + if let Err(error) = env::load_dotenv() { + eprintln!( + "error loading .env.toml (this is expected in production): {}", + error + ); + } + + let mut args = args().skip(1); + match args.next().as_deref() { + Some("version") => { + println!("collab v{} ({})", VERSION, REVISION.unwrap_or("unknown")); + } + Some("migrate") => { + let config = envy::from_env::().expect("error loading config"); + run_migrations(&config).await?; + } + Some("seed") => { + let config = envy::from_env::().expect("error loading config"); + let db_options = db::ConnectOptions::new(config.database_url.clone()); + let mut db = Database::new(db_options, Executor::Production).await?; + db.initialize_notification_kinds().await?; + + collab::seed::seed(&config, &db, true).await?; + } + Some("serve") => { + let (is_api, is_collab) = if let Some(next) = args.next() { + (next == "api", next == "collab") + } else { + (true, true) + }; + if !is_api && !is_collab { + Err(anyhow!( + "usage: collab " + ))?; + } + + let config = envy::from_env::().expect("error loading config"); + init_tracing(&config); + + run_migrations(&config).await?; + + let state = AppState::new(config, Executor::Production).await?; + + let listener = TcpListener::bind(&format!("0.0.0.0:{}", state.config.http_port)) + .expect("failed to bind TCP listener"); + + let rpc_server = if is_collab { + let epoch = state + .db + .create_server(&state.config.zed_environment) + .await?; + let rpc_server = collab::rpc::Server::new(epoch, state.clone()); + rpc_server.start().await?; + + Some(rpc_server) + } else { + None + }; + + if is_collab { + state.db.purge_old_embeddings().await.trace_err(); + RateLimiter::save_periodically(state.rate_limiter.clone(), state.executor.clone()); + } + + if is_api { + fetch_extensions_from_blob_store_periodically(state.clone()); + } + + let 
mut app = collab::api::routes(rpc_server.clone(), state.clone()); + if let Some(rpc_server) = rpc_server.clone() { + app = app.merge(collab::rpc::routes(rpc_server)) + } + app = app + .merge( + Router::new() + .route("/", get(handle_root)) + .route("/healthz", get(handle_liveness_probe)) + .merge(collab::api::extensions::router()) + .merge(collab::api::events::router()) + .layer(Extension(state.clone())), + ) + .layer( + TraceLayer::new_for_http() + .make_span_with(|request: &Request<_>| { + let matched_path = request + .extensions() + .get::() + .map(MatchedPath::as_str); + + tracing::info_span!( + "http_request", + method = ?request.method(), + matched_path, + ) + }) + .on_response( + |response: &Response<_>, latency: Duration, _: &tracing::Span| { + let duration_ms = latency.as_micros() as f64 / 1000.; + tracing::info!( + duration_ms, + status = response.status().as_u16(), + "finished processing request" + ); + }, + ), + ); + + #[cfg(unix)] + let signal = async move { + let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate()) + .expect("failed to listen for interrupt signal"); + let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt()) + .expect("failed to listen for interrupt signal"); + let sigterm = sigterm.recv(); + let sigint = sigint.recv(); + futures::pin_mut!(sigterm, sigint); + futures::future::select(sigterm, sigint).await; + }; + + #[cfg(windows)] + let signal = async move { + // todo(windows): + // `ctrl_close` does not work well, because tokio's signal handler always returns soon, + // but system termiates the application soon after returning CTRL+CLOSE handler. + // So we should implement blocking handler to treat CTRL+CLOSE signal. + let mut ctrl_break = tokio::signal::windows::ctrl_break() + .expect("failed to listen for interrupt signal"); + let mut ctrl_c = tokio::signal::windows::ctrl_c() + .expect("failed to listen for interrupt signal"); + let ctrl_break = ctrl_break.recv(); + let ctrl_c = ctrl_c.recv(); + futures::pin_mut!(ctrl_break, ctrl_c); + futures::future::select(ctrl_break, ctrl_c).await; + }; + + axum::Server::from_tcp(listener) + .map_err(|e| anyhow!(e))? 
+ .serve(app.into_make_service_with_connect_info::()) + .with_graceful_shutdown(async move { + signal.await; + tracing::info!("Received interrupt signal"); + + if let Some(rpc_server) = rpc_server { + rpc_server.teardown(); + } + }) + .await + .map_err(|e| anyhow!(e))?; + } + _ => { + Err(anyhow!( + "usage: collab " + ))?; + } + } + Ok(()) +} + +async fn run_migrations(config: &Config) -> Result<()> { + let db_options = db::ConnectOptions::new(config.database_url.clone()); + let mut db = Database::new(db_options, Executor::Production).await?; + + let migrations_path = config.migrations_path.as_deref().unwrap_or_else(|| { + #[cfg(feature = "sqlite")] + let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations.sqlite"); + #[cfg(not(feature = "sqlite"))] + let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations"); + + Path::new(default_migrations) + }); + + let migrations = db.migrate(&migrations_path, false).await?; + for (migration, duration) in migrations { + log::info!( + "Migrated {} {} {:?}", + migration.version, + migration.description, + duration + ); + } + + db.initialize_notification_kinds().await?; + + if config.seed_path.is_some() { + collab::seed::seed(&config, &db, false).await?; + } + + return Ok(()); +} + +async fn handle_root() -> String { + format!("collab v{} ({})", VERSION, REVISION.unwrap_or("unknown")) +} + +async fn handle_liveness_probe(Extension(state): Extension>) -> Result { + state.db.get_all_users(0, 1).await?; + Ok("ok".to_string()) +} + +pub fn init_tracing(config: &Config) -> Option<()> { + use std::str::FromStr; + use tracing_subscriber::layer::SubscriberExt; + + let filter = EnvFilter::from_str(config.rust_log.as_deref()?).log_err()?; + + tracing_subscriber::registry() + .with(if config.log_json.unwrap_or(false) { + Box::new( + tracing_subscriber::fmt::layer() + .fmt_fields(JsonFields::default()) + .event_format( + tracing_subscriber::fmt::format() + .json() + .flatten_event(true) + .with_span_list(false), + ) + .with_filter(filter), + ) as Box + Send + Sync> + } else { + Box::new( + tracing_subscriber::fmt::layer() + .event_format(tracing_subscriber::fmt::format().pretty()) + .with_filter(filter), + ) + }) + .init(); + + None +} diff --git a/crates/collab/src/rate_limiter.rs b/crates/collab/src/rate_limiter.rs new file mode 100644 index 0000000..e6f1fcb --- /dev/null +++ b/crates/collab/src/rate_limiter.rs @@ -0,0 +1,274 @@ +use crate::{db::UserId, executor::Executor, Database, Error, Result}; +use anyhow::anyhow; +use chrono::{DateTime, Duration, Utc}; +use dashmap::{DashMap, DashSet}; +use sea_orm::prelude::DateTimeUtc; +use std::sync::Arc; +use util::ResultExt; + +pub trait RateLimit: 'static { + fn capacity() -> usize; + fn refill_duration() -> Duration; + fn db_name() -> &'static str; +} + +/// Used to enforce per-user rate limits +pub struct RateLimiter { + buckets: DashMap<(UserId, String), RateBucket>, + dirty_buckets: DashSet<(UserId, String)>, + db: Arc, +} + +impl RateLimiter { + pub fn new(db: Arc) -> Self { + RateLimiter { + buckets: DashMap::new(), + dirty_buckets: DashSet::new(), + db, + } + } + + /// Spawns a new task that periodically saves rate limit data to the database. 
+ pub fn save_periodically(rate_limiter: Arc, executor: Executor) { + const RATE_LIMITER_SAVE_INTERVAL: std::time::Duration = std::time::Duration::from_secs(10); + + executor.clone().spawn_detached(async move { + loop { + executor.sleep(RATE_LIMITER_SAVE_INTERVAL).await; + rate_limiter.save().await.log_err(); + } + }); + } + + /// Returns an error if the user has exceeded the specified `RateLimit`. + /// Attempts to read the from the database if no cached RateBucket currently exists. + pub async fn check(&self, user_id: UserId) -> Result<()> { + self.check_internal::(user_id, Utc::now()).await + } + + async fn check_internal(&self, user_id: UserId, now: DateTimeUtc) -> Result<()> { + let bucket_key = (user_id, T::db_name().to_string()); + + // Attempt to fetch the bucket from the database if it hasn't been cached. + // For now, we keep buckets in memory for the lifetime of the process rather than expiring them, + // but this enforces limits across restarts so long as the database is reachable. + if !self.buckets.contains_key(&bucket_key) { + if let Some(bucket) = self.load_bucket::(user_id).await.log_err().flatten() { + self.buckets.insert(bucket_key.clone(), bucket); + self.dirty_buckets.insert(bucket_key.clone()); + } + } + + let mut bucket = self + .buckets + .entry(bucket_key.clone()) + .or_insert_with(|| RateBucket::new(T::capacity(), T::refill_duration(), now)); + + if bucket.value_mut().allow(now) { + self.dirty_buckets.insert(bucket_key); + Ok(()) + } else { + Err(anyhow!("rate limit exceeded"))? + } + } + + async fn load_bucket( + &self, + user_id: UserId, + ) -> Result, Error> { + Ok(self + .db + .get_rate_bucket(user_id, K::db_name()) + .await? + .map(|saved_bucket| RateBucket { + capacity: K::capacity(), + refill_time_per_token: K::refill_duration(), + token_count: saved_bucket.token_count as usize, + last_refill: DateTime::from_naive_utc_and_offset(saved_bucket.last_refill, Utc), + })) + } + + pub async fn save(&self) -> Result<()> { + let mut buckets = Vec::new(); + self.dirty_buckets.retain(|key| { + if let Some(bucket) = self.buckets.get(&key) { + buckets.push(crate::db::rate_buckets::Model { + user_id: key.0, + rate_limit_name: key.1.clone(), + token_count: bucket.token_count as i32, + last_refill: bucket.last_refill.naive_utc(), + }); + } + false + }); + + match self.db.save_rate_buckets(&buckets).await { + Ok(()) => Ok(()), + Err(err) => { + for bucket in buckets { + self.dirty_buckets + .insert((bucket.user_id, bucket.rate_limit_name)); + } + Err(err) + } + } + } +} + +#[derive(Clone)] +struct RateBucket { + capacity: usize, + token_count: usize, + refill_time_per_token: Duration, + last_refill: DateTimeUtc, +} + +impl RateBucket { + fn new(capacity: usize, refill_duration: Duration, now: DateTimeUtc) -> Self { + RateBucket { + capacity, + token_count: capacity, + refill_time_per_token: refill_duration / capacity as i32, + last_refill: now, + } + } + + fn allow(&mut self, now: DateTimeUtc) -> bool { + self.refill(now); + if self.token_count > 0 { + self.token_count -= 1; + true + } else { + false + } + } + + fn refill(&mut self, now: DateTimeUtc) { + let elapsed = now - self.last_refill; + if elapsed >= self.refill_time_per_token { + let new_tokens = + elapsed.num_milliseconds() / self.refill_time_per_token.num_milliseconds(); + + self.token_count = (self.token_count + new_tokens as usize).min(self.capacity); + self.last_refill = now; + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::{NewUserParams, TestDb}; + use gpui::TestAppContext; + + 
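The `RateBucket::refill` logic above spreads the refill interval evenly across the bucket's capacity (`refill_time_per_token = refill_duration / capacity`) and, on each check, credits `elapsed_ms / per_token_ms` whole tokens capped at the capacity. A small standalone sketch of that arithmetic, with arbitrarily chosen values:

```rust
use chrono::Duration;

fn main() {
    // Mirrors RateBucket::new: a capacity of 2 tokens refilled over 2 seconds
    // works out to one token per second.
    let capacity = 2usize;
    let refill_duration = Duration::seconds(2);
    let refill_time_per_token = refill_duration / capacity as i32;
    assert_eq!(refill_time_per_token, Duration::seconds(1));

    // Mirrors RateBucket::refill: starting from an empty bucket, after 1.5s
    // exactly one whole token has accrued (integer division on milliseconds),
    // capped at the bucket's capacity.
    let elapsed = Duration::milliseconds(1500);
    let new_tokens = elapsed.num_milliseconds() / refill_time_per_token.num_milliseconds();
    let token_count = (new_tokens as usize).min(capacity);
    assert_eq!(token_count, 1);
}
```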
#[gpui::test] + async fn test_rate_limiter(cx: &mut TestAppContext) { + let test_db = TestDb::sqlite(cx.executor().clone()); + let db = test_db.db().clone(); + let user_1 = db + .create_user( + "user-1@zed.dev", + false, + NewUserParams { + github_login: "user-1".into(), + github_user_id: 1, + }, + ) + .await + .unwrap() + .user_id; + let user_2 = db + .create_user( + "user-2@zed.dev", + false, + NewUserParams { + github_login: "user-2".into(), + github_user_id: 2, + }, + ) + .await + .unwrap() + .user_id; + + let mut now = Utc::now(); + + let rate_limiter = RateLimiter::new(db.clone()); + + // User 1 can access resource A two times before being rate-limited. + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap(); + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap(); + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap_err(); + + // User 2 can access resource A and user 1 can access resource B. + rate_limiter + .check_internal::(user_2, now) + .await + .unwrap(); + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap(); + + // After one second, user 1 can make another request before being rate-limited again. + now += Duration::seconds(1); + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap(); + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap_err(); + + rate_limiter.save().await.unwrap(); + + // Rate limits are reloaded from the database, so user A is still rate-limited + // for resource A. + let rate_limiter = RateLimiter::new(db.clone()); + rate_limiter + .check_internal::(user_1, now) + .await + .unwrap_err(); + } + + struct RateLimitA; + + impl RateLimit for RateLimitA { + fn capacity() -> usize { + 2 + } + + fn refill_duration() -> Duration { + Duration::seconds(2) + } + + fn db_name() -> &'static str { + "rate-limit-a" + } + } + + struct RateLimitB; + + impl RateLimit for RateLimitB { + fn capacity() -> usize { + 10 + } + + fn refill_duration() -> Duration { + Duration::seconds(3) + } + + fn db_name() -> &'static str { + "rate-limit-b" + } + } +} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs new file mode 100644 index 0000000..18d6b00 --- /dev/null +++ b/crates/collab/src/rpc.rs @@ -0,0 +1,5350 @@ +mod connection_pool; + +use crate::{ + auth, + db::{ + self, dev_server, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser, + CreatedChannelMessage, Database, DevServerId, DevServerProjectId, InviteMemberResult, + MembershipUpdated, MessageId, NotificationId, PrincipalId, Project, ProjectId, + RejoinedProject, RemoveChannelMemberResult, ReplicaId, RespondToChannelInvite, RoomId, + ServerId, UpdatedChannelMessage, User, UserId, + }, + executor::Executor, + AppState, Error, RateLimit, RateLimiter, Result, +}; +use anyhow::{anyhow, Context as _}; +use async_tungstenite::tungstenite::{ + protocol::CloseFrame as TungsteniteCloseFrame, Message as TungsteniteMessage, +}; +use axum::{ + body::Body, + extract::{ + ws::{CloseFrame as AxumCloseFrame, Message as AxumMessage}, + ConnectInfo, WebSocketUpgrade, + }, + headers::{Header, HeaderName}, + http::StatusCode, + middleware, + response::IntoResponse, + routing::get, + Extension, Router, TypedHeader, +}; +use collections::{HashMap, HashSet}; +pub use connection_pool::{ConnectionPool, ZedVersion}; +use core::fmt::{self, Debug, Formatter}; +use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; +use sha2::Digest; +use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; + +use futures::{ + channel::oneshot, + 
future::{self, BoxFuture}, + stream::FuturesUnordered, + FutureExt, SinkExt, StreamExt, TryStreamExt, +}; +use http::IsahcHttpClient; +use prometheus::{register_int_gauge, IntGauge}; +use rpc::{ + proto::{ + self, Ack, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LanguageModelRole, + LiveKitConnectionInfo, RequestMessage, ShareProject, UpdateChannelBufferCollaborators, + }, + Connection, ConnectionId, ErrorCode, ErrorCodeExt, ErrorExt, Peer, Receipt, TypedEnvelope, +}; +use semantic_version::SemanticVersion; +use serde::{Serialize, Serializer}; +use std::{ + any::TypeId, + future::Future, + marker::PhantomData, + mem, + net::SocketAddr, + ops::{Deref, DerefMut}, + rc::Rc, + sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, OnceLock, + }, + time::{Duration, Instant}, +}; +use time::OffsetDateTime; +use tokio::sync::{watch, Semaphore}; +use tower::ServiceBuilder; +use tracing::{ + field::{self}, + info_span, instrument, Instrument, +}; + +use self::connection_pool::VersionedMessage; + +pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30); + +// kubernetes gives terminated pods 10s to shutdown gracefully. After they're gone, we can clean up old resources. +pub const CLEANUP_TIMEOUT: Duration = Duration::from_secs(15); + +const MESSAGE_COUNT_PER_PAGE: usize = 100; +const MAX_MESSAGE_LEN: usize = 1024; +const NOTIFICATION_COUNT_PER_PAGE: usize = 50; + +type MessageHandler = + Box, Session) -> BoxFuture<'static, ()>>; + +struct Response { + peer: Arc, + receipt: Receipt, + responded: Arc, +} + +impl Response { + fn send(self, payload: R::Response) -> Result<()> { + self.responded.store(true, SeqCst); + self.peer.respond(self.receipt, payload)?; + Ok(()) + } +} + +struct StreamingResponse { + peer: Arc, + receipt: Receipt, +} + +impl StreamingResponse { + fn send(&self, payload: R::Response) -> Result<()> { + self.peer.respond(self.receipt, payload)?; + Ok(()) + } +} + +#[derive(Clone, Debug)] +pub enum Principal { + User(User), + Impersonated { user: User, admin: User }, + DevServer(dev_server::Model), +} + +impl Principal { + fn update_span(&self, span: &tracing::Span) { + match &self { + Principal::User(user) => { + span.record("user_id", &user.id.0); + span.record("login", &user.github_login); + } + Principal::Impersonated { user, admin } => { + span.record("user_id", &user.id.0); + span.record("login", &user.github_login); + span.record("impersonator", &admin.github_login); + } + Principal::DevServer(dev_server) => { + span.record("dev_server_id", &dev_server.id.0); + } + } + } +} + +#[derive(Clone)] +struct Session { + principal: Principal, + connection_id: ConnectionId, + db: Arc>, + peer: Arc, + connection_pool: Arc>, + live_kit_client: Option>, + supermaven_client: Option>, + http_client: Arc, + rate_limiter: Arc, + _executor: Executor, +} + +impl Session { + async fn db(&self) -> tokio::sync::MutexGuard { + #[cfg(test)] + tokio::task::yield_now().await; + let guard = self.db.lock().await; + #[cfg(test)] + tokio::task::yield_now().await; + guard + } + + async fn connection_pool(&self) -> ConnectionPoolGuard<'_> { + #[cfg(test)] + tokio::task::yield_now().await; + let guard = self.connection_pool.lock(); + ConnectionPoolGuard { + guard, + _not_send: PhantomData, + } + } + + fn for_user(self) -> Option { + UserSession::new(self) + } + + fn for_dev_server(self) -> Option { + DevServerSession::new(self) + } + + fn user_id(&self) -> Option { + match &self.principal { + Principal::User(user) => Some(user.id), + Principal::Impersonated { user, .. 
} => Some(user.id), + Principal::DevServer(_) => None, + } + } + + fn is_staff(&self) -> bool { + match &self.principal { + Principal::User(user) => user.admin, + Principal::Impersonated { .. } => true, + Principal::DevServer(_) => false, + } + } + + fn dev_server_id(&self) -> Option { + match &self.principal { + Principal::User(_) | Principal::Impersonated { .. } => None, + Principal::DevServer(dev_server) => Some(dev_server.id), + } + } + + fn principal_id(&self) -> PrincipalId { + match &self.principal { + Principal::User(user) => PrincipalId::UserId(user.id), + Principal::Impersonated { user, .. } => PrincipalId::UserId(user.id), + Principal::DevServer(dev_server) => PrincipalId::DevServerId(dev_server.id), + } + } +} + +impl Debug for Session { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + let mut result = f.debug_struct("Session"); + match &self.principal { + Principal::User(user) => { + result.field("user", &user.github_login); + } + Principal::Impersonated { user, admin } => { + result.field("user", &user.github_login); + result.field("impersonator", &admin.github_login); + } + Principal::DevServer(dev_server) => { + result.field("dev_server", &dev_server.id); + } + } + result.field("connection_id", &self.connection_id).finish() + } +} + +struct UserSession(Session); + +impl UserSession { + pub fn new(s: Session) -> Option { + s.user_id().map(|_| UserSession(s)) + } + pub fn user_id(&self) -> UserId { + self.0.user_id().unwrap() + } + + pub fn email(&self) -> Option { + match &self.0.principal { + Principal::User(user) => user.email_address.clone(), + Principal::Impersonated { user, .. } => user.email_address.clone(), + Principal::DevServer(..) => None, + } + } +} + +impl Deref for UserSession { + type Target = Session; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for UserSession { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +struct DevServerSession(Session); + +impl DevServerSession { + pub fn new(s: Session) -> Option { + s.dev_server_id().map(|_| DevServerSession(s)) + } + pub fn dev_server_id(&self) -> DevServerId { + self.0.dev_server_id().unwrap() + } + + fn dev_server(&self) -> &dev_server::Model { + match &self.0.principal { + Principal::DevServer(dev_server) => dev_server, + _ => unreachable!(), + } + } +} + +impl Deref for DevServerSession { + type Target = Session; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for DevServerSession { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +fn user_handler( + handler: impl 'static + Send + Sync + Fn(M, Response, UserSession) -> Fut, +) -> impl 'static + Send + Sync + Fn(M, Response, Session) -> BoxFuture<'static, Result<()>> +where + Fut: Send + Future>, +{ + let handler = Arc::new(handler); + move |message, response, session| { + let handler = handler.clone(); + Box::pin(async move { + if let Some(user_session) = session.for_user() { + Ok(handler(message, response, user_session).await?) 
+ } else { + Err(Error::Internal(anyhow!( + "must be a user to call {}", + M::NAME + ))) + } + }) + } +} + +fn dev_server_handler( + handler: impl 'static + Send + Sync + Fn(M, Response, DevServerSession) -> Fut, +) -> impl 'static + Send + Sync + Fn(M, Response, Session) -> BoxFuture<'static, Result<()>> +where + Fut: Send + Future>, +{ + let handler = Arc::new(handler); + move |message, response, session| { + let handler = handler.clone(); + Box::pin(async move { + if let Some(dev_server_session) = session.for_dev_server() { + Ok(handler(message, response, dev_server_session).await?) + } else { + Err(Error::Internal(anyhow!( + "must be a dev server to call {}", + M::NAME + ))) + } + }) + } +} + +fn user_message_handler( + handler: impl 'static + Send + Sync + Fn(M, UserSession) -> InnertRetFut, +) -> impl 'static + Send + Sync + Fn(M, Session) -> BoxFuture<'static, Result<()>> +where + InnertRetFut: Send + Future>, +{ + let handler = Arc::new(handler); + move |message, session| { + let handler = handler.clone(); + Box::pin(async move { + if let Some(user_session) = session.for_user() { + Ok(handler(message, user_session).await?) + } else { + Err(Error::Internal(anyhow!( + "must be a user to call {}", + M::NAME + ))) + } + }) + } +} + +struct DbHandle(Arc); + +impl Deref for DbHandle { + type Target = Database; + + fn deref(&self) -> &Self::Target { + self.0.as_ref() + } +} + +pub struct Server { + id: parking_lot::Mutex, + peer: Arc, + pub(crate) connection_pool: Arc>, + app_state: Arc, + handlers: HashMap, + teardown: watch::Sender, +} + +pub(crate) struct ConnectionPoolGuard<'a> { + guard: parking_lot::MutexGuard<'a, ConnectionPool>, + _not_send: PhantomData>, +} + +#[derive(Serialize)] +pub struct ServerSnapshot<'a> { + peer: &'a Peer, + #[serde(serialize_with = "serialize_deref")] + connection_pool: ConnectionPoolGuard<'a>, +} + +pub fn serialize_deref(value: &T, serializer: S) -> Result +where + S: Serializer, + T: Deref, + U: Serialize, +{ + Serialize::serialize(value.deref(), serializer) +} + +impl Server { + pub fn new(id: ServerId, app_state: Arc) -> Arc { + let mut server = Self { + id: parking_lot::Mutex::new(id), + peer: Peer::new(id.0 as u32), + app_state: app_state.clone(), + connection_pool: Default::default(), + handlers: Default::default(), + teardown: watch::channel(false).0, + }; + + server + .add_request_handler(ping) + .add_request_handler(user_handler(create_room)) + .add_request_handler(user_handler(join_room)) + .add_request_handler(user_handler(rejoin_room)) + .add_request_handler(user_handler(leave_room)) + .add_request_handler(user_handler(set_room_participant_role)) + .add_request_handler(user_handler(call)) + .add_request_handler(user_handler(cancel_call)) + .add_message_handler(user_message_handler(decline_call)) + .add_request_handler(user_handler(update_participant_location)) + .add_request_handler(user_handler(share_project)) + .add_message_handler(unshare_project) + .add_request_handler(user_handler(join_project)) + .add_request_handler(user_handler(join_hosted_project)) + .add_request_handler(user_handler(rejoin_dev_server_projects)) + .add_request_handler(user_handler(create_dev_server_project)) + .add_request_handler(user_handler(delete_dev_server_project)) + .add_request_handler(user_handler(create_dev_server)) + .add_request_handler(user_handler(regenerate_dev_server_token)) + .add_request_handler(user_handler(rename_dev_server)) + .add_request_handler(user_handler(delete_dev_server)) + 
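A short aside on the registration chain that begins above and continues through the next hunks: the `Server` keeps one boxed, type-erased handler per message `TypeId`, and adapters such as `user_handler` narrow the `Session` to the right principal before invoking the typed handler. The snippet below is a deliberately tiny, self-contained miniature of that type-erasure pattern; `MiniServer`, `Ping`, and `dispatch` are invented stand-ins rather than the real types.

```rust
use std::{
    any::{Any, TypeId},
    collections::HashMap,
};

// Each message type registers a boxed closure keyed by its TypeId; dispatch
// downcasts the erased message back to the concrete type.
struct MiniServer {
    handlers: HashMap<TypeId, Box<dyn Fn(Box<dyn Any>)>>,
}

impl MiniServer {
    fn new() -> Self {
        Self { handlers: HashMap::new() }
    }

    fn add_message_handler<M: Any>(&mut self, handler: impl Fn(M) + 'static) -> &mut Self {
        let prev = self.handlers.insert(
            TypeId::of::<M>(),
            Box::new(move |message: Box<dyn Any>| handler(*message.downcast::<M>().unwrap())),
        );
        assert!(prev.is_none(), "registered a handler for the same message twice");
        self
    }

    fn dispatch(&self, message: Box<dyn Any>) {
        match self.handlers.get(&(*message).type_id()) {
            Some(handler) => handler(message),
            None => eprintln!("no message handler"),
        }
    }
}

struct Ping {
    payload: String,
}

fn main() {
    let mut server = MiniServer::new();
    server.add_message_handler(|ping: Ping| println!("got ping: {}", ping.payload));
    server.dispatch(Box::new(Ping { payload: "hello".into() }));
}
```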
.add_request_handler(dev_server_handler(share_dev_server_project)) + .add_request_handler(dev_server_handler(shutdown_dev_server)) + .add_request_handler(dev_server_handler(reconnect_dev_server)) + .add_message_handler(user_message_handler(leave_project)) + .add_request_handler(update_project) + .add_request_handler(update_worktree) + .add_message_handler(start_language_server) + .add_message_handler(update_language_server) + .add_message_handler(update_diagnostic_summary) + .add_message_handler(update_worktree_settings) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_read_only_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_versioned_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_versioned_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_request_handler(user_handler( + forward_mutating_project_request::, + )) + .add_message_handler(create_buffer_for_peer) + .add_request_handler(update_buffer) + .add_message_handler(broadcast_project_message_from_host::) + .add_message_handler(broadcast_project_message_from_host::) + .add_message_handler(broadcast_project_message_from_host::) + .add_message_handler(broadcast_project_message_from_host::) + .add_message_handler(broadcast_project_message_from_host::) + .add_request_handler(get_users) + .add_request_handler(user_handler(fuzzy_search_users)) + 
.add_request_handler(user_handler(request_contact)) + .add_request_handler(user_handler(remove_contact)) + .add_request_handler(user_handler(respond_to_contact_request)) + .add_request_handler(user_handler(create_channel)) + .add_request_handler(user_handler(delete_channel)) + .add_request_handler(user_handler(invite_channel_member)) + .add_request_handler(user_handler(remove_channel_member)) + .add_request_handler(user_handler(set_channel_member_role)) + .add_request_handler(user_handler(set_channel_visibility)) + .add_request_handler(user_handler(rename_channel)) + .add_request_handler(user_handler(join_channel_buffer)) + .add_request_handler(user_handler(leave_channel_buffer)) + .add_message_handler(user_message_handler(update_channel_buffer)) + .add_request_handler(user_handler(rejoin_channel_buffers)) + .add_request_handler(user_handler(get_channel_members)) + .add_request_handler(user_handler(respond_to_channel_invite)) + .add_request_handler(user_handler(join_channel)) + .add_request_handler(user_handler(join_channel_chat)) + .add_message_handler(user_message_handler(leave_channel_chat)) + .add_request_handler(user_handler(send_channel_message)) + .add_request_handler(user_handler(remove_channel_message)) + .add_request_handler(user_handler(update_channel_message)) + .add_request_handler(user_handler(get_channel_messages)) + .add_request_handler(user_handler(get_channel_messages_by_id)) + .add_request_handler(user_handler(get_notifications)) + .add_request_handler(user_handler(mark_notification_as_read)) + .add_request_handler(user_handler(move_channel)) + .add_request_handler(user_handler(follow)) + .add_message_handler(user_message_handler(unfollow)) + .add_message_handler(user_message_handler(update_followers)) + .add_request_handler(user_handler(get_private_user_info)) + .add_message_handler(user_message_handler(acknowledge_channel_message)) + .add_message_handler(user_message_handler(acknowledge_buffer_version)) + .add_request_handler(user_handler(get_supermaven_api_key)) + .add_streaming_request_handler({ + let app_state = app_state.clone(); + move |request, response, session| { + complete_with_language_model( + request, + response, + session, + app_state.config.openai_api_key.clone(), + app_state.config.google_ai_api_key.clone(), + app_state.config.anthropic_api_key.clone(), + ) + } + }) + .add_request_handler({ + let app_state = app_state.clone(); + user_handler(move |request, response, session| { + count_tokens_with_language_model( + request, + response, + session, + app_state.config.google_ai_api_key.clone(), + ) + }) + }) + .add_request_handler({ + user_handler(move |request, response, session| { + get_cached_embeddings(request, response, session) + }) + }) + .add_request_handler({ + let app_state = app_state.clone(); + user_handler(move |request, response, session| { + compute_embeddings( + request, + response, + session, + app_state.config.openai_api_key.clone(), + ) + }) + }); + + Arc::new(server) + } + + pub async fn start(&self) -> Result<()> { + let server_id = *self.id.lock(); + let app_state = self.app_state.clone(); + let peer = self.peer.clone(); + let timeout = self.app_state.executor.sleep(CLEANUP_TIMEOUT); + let pool = self.connection_pool.clone(); + let live_kit_client = self.app_state.live_kit_client.clone(); + + let span = info_span!("start server"); + self.app_state.executor.spawn_detached( + async move { + tracing::info!("waiting for cleanup timeout"); + timeout.await; + tracing::info!("cleanup timeout expired, retrieving stale rooms"); + if let 
Some((room_ids, channel_ids)) = app_state + .db + .stale_server_resource_ids(&app_state.config.zed_environment, server_id) + .await + .trace_err() + { + tracing::info!(stale_room_count = room_ids.len(), "retrieved stale rooms"); + tracing::info!( + stale_channel_buffer_count = channel_ids.len(), + "retrieved stale channel buffers" + ); + + for channel_id in channel_ids { + if let Some(refreshed_channel_buffer) = app_state + .db + .clear_stale_channel_buffer_collaborators(channel_id, server_id) + .await + .trace_err() + { + for connection_id in refreshed_channel_buffer.connection_ids { + peer.send( + connection_id, + proto::UpdateChannelBufferCollaborators { + channel_id: channel_id.to_proto(), + collaborators: refreshed_channel_buffer + .collaborators + .clone(), + }, + ) + .trace_err(); + } + } + } + + for room_id in room_ids { + let mut contacts_to_update = HashSet::default(); + let mut canceled_calls_to_user_ids = Vec::new(); + let mut live_kit_room = String::new(); + let mut delete_live_kit_room = false; + + if let Some(mut refreshed_room) = app_state + .db + .clear_stale_room_participants(room_id, server_id) + .await + .trace_err() + { + tracing::info!( + room_id = room_id.0, + new_participant_count = refreshed_room.room.participants.len(), + "refreshed room" + ); + room_updated(&refreshed_room.room, &peer); + if let Some(channel) = refreshed_room.channel.as_ref() { + channel_updated(channel, &refreshed_room.room, &peer, &pool.lock()); + } + contacts_to_update + .extend(refreshed_room.stale_participant_user_ids.iter().copied()); + contacts_to_update + .extend(refreshed_room.canceled_calls_to_user_ids.iter().copied()); + canceled_calls_to_user_ids = + mem::take(&mut refreshed_room.canceled_calls_to_user_ids); + live_kit_room = mem::take(&mut refreshed_room.room.live_kit_room); + delete_live_kit_room = refreshed_room.room.participants.is_empty(); + } + + { + let pool = pool.lock(); + for canceled_user_id in canceled_calls_to_user_ids { + for connection_id in pool.user_connection_ids(canceled_user_id) { + peer.send( + connection_id, + proto::CallCanceled { + room_id: room_id.to_proto(), + }, + ) + .trace_err(); + } + } + } + + for user_id in contacts_to_update { + let busy = app_state.db.is_user_busy(user_id).await.trace_err(); + let contacts = app_state.db.get_contacts(user_id).await.trace_err(); + if let Some((busy, contacts)) = busy.zip(contacts) { + let pool = pool.lock(); + let updated_contact = contact_for_user(user_id, busy, &pool); + for contact in contacts { + if let db::Contact::Accepted { + user_id: contact_user_id, + .. 
+ } = contact + { + for contact_conn_id in + pool.user_connection_ids(contact_user_id) + { + peer.send( + contact_conn_id, + proto::UpdateContacts { + contacts: vec![updated_contact.clone()], + remove_contacts: Default::default(), + incoming_requests: Default::default(), + remove_incoming_requests: Default::default(), + outgoing_requests: Default::default(), + remove_outgoing_requests: Default::default(), + }, + ) + .trace_err(); + } + } + } + } + } + + if let Some(live_kit) = live_kit_client.as_ref() { + if delete_live_kit_room { + live_kit.delete_room(live_kit_room).await.trace_err(); + } + } + } + } + + app_state + .db + .delete_stale_servers(&app_state.config.zed_environment, server_id) + .await + .trace_err(); + } + .instrument(span), + ); + Ok(()) + } + + pub fn teardown(&self) { + self.peer.teardown(); + self.connection_pool.lock().reset(); + let _ = self.teardown.send(true); + } + + #[cfg(test)] + pub fn reset(&self, id: ServerId) { + self.teardown(); + *self.id.lock() = id; + self.peer.reset(id.0 as u32); + let _ = self.teardown.send(false); + } + + #[cfg(test)] + pub fn id(&self) -> ServerId { + *self.id.lock() + } + + fn add_handler(&mut self, handler: F) -> &mut Self + where + F: 'static + Send + Sync + Fn(TypedEnvelope, Session) -> Fut, + Fut: 'static + Send + Future>, + M: EnvelopedMessage, + { + let prev_handler = self.handlers.insert( + TypeId::of::(), + Box::new(move |envelope, session| { + let envelope = envelope.into_any().downcast::>().unwrap(); + let received_at = envelope.received_at; + tracing::info!("message received"); + let start_time = Instant::now(); + let future = (handler)(*envelope, session); + async move { + let result = future.await; + let total_duration_ms = received_at.elapsed().as_micros() as f64 / 1000.0; + let processing_duration_ms = start_time.elapsed().as_micros() as f64 / 1000.0; + let queue_duration_ms = total_duration_ms - processing_duration_ms; + let payload_type = M::NAME; + + match result { + Err(error) => { + tracing::error!( + ?error, + total_duration_ms, + processing_duration_ms, + queue_duration_ms, + payload_type, + "error handling message" + ) + } + Ok(()) => tracing::info!( + total_duration_ms, + processing_duration_ms, + queue_duration_ms, + "finished handling message" + ), + } + } + .boxed() + }), + ); + if prev_handler.is_some() { + panic!("registered a handler for the same message twice"); + } + self + } + + fn add_message_handler(&mut self, handler: F) -> &mut Self + where + F: 'static + Send + Sync + Fn(M, Session) -> Fut, + Fut: 'static + Send + Future>, + M: EnvelopedMessage, + { + self.add_handler(move |envelope, session| handler(envelope.payload, session)); + self + } + + fn add_request_handler(&mut self, handler: F) -> &mut Self + where + F: 'static + Send + Sync + Fn(M, Response, Session) -> Fut, + Fut: Send + Future>, + M: RequestMessage, + { + let handler = Arc::new(handler); + self.add_handler(move |envelope, session| { + let receipt = envelope.receipt(); + let handler = handler.clone(); + async move { + let peer = session.peer.clone(); + let responded = Arc::new(AtomicBool::default()); + let response = Response { + peer: peer.clone(), + responded: responded.clone(), + receipt, + }; + match (handler)(envelope.payload, response, session).await { + Ok(()) => { + if responded.load(std::sync::atomic::Ordering::SeqCst) { + Ok(()) + } else { + Err(anyhow!("handler did not send a response"))? 
+ } + } + Err(error) => { + let proto_err = match &error { + Error::Internal(err) => err.to_proto(), + _ => ErrorCode::Internal.message(format!("{}", error)).to_proto(), + }; + peer.respond_with_error(receipt, proto_err)?; + Err(error) + } + } + } + }) + } + + fn add_streaming_request_handler(&mut self, handler: F) -> &mut Self + where + F: 'static + Send + Sync + Fn(M, StreamingResponse, Session) -> Fut, + Fut: Send + Future>, + M: RequestMessage, + { + let handler = Arc::new(handler); + self.add_handler(move |envelope, session| { + let receipt = envelope.receipt(); + let handler = handler.clone(); + async move { + let peer = session.peer.clone(); + let response = StreamingResponse { + peer: peer.clone(), + receipt, + }; + match (handler)(envelope.payload, response, session).await { + Ok(()) => { + peer.end_stream(receipt)?; + Ok(()) + } + Err(error) => { + let proto_err = match &error { + Error::Internal(err) => err.to_proto(), + _ => ErrorCode::Internal.message(format!("{}", error)).to_proto(), + }; + peer.respond_with_error(receipt, proto_err)?; + Err(error) + } + } + } + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn handle_connection( + self: &Arc, + connection: Connection, + address: String, + principal: Principal, + zed_version: ZedVersion, + send_connection_id: Option>, + executor: Executor, + ) -> impl Future { + let this = self.clone(); + let span = info_span!("handle connection", %address, + connection_id=field::Empty, + user_id=field::Empty, + login=field::Empty, + impersonator=field::Empty, + dev_server_id=field::Empty + ); + principal.update_span(&span); + + let mut teardown = self.teardown.subscribe(); + async move { + if *teardown.borrow() { + tracing::error!("server is tearing down"); + return + } + let (connection_id, handle_io, mut incoming_rx) = this + .peer + .add_connection(connection, { + let executor = executor.clone(); + move |duration| executor.sleep(duration) + }); + tracing::Span::current().record("connection_id", format!("{}", connection_id)); + tracing::info!("connection opened"); + + let http_client = match IsahcHttpClient::new() { + Ok(http_client) => Arc::new(http_client), + Err(error) => { + tracing::error!(?error, "failed to create HTTP client"); + return; + } + }; + + let supermaven_client = if let Some(supermaven_admin_api_key) = this.app_state.config.supermaven_admin_api_key.clone() { + Some(Arc::new(SupermavenAdminApi::new( + supermaven_admin_api_key.to_string(), + http_client.clone(), + ))) + } else { + None + }; + + let session = Session { + principal: principal.clone(), + connection_id, + db: Arc::new(tokio::sync::Mutex::new(DbHandle(this.app_state.db.clone()))), + peer: this.peer.clone(), + connection_pool: this.connection_pool.clone(), + live_kit_client: this.app_state.live_kit_client.clone(), + http_client, + rate_limiter: this.app_state.rate_limiter.clone(), + _executor: executor.clone(), + supermaven_client, + }; + + if let Err(error) = this.send_initial_client_update(connection_id, &principal, zed_version, send_connection_id, &session).await { + tracing::error!(?error, "failed to send initial client update"); + return; + } + + let handle_io = handle_io.fuse(); + futures::pin_mut!(handle_io); + + // Handlers for foreground messages are pushed into the following `FuturesUnordered`. + // This prevents deadlocks when e.g., client A performs a request to client B and + // client B performs a request to client A. 
If both clients stop processing further + // messages until their respective request completes, they won't have a chance to + // respond to the other client's request and cause a deadlock. + // + // This arrangement ensures we will attempt to process earlier messages first, but fall + // back to processing messages arrived later in the spirit of making progress. + let mut foreground_message_handlers = FuturesUnordered::new(); + let concurrent_handlers = Arc::new(Semaphore::new(256)); + loop { + let next_message = async { + let permit = concurrent_handlers.clone().acquire_owned().await.unwrap(); + let message = incoming_rx.next().await; + (permit, message) + }.fuse(); + futures::pin_mut!(next_message); + futures::select_biased! { + _ = teardown.changed().fuse() => return, + result = handle_io => { + if let Err(error) = result { + tracing::error!(?error, "error handling I/O"); + } + break; + } + _ = foreground_message_handlers.next() => {} + next_message = next_message => { + let (permit, message) = next_message; + if let Some(message) = message { + let type_name = message.payload_type_name(); + // note: we copy all the fields from the parent span so we can query them in the logs. + // (https://github.com/tokio-rs/tracing/issues/2670). + let span = tracing::info_span!("receive message", %connection_id, %address, type_name, + user_id=field::Empty, + login=field::Empty, + impersonator=field::Empty, + dev_server_id=field::Empty + ); + principal.update_span(&span); + let span_enter = span.enter(); + if let Some(handler) = this.handlers.get(&message.payload_type_id()) { + let is_background = message.is_background(); + let handle_message = (handler)(message, session.clone()); + drop(span_enter); + + let handle_message = async move { + handle_message.await; + drop(permit); + }.instrument(span); + if is_background { + executor.spawn_detached(handle_message); + } else { + foreground_message_handlers.push(handle_message); + } + } else { + tracing::error!("no message handler"); + } + } else { + tracing::info!("connection closed"); + break; + } + } + } + } + + drop(foreground_message_handlers); + tracing::info!("signing out"); + if let Err(error) = connection_lost(session, teardown, executor).await { + tracing::error!(?error, "error signing out"); + } + + }.instrument(span) + } + + async fn send_initial_client_update( + &self, + connection_id: ConnectionId, + principal: &Principal, + zed_version: ZedVersion, + mut send_connection_id: Option>, + session: &Session, + ) -> Result<()> { + self.peer.send( + connection_id, + proto::Hello { + peer_id: Some(connection_id.into()), + }, + )?; + tracing::info!("sent hello message"); + if let Some(send_connection_id) = send_connection_id.take() { + let _ = send_connection_id.send(connection_id); + } + + match principal { + Principal::User(user) | Principal::Impersonated { user, admin: _ } => { + if !user.connected_once { + self.peer.send(connection_id, proto::ShowContacts {})?; + self.app_state + .db + .set_user_connected_once(user.id, true) + .await?; + } + + let (contacts, channels_for_user, channel_invites, dev_server_projects) = + future::try_join4( + self.app_state.db.get_contacts(user.id), + self.app_state.db.get_channels_for_user(user.id), + self.app_state.db.get_channel_invites_for_user(user.id), + self.app_state.db.dev_server_projects_update(user.id), + ) + .await?; + + { + let mut pool = self.connection_pool.lock(); + pool.add_connection(connection_id, user.id, user.admin, zed_version); + for membership in &channels_for_user.channel_memberships { + 
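The connection loop above parks in-flight foreground handlers in a `FuturesUnordered` and keeps selecting over both the handler set and the incoming stream, so a connection can answer an incoming request while one of its own requests is still outstanding. Below is a self-contained miniature of that arrangement, assuming only the `futures` crate; plain strings and a oneshot reply stand in for the real envelopes and sessions.

```rust
use futures::{channel::mpsc, stream::FuturesUnordered, FutureExt, StreamExt};

fn main() {
    futures::executor::block_on(async {
        let (incoming_tx, mut incoming_rx) = mpsc::unbounded::<String>();
        let (reply_tx, reply_rx) = futures::channel::oneshot::channel::<()>();

        // First message: a "request" that cannot finish until a later message
        // arrives. Second message: the "response" that completes it.
        incoming_tx.unbounded_send("request".to_string()).unwrap();
        incoming_tx.unbounded_send("response".to_string()).unwrap();
        drop(incoming_tx);

        let mut reply_tx = Some(reply_tx);
        let mut reply_rx = Some(reply_rx);
        let mut handlers = FuturesUnordered::new();

        loop {
            futures::select_biased! {
                _ = handlers.next() => {}
                message = incoming_rx.next() => {
                    match message {
                        Some(m) if m == "request" => {
                            // Park the long-running handler instead of awaiting
                            // it inline; the loop stays free to process the
                            // "response", which is what avoids the deadlock.
                            let rx = reply_rx.take().unwrap();
                            handlers.push(async move { rx.await.ok(); }.boxed());
                        }
                        Some(_) => {
                            reply_tx.take().unwrap().send(()).ok();
                        }
                        None => break,
                    }
                }
            }
        }

        // Drain any handler still in flight before shutting down.
        while handlers.next().await.is_some() {}
        println!("no deadlock");
    });
}
```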
pool.subscribe_to_channel(user.id, membership.channel_id, membership.role) + } + self.peer.send( + connection_id, + build_initial_contacts_update(contacts, &pool), + )?; + self.peer.send( + connection_id, + build_update_user_channels(&channels_for_user), + )?; + self.peer.send( + connection_id, + build_channels_update(channels_for_user, channel_invites), + )?; + } + send_dev_server_projects_update(user.id, dev_server_projects, session).await; + + if let Some(incoming_call) = + self.app_state.db.incoming_call_for_user(user.id).await? + { + self.peer.send(connection_id, incoming_call)?; + } + + update_user_contacts(user.id, &session).await?; + } + Principal::DevServer(dev_server) => { + { + let mut pool = self.connection_pool.lock(); + if pool.dev_server_connection_id(dev_server.id).is_some() { + return Err(anyhow!(ErrorCode::DevServerAlreadyOnline))?; + }; + pool.add_dev_server(connection_id, dev_server.id, zed_version); + } + + let projects = self + .app_state + .db + .get_projects_for_dev_server(dev_server.id) + .await?; + self.peer + .send(connection_id, proto::DevServerInstructions { projects })?; + + let status = self + .app_state + .db + .dev_server_projects_update(dev_server.user_id) + .await?; + send_dev_server_projects_update(dev_server.user_id, status, &session).await; + } + } + + Ok(()) + } + + pub async fn invite_code_redeemed( + self: &Arc, + inviter_id: UserId, + invitee_id: UserId, + ) -> Result<()> { + if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? { + if let Some(code) = &user.invite_code { + let pool = self.connection_pool.lock(); + let invitee_contact = contact_for_user(invitee_id, false, &pool); + for connection_id in pool.user_connection_ids(inviter_id) { + self.peer.send( + connection_id, + proto::UpdateContacts { + contacts: vec![invitee_contact.clone()], + ..Default::default() + }, + )?; + self.peer.send( + connection_id, + proto::UpdateInviteInfo { + url: format!("{}{}", self.app_state.config.invite_link_prefix, &code), + count: user.invite_count as u32, + }, + )?; + } + } + } + Ok(()) + } + + pub async fn invite_count_updated(self: &Arc, user_id: UserId) -> Result<()> { + if let Some(user) = self.app_state.db.get_user_by_id(user_id).await? 
{ + if let Some(invite_code) = &user.invite_code { + let pool = self.connection_pool.lock(); + for connection_id in pool.user_connection_ids(user_id) { + self.peer.send( + connection_id, + proto::UpdateInviteInfo { + url: format!( + "{}{}", + self.app_state.config.invite_link_prefix, invite_code + ), + count: user.invite_count as u32, + }, + )?; + } + } + } + Ok(()) + } + + pub async fn snapshot<'a>(self: &'a Arc) -> ServerSnapshot<'a> { + ServerSnapshot { + connection_pool: ConnectionPoolGuard { + guard: self.connection_pool.lock(), + _not_send: PhantomData, + }, + peer: &self.peer, + } + } +} + +impl<'a> Deref for ConnectionPoolGuard<'a> { + type Target = ConnectionPool; + + fn deref(&self) -> &Self::Target { + &self.guard + } +} + +impl<'a> DerefMut for ConnectionPoolGuard<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.guard + } +} + +impl<'a> Drop for ConnectionPoolGuard<'a> { + fn drop(&mut self) { + #[cfg(test)] + self.check_invariants(); + } +} + +fn broadcast( + sender_id: Option, + receiver_ids: impl IntoIterator, + mut f: F, +) where + F: FnMut(ConnectionId) -> anyhow::Result<()>, +{ + for receiver_id in receiver_ids { + if Some(receiver_id) != sender_id { + if let Err(error) = f(receiver_id) { + tracing::error!("failed to send to {:?} {}", receiver_id, error); + } + } + } +} + +pub struct ProtocolVersion(u32); + +impl Header for ProtocolVersion { + fn name() -> &'static HeaderName { + static ZED_PROTOCOL_VERSION: OnceLock = OnceLock::new(); + ZED_PROTOCOL_VERSION.get_or_init(|| HeaderName::from_static("x-zed-protocol-version")) + } + + fn decode<'i, I>(values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + let version = values + .next() + .ok_or_else(axum::headers::Error::invalid)? + .to_str() + .map_err(|_| axum::headers::Error::invalid())? + .parse() + .map_err(|_| axum::headers::Error::invalid())?; + Ok(Self(version)) + } + + fn encode>(&self, values: &mut E) { + values.extend([self.0.to_string().parse().unwrap()]); + } +} + +pub struct AppVersionHeader(SemanticVersion); +impl Header for AppVersionHeader { + fn name() -> &'static HeaderName { + static ZED_APP_VERSION: OnceLock = OnceLock::new(); + ZED_APP_VERSION.get_or_init(|| HeaderName::from_static("x-zed-app-version")) + } + + fn decode<'i, I>(values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + let version = values + .next() + .ok_or_else(axum::headers::Error::invalid)? + .to_str() + .map_err(|_| axum::headers::Error::invalid())? 
+ .parse() + .map_err(|_| axum::headers::Error::invalid())?; + Ok(Self(version)) + } + + fn encode>(&self, values: &mut E) { + values.extend([self.0.to_string().parse().unwrap()]); + } +} + +pub fn routes(server: Arc) -> Router<(), Body> { + Router::new() + .route("/rpc", get(handle_websocket_request)) + .layer( + ServiceBuilder::new() + .layer(Extension(server.app_state.clone())) + .layer(middleware::from_fn(auth::validate_header)), + ) + .route("/metrics", get(handle_metrics)) + .layer(Extension(server)) +} + +pub async fn handle_websocket_request( + TypedHeader(ProtocolVersion(protocol_version)): TypedHeader, + app_version_header: Option>, + ConnectInfo(socket_address): ConnectInfo, + Extension(server): Extension>, + Extension(principal): Extension, + ws: WebSocketUpgrade, +) -> axum::response::Response { + if protocol_version != rpc::PROTOCOL_VERSION { + return ( + StatusCode::UPGRADE_REQUIRED, + "client must be upgraded".to_string(), + ) + .into_response(); + } + + let Some(version) = app_version_header.map(|header| ZedVersion(header.0 .0)) else { + return ( + StatusCode::UPGRADE_REQUIRED, + "no version header found".to_string(), + ) + .into_response(); + }; + + if !version.can_collaborate() { + return ( + StatusCode::UPGRADE_REQUIRED, + "client must be upgraded".to_string(), + ) + .into_response(); + } + + let socket_address = socket_address.to_string(); + ws.on_upgrade(move |socket| { + let socket = socket + .map_ok(to_tungstenite_message) + .err_into() + .with(|message| async move { Ok(to_axum_message(message)) }); + let connection = Connection::new(Box::pin(socket)); + async move { + server + .handle_connection( + connection, + socket_address, + principal, + version, + None, + Executor::Production, + ) + .await; + } + }) +} + +pub async fn handle_metrics(Extension(server): Extension>) -> Result { + static CONNECTIONS_METRIC: OnceLock = OnceLock::new(); + let connections_metric = CONNECTIONS_METRIC + .get_or_init(|| register_int_gauge!("connections", "number of connections").unwrap()); + + let connections = server + .connection_pool + .lock() + .connections() + .filter(|connection| !connection.admin) + .count(); + connections_metric.set(connections as _); + + static SHARED_PROJECTS_METRIC: OnceLock = OnceLock::new(); + let shared_projects_metric = SHARED_PROJECTS_METRIC.get_or_init(|| { + register_int_gauge!( + "shared_projects", + "number of open projects with one or more guests" + ) + .unwrap() + }); + + let shared_projects = server.app_state.db.project_count_excluding_admins().await?; + shared_projects_metric.set(shared_projects as _); + + let encoder = prometheus::TextEncoder::new(); + let metric_families = prometheus::gather(); + let encoded_metrics = encoder + .encode_to_string(&metric_families) + .map_err(|err| anyhow!("{}", err))?; + Ok(encoded_metrics) +} + +#[instrument(err, skip(executor))] +async fn connection_lost( + session: Session, + mut teardown: watch::Receiver, + executor: Executor, +) -> Result<()> { + session.peer.disconnect(session.connection_id); + session + .connection_pool() + .await + .remove_connection(session.connection_id)?; + + session + .db() + .await + .connection_lost(session.connection_id) + .await + .trace_err(); + + futures::select_biased! 
{ + _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => { + match &session.principal { + Principal::User(_) | Principal::Impersonated{ user: _, admin:_ } => { + let session = session.for_user().unwrap(); + + log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id(), session.connection_id); + leave_room_for_session(&session, session.connection_id).await.trace_err(); + leave_channel_buffers_for_session(&session) + .await + .trace_err(); + + if !session + .connection_pool() + .await + .is_user_online(session.user_id()) + { + let db = session.db().await; + if let Some(room) = db.decline_call(None, session.user_id()).await.trace_err().flatten() { + room_updated(&room, &session.peer); + } + } + + update_user_contacts(session.user_id(), &session).await?; + }, + Principal::DevServer(_) => { + lost_dev_server_connection(&session.for_dev_server().unwrap()).await?; + }, + } + }, + _ = teardown.changed().fuse() => {} + } + + Ok(()) +} + +/// Acknowledges a ping from a client, used to keep the connection alive. +async fn ping(_: proto::Ping, response: Response, _session: Session) -> Result<()> { + response.send(proto::Ack {})?; + Ok(()) +} + +/// Creates a new room for calling (outside of channels) +async fn create_room( + _request: proto::CreateRoom, + response: Response, + session: UserSession, +) -> Result<()> { + let live_kit_room = nanoid::nanoid!(30); + + let live_kit_connection_info = util::maybe!(async { + let live_kit = session.live_kit_client.as_ref(); + let live_kit = live_kit?; + let user_id = session.user_id().to_string(); + + let token = live_kit + .room_token(&live_kit_room, &user_id.to_string()) + .trace_err()?; + + Some(proto::LiveKitConnectionInfo { + server_url: live_kit.url().into(), + token, + can_publish: true, + }) + }) + .await; + + let room = session + .db() + .await + .create_room(session.user_id(), session.connection_id, &live_kit_room) + .await?; + + response.send(proto::CreateRoomResponse { + room: Some(room.clone()), + live_kit_connection_info, + })?; + + update_user_contacts(session.user_id(), &session).await?; + Ok(()) +} + +/// Join a room from an invitation. Equivalent to joining a channel if there is one. 
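// `connection_lost` above gives a dropped client the RECONNECT_TIMEOUT grace
// period to come back before its rooms, projects, and calls are cleaned up,
// unless the server-side teardown signal wins the race. The race in isolation;
// the 30-second value and the empty watch payload are placeholders, not what
// the server uses.
use std::time::Duration;

use futures::{pin_mut, select_biased, FutureExt as _};
use tokio::sync::watch;

async fn grace_period_then_cleanup(mut teardown: watch::Receiver<()>) {
    let reconnect_timeout = tokio::time::sleep(Duration::from_secs(30)).fuse();
    let torn_down = teardown.changed().fuse();
    pin_mut!(reconnect_timeout, torn_down);
    select_biased! {
        _ = reconnect_timeout => {
            // The client never reconnected: release its resources here.
        }
        _ = torn_down => {
            // Server-initiated teardown: skip the per-connection cleanup.
        }
    }
}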
+async fn join_room( + request: proto::JoinRoom, + response: Response, + session: UserSession, +) -> Result<()> { + let room_id = RoomId::from_proto(request.id); + + let channel_id = session.db().await.channel_id_for_room(room_id).await?; + + if let Some(channel_id) = channel_id { + return join_channel_internal(channel_id, Box::new(response), session).await; + } + + let joined_room = { + let room = session + .db() + .await + .join_room(room_id, session.user_id(), session.connection_id) + .await?; + room_updated(&room.room, &session.peer); + room.into_inner() + }; + + for connection_id in session + .connection_pool() + .await + .user_connection_ids(session.user_id()) + { + session + .peer + .send( + connection_id, + proto::CallCanceled { + room_id: room_id.to_proto(), + }, + ) + .trace_err(); + } + + let live_kit_connection_info = if let Some(live_kit) = session.live_kit_client.as_ref() { + if let Some(token) = live_kit + .room_token( + &joined_room.room.live_kit_room, + &session.user_id().to_string(), + ) + .trace_err() + { + Some(proto::LiveKitConnectionInfo { + server_url: live_kit.url().into(), + token, + can_publish: true, + }) + } else { + None + } + } else { + None + }; + + response.send(proto::JoinRoomResponse { + room: Some(joined_room.room), + channel_id: None, + live_kit_connection_info, + })?; + + update_user_contacts(session.user_id(), &session).await?; + Ok(()) +} + +/// Rejoin room is used to reconnect to a room after connection errors. +async fn rejoin_room( + request: proto::RejoinRoom, + response: Response, + session: UserSession, +) -> Result<()> { + let room; + let channel; + { + let mut rejoined_room = session + .db() + .await + .rejoin_room(request, session.user_id(), session.connection_id) + .await?; + + response.send(proto::RejoinRoomResponse { + room: Some(rejoined_room.room.clone()), + reshared_projects: rejoined_room + .reshared_projects + .iter() + .map(|project| proto::ResharedProject { + id: project.id.to_proto(), + collaborators: project + .collaborators + .iter() + .map(|collaborator| collaborator.to_proto()) + .collect(), + }) + .collect(), + rejoined_projects: rejoined_room + .rejoined_projects + .iter() + .map(|rejoined_project| rejoined_project.to_proto()) + .collect(), + })?; + room_updated(&rejoined_room.room, &session.peer); + + for project in &rejoined_room.reshared_projects { + for collaborator in &project.collaborators { + session + .peer + .send( + collaborator.connection_id, + proto::UpdateProjectCollaborator { + project_id: project.id.to_proto(), + old_peer_id: Some(project.old_connection_id.into()), + new_peer_id: Some(session.connection_id.into()), + }, + ) + .trace_err(); + } + + broadcast( + Some(session.connection_id), + project + .collaborators + .iter() + .map(|collaborator| collaborator.connection_id), + |connection_id| { + session.peer.forward_send( + session.connection_id, + connection_id, + proto::UpdateProject { + project_id: project.id.to_proto(), + worktrees: project.worktrees.clone(), + }, + ) + }, + ); + } + + notify_rejoined_projects(&mut rejoined_room.rejoined_projects, &session)?; + + let rejoined_room = rejoined_room.into_inner(); + + room = rejoined_room.room; + channel = rejoined_room.channel; + } + + if let Some(channel) = channel { + channel_updated( + &channel, + &room, + &session.peer, + &*session.connection_pool().await, + ); + } + + update_user_contacts(session.user_id(), &session).await?; + Ok(()) +} + +fn notify_rejoined_projects( + rejoined_projects: &mut Vec, + session: &UserSession, +) -> Result<()> { + for 
project in rejoined_projects.iter() { + for collaborator in &project.collaborators { + session + .peer + .send( + collaborator.connection_id, + proto::UpdateProjectCollaborator { + project_id: project.id.to_proto(), + old_peer_id: Some(project.old_connection_id.into()), + new_peer_id: Some(session.connection_id.into()), + }, + ) + .trace_err(); + } + } + + for project in rejoined_projects { + for worktree in mem::take(&mut project.worktrees) { + #[cfg(any(test, feature = "test-support"))] + const MAX_CHUNK_SIZE: usize = 2; + #[cfg(not(any(test, feature = "test-support")))] + const MAX_CHUNK_SIZE: usize = 256; + + // Stream this worktree's entries. + let message = proto::UpdateWorktree { + project_id: project.id.to_proto(), + worktree_id: worktree.id, + abs_path: worktree.abs_path.clone(), + root_name: worktree.root_name, + updated_entries: worktree.updated_entries, + removed_entries: worktree.removed_entries, + scan_id: worktree.scan_id, + is_last_update: worktree.completed_scan_id == worktree.scan_id, + updated_repositories: worktree.updated_repositories, + removed_repositories: worktree.removed_repositories, + }; + for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) { + session.peer.send(session.connection_id, update.clone())?; + } + + // Stream this worktree's diagnostics. + for summary in worktree.diagnostic_summaries { + session.peer.send( + session.connection_id, + proto::UpdateDiagnosticSummary { + project_id: project.id.to_proto(), + worktree_id: worktree.id, + summary: Some(summary), + }, + )?; + } + + for settings_file in worktree.settings_files { + session.peer.send( + session.connection_id, + proto::UpdateWorktreeSettings { + project_id: project.id.to_proto(), + worktree_id: worktree.id, + path: settings_file.path, + content: Some(settings_file.content), + }, + )?; + } + } + + for language_server in &project.language_servers { + session.peer.send( + session.connection_id, + proto::UpdateLanguageServer { + project_id: project.id.to_proto(), + language_server_id: language_server.id, + variant: Some( + proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated( + proto::LspDiskBasedDiagnosticsUpdated {}, + ), + ), + }, + )?; + } + } + Ok(()) +} + +/// leave room disconnects from the room. +async fn leave_room( + _: proto::LeaveRoom, + response: Response, + session: UserSession, +) -> Result<()> { + leave_room_for_session(&session, session.connection_id).await?; + response.send(proto::Ack {})?; + Ok(()) +} + +/// Updates the permissions of someone else in the room. 
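// `notify_rejoined_projects` (and `join_project_internal` below) stream each
// worktree in chunks of at most MAX_CHUNK_SIZE entries, with the last chunk
// marking the update complete; a chunk size of 2 is used under test,
// presumably so the chunking path is always exercised. A plain-slice sketch of
// that idea; `send` stands in for `session.peer.send`, and this is not the
// real `proto::split_worktree_update`.
fn send_in_chunks<T>(entries: &[T], max_chunk_size: usize, mut send: impl FnMut(&[T], bool)) {
    if entries.is_empty() {
        // An empty worktree still gets one update marking it as complete.
        send(&[], true);
        return;
    }
    let chunk_count = (entries.len() + max_chunk_size - 1) / max_chunk_size;
    for (ix, chunk) in entries.chunks(max_chunk_size).enumerate() {
        let is_last = ix + 1 == chunk_count;
        send(chunk, is_last);
    }
}
// e.g. send_in_chunks(&updated_entries, 256, |chunk, is_last| { /* build one UpdateWorktree message */ });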
+async fn set_room_participant_role( + request: proto::SetRoomParticipantRole, + response: Response, + session: UserSession, +) -> Result<()> { + let user_id = UserId::from_proto(request.user_id); + let role = ChannelRole::from(request.role()); + + let (live_kit_room, can_publish) = { + let room = session + .db() + .await + .set_room_participant_role( + session.user_id(), + RoomId::from_proto(request.room_id), + user_id, + role, + ) + .await?; + + let live_kit_room = room.live_kit_room.clone(); + let can_publish = ChannelRole::from(request.role()).can_use_microphone(); + room_updated(&room, &session.peer); + (live_kit_room, can_publish) + }; + + if let Some(live_kit) = session.live_kit_client.as_ref() { + live_kit + .update_participant( + live_kit_room.clone(), + request.user_id.to_string(), + live_kit_server::proto::ParticipantPermission { + can_subscribe: true, + can_publish, + can_publish_data: can_publish, + hidden: false, + recorder: false, + }, + ) + .await + .trace_err(); + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Call someone else into the current room +async fn call( + request: proto::Call, + response: Response, + session: UserSession, +) -> Result<()> { + let room_id = RoomId::from_proto(request.room_id); + let calling_user_id = session.user_id(); + let calling_connection_id = session.connection_id; + let called_user_id = UserId::from_proto(request.called_user_id); + let initial_project_id = request.initial_project_id.map(ProjectId::from_proto); + if !session + .db() + .await + .has_contact(calling_user_id, called_user_id) + .await? + { + return Err(anyhow!("cannot call a user who isn't a contact"))?; + } + + let incoming_call = { + let (room, incoming_call) = &mut *session + .db() + .await + .call( + room_id, + calling_user_id, + calling_connection_id, + called_user_id, + initial_project_id, + ) + .await?; + room_updated(&room, &session.peer); + mem::take(incoming_call) + }; + update_user_contacts(called_user_id, &session).await?; + + let mut calls = session + .connection_pool() + .await + .user_connection_ids(called_user_id) + .map(|connection_id| session.peer.request(connection_id, incoming_call.clone())) + .collect::>(); + + while let Some(call_response) = calls.next().await { + match call_response.as_ref() { + Ok(_) => { + response.send(proto::Ack {})?; + return Ok(()); + } + Err(_) => { + call_response.trace_err(); + } + } + } + + { + let room = session + .db() + .await + .call_failed(room_id, called_user_id) + .await?; + room_updated(&room, &session.peer); + } + update_user_contacts(called_user_id, &session).await?; + + Err(anyhow!("failed to ring user"))? +} + +/// Cancel an outgoing call. +async fn cancel_call( + request: proto::CancelCall, + response: Response, + session: UserSession, +) -> Result<()> { + let called_user_id = UserId::from_proto(request.called_user_id); + let room_id = RoomId::from_proto(request.room_id); + { + let room = session + .db() + .await + .cancel_call(room_id, session.connection_id, called_user_id) + .await?; + room_updated(&room, &session.peer); + } + + for connection_id in session + .connection_pool() + .await + .user_connection_ids(called_user_id) + { + session + .peer + .send( + connection_id, + proto::CallCanceled { + room_id: room_id.to_proto(), + }, + ) + .trace_err(); + } + response.send(proto::Ack {})?; + + update_user_contacts(called_user_id, &session).await?; + Ok(()) +} + +/// Decline an incoming call. 
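// `call` above rings every connection the callee has and succeeds as soon as
// any one of them acknowledges; only if every attempt fails is the call marked
// as failed. The "first success wins" pattern on its own; `ring` stands in for
// `session.peer.request(connection_id, incoming_call)` and the error type is a
// placeholder.
use futures::{stream::FuturesUnordered, StreamExt as _};

async fn ring_until_answered<F, Fut>(connection_ids: Vec<u32>, ring: F) -> bool
where
    F: Fn(u32) -> Fut,
    Fut: std::future::Future<Output = Result<(), String>>,
{
    let mut calls: FuturesUnordered<_> = connection_ids.into_iter().map(ring).collect();
    while let Some(result) = calls.next().await {
        if result.is_ok() {
            return true; // the first connection that answers wins
        }
        // Failed attempts are ignored here; the handler above traces them.
    }
    false // nobody picked up
}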
+async fn decline_call(message: proto::DeclineCall, session: UserSession) -> Result<()> { + let room_id = RoomId::from_proto(message.room_id); + { + let room = session + .db() + .await + .decline_call(Some(room_id), session.user_id()) + .await? + .ok_or_else(|| anyhow!("failed to decline call"))?; + room_updated(&room, &session.peer); + } + + for connection_id in session + .connection_pool() + .await + .user_connection_ids(session.user_id()) + { + session + .peer + .send( + connection_id, + proto::CallCanceled { + room_id: room_id.to_proto(), + }, + ) + .trace_err(); + } + update_user_contacts(session.user_id(), &session).await?; + Ok(()) +} + +/// Updates other participants in the room with your current location. +async fn update_participant_location( + request: proto::UpdateParticipantLocation, + response: Response, + session: UserSession, +) -> Result<()> { + let room_id = RoomId::from_proto(request.room_id); + let location = request + .location + .ok_or_else(|| anyhow!("invalid location"))?; + + let db = session.db().await; + let room = db + .update_room_participant_location(room_id, session.connection_id, location) + .await?; + + room_updated(&room, &session.peer); + response.send(proto::Ack {})?; + Ok(()) +} + +/// Share a project into the room. +async fn share_project( + request: proto::ShareProject, + response: Response, + session: UserSession, +) -> Result<()> { + let (project_id, room) = &*session + .db() + .await + .share_project( + RoomId::from_proto(request.room_id), + session.connection_id, + &request.worktrees, + request + .dev_server_project_id + .map(|id| DevServerProjectId::from_proto(id)), + ) + .await?; + response.send(proto::ShareProjectResponse { + project_id: project_id.to_proto(), + })?; + room_updated(&room, &session.peer); + + Ok(()) +} + +/// Unshare a project from the room. 
+async fn unshare_project(message: proto::UnshareProject, session: Session) -> Result<()> { + let project_id = ProjectId::from_proto(message.project_id); + unshare_project_internal( + project_id, + session.connection_id, + session.user_id(), + &session, + ) + .await +} + +async fn unshare_project_internal( + project_id: ProjectId, + connection_id: ConnectionId, + user_id: Option, + session: &Session, +) -> Result<()> { + let delete = { + let room_guard = session + .db() + .await + .unshare_project(project_id, connection_id, user_id) + .await?; + + let (delete, room, guest_connection_ids) = &*room_guard; + + let message = proto::UnshareProject { + project_id: project_id.to_proto(), + }; + + broadcast( + Some(connection_id), + guest_connection_ids.iter().copied(), + |conn_id| session.peer.send(conn_id, message.clone()), + ); + if let Some(room) = room { + room_updated(room, &session.peer); + } + + *delete + }; + + if delete { + let db = session.db().await; + db.delete_project(project_id).await?; + } + + Ok(()) +} + +/// DevServer makes a project available online +async fn share_dev_server_project( + request: proto::ShareDevServerProject, + response: Response, + session: DevServerSession, +) -> Result<()> { + let (dev_server_project, user_id, status) = session + .db() + .await + .share_dev_server_project( + DevServerProjectId::from_proto(request.dev_server_project_id), + session.dev_server_id(), + session.connection_id, + &request.worktrees, + ) + .await?; + let Some(project_id) = dev_server_project.project_id else { + return Err(anyhow!("failed to share remote project"))?; + }; + + send_dev_server_projects_update(user_id, status, &session).await; + + response.send(proto::ShareProjectResponse { project_id })?; + + Ok(()) +} + +/// Join someone elses shared project. 
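// `unshare_project_internal` above copies the one value it needs (`delete`)
// out of the room guard inside an inner scope, so the guard is released before
// the follow-up `delete_project` call. The same scoping pattern with a plain
// mutex; the types here are illustrative.
use std::collections::HashSet;
use std::sync::Mutex;

fn unshare(shared_projects: &Mutex<HashSet<u64>>, project_id: u64) -> bool {
    let was_shared = {
        let mut guard = shared_projects.lock().unwrap();
        guard.remove(&project_id)
        // The guard drops at the end of this block...
    };
    if was_shared {
        // ...so slower follow-up work here runs without holding the lock.
    }
    was_shared
}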
+async fn join_project( + request: proto::JoinProject, + response: Response, + session: UserSession, +) -> Result<()> { + let project_id = ProjectId::from_proto(request.project_id); + + tracing::info!(%project_id, "join project"); + + let db = session.db().await; + let (project, replica_id) = &mut *db + .join_project(project_id, session.connection_id, session.user_id()) + .await?; + drop(db); + tracing::info!(%project_id, "join remote project"); + join_project_internal(response, session, project, replica_id) +} + +trait JoinProjectInternalResponse { + fn send(self, result: proto::JoinProjectResponse) -> Result<()>; +} +impl JoinProjectInternalResponse for Response { + fn send(self, result: proto::JoinProjectResponse) -> Result<()> { + Response::::send(self, result) + } +} +impl JoinProjectInternalResponse for Response { + fn send(self, result: proto::JoinProjectResponse) -> Result<()> { + Response::::send(self, result) + } +} + +fn join_project_internal( + response: impl JoinProjectInternalResponse, + session: UserSession, + project: &mut Project, + replica_id: &ReplicaId, +) -> Result<()> { + let collaborators = project + .collaborators + .iter() + .filter(|collaborator| collaborator.connection_id != session.connection_id) + .map(|collaborator| collaborator.to_proto()) + .collect::>(); + let project_id = project.id; + let guest_user_id = session.user_id(); + + let worktrees = project + .worktrees + .iter() + .map(|(id, worktree)| proto::WorktreeMetadata { + id: *id, + root_name: worktree.root_name.clone(), + visible: worktree.visible, + abs_path: worktree.abs_path.clone(), + }) + .collect::>(); + + let add_project_collaborator = proto::AddProjectCollaborator { + project_id: project_id.to_proto(), + collaborator: Some(proto::Collaborator { + peer_id: Some(session.connection_id.into()), + replica_id: replica_id.0 as u32, + user_id: guest_user_id.to_proto(), + }), + }; + + for collaborator in &collaborators { + session + .peer + .send( + collaborator.peer_id.unwrap().into(), + add_project_collaborator.clone(), + ) + .trace_err(); + } + + // First, we send the metadata associated with each worktree. + response.send(proto::JoinProjectResponse { + project_id: project.id.0 as u64, + worktrees: worktrees.clone(), + replica_id: replica_id.0 as u32, + collaborators: collaborators.clone(), + language_servers: project.language_servers.clone(), + role: project.role.into(), + dev_server_project_id: project + .dev_server_project_id + .map(|dev_server_project_id| dev_server_project_id.0 as u64), + })?; + + for (worktree_id, worktree) in mem::take(&mut project.worktrees) { + #[cfg(any(test, feature = "test-support"))] + const MAX_CHUNK_SIZE: usize = 2; + #[cfg(not(any(test, feature = "test-support")))] + const MAX_CHUNK_SIZE: usize = 256; + + // Stream this worktree's entries. + let message = proto::UpdateWorktree { + project_id: project_id.to_proto(), + worktree_id, + abs_path: worktree.abs_path.clone(), + root_name: worktree.root_name, + updated_entries: worktree.entries, + removed_entries: Default::default(), + scan_id: worktree.scan_id, + is_last_update: worktree.scan_id == worktree.completed_scan_id, + updated_repositories: worktree.repository_entries.into_values().collect(), + removed_repositories: Default::default(), + }; + for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) { + session.peer.send(session.connection_id, update.clone())?; + } + + // Stream this worktree's diagnostics. 
+ for summary in worktree.diagnostic_summaries { + session.peer.send( + session.connection_id, + proto::UpdateDiagnosticSummary { + project_id: project_id.to_proto(), + worktree_id: worktree.id, + summary: Some(summary), + }, + )?; + } + + for settings_file in worktree.settings_files { + session.peer.send( + session.connection_id, + proto::UpdateWorktreeSettings { + project_id: project_id.to_proto(), + worktree_id: worktree.id, + path: settings_file.path, + content: Some(settings_file.content), + }, + )?; + } + } + + for language_server in &project.language_servers { + session.peer.send( + session.connection_id, + proto::UpdateLanguageServer { + project_id: project_id.to_proto(), + language_server_id: language_server.id, + variant: Some( + proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated( + proto::LspDiskBasedDiagnosticsUpdated {}, + ), + ), + }, + )?; + } + + Ok(()) +} + +/// Leave someone elses shared project. +async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Result<()> { + let sender_id = session.connection_id; + let project_id = ProjectId::from_proto(request.project_id); + let db = session.db().await; + if db.is_hosted_project(project_id).await? { + let project = db.leave_hosted_project(project_id, sender_id).await?; + project_left(&project, &session); + return Ok(()); + } + + let (room, project) = &*db.leave_project(project_id, sender_id).await?; + tracing::info!( + %project_id, + "leave project" + ); + + project_left(&project, &session); + if let Some(room) = room { + room_updated(&room, &session.peer); + } + + Ok(()) +} + +async fn join_hosted_project( + request: proto::JoinHostedProject, + response: Response, + session: UserSession, +) -> Result<()> { + let (mut project, replica_id) = session + .db() + .await + .join_hosted_project( + ProjectId(request.project_id as i32), + session.user_id(), + session.connection_id, + ) + .await?; + + join_project_internal(response, session, &mut project, &replica_id) +} + +async fn create_dev_server_project( + request: proto::CreateDevServerProject, + response: Response, + session: UserSession, +) -> Result<()> { + let dev_server_id = DevServerId(request.dev_server_id as i32); + let dev_server_connection_id = session + .connection_pool() + .await + .dev_server_connection_id(dev_server_id); + let Some(dev_server_connection_id) = dev_server_connection_id else { + Err(ErrorCode::DevServerOffline + .message("Cannot create a remote project when the dev server is offline".to_string()) + .anyhow())? 
+ }; + + let path = request.path.clone(); + //Check that the path exists on the dev server + session + .peer + .forward_request( + session.connection_id, + dev_server_connection_id, + proto::ValidateDevServerProjectRequest { path: path.clone() }, + ) + .await?; + + let (dev_server_project, update) = session + .db() + .await + .create_dev_server_project( + DevServerId(request.dev_server_id as i32), + &request.path, + session.user_id(), + ) + .await?; + + let projects = session + .db() + .await + .get_projects_for_dev_server(dev_server_project.dev_server_id) + .await?; + + session.peer.send( + dev_server_connection_id, + proto::DevServerInstructions { projects }, + )?; + + send_dev_server_projects_update(session.user_id(), update, &session).await; + + response.send(proto::CreateDevServerProjectResponse { + dev_server_project: Some(dev_server_project.to_proto(None)), + })?; + Ok(()) +} + +async fn create_dev_server( + request: proto::CreateDevServer, + response: Response, + session: UserSession, +) -> Result<()> { + let access_token = auth::random_token(); + let hashed_access_token = auth::hash_access_token(&access_token); + + if request.name.is_empty() { + return Err(proto::ErrorCode::Forbidden + .message("Dev server name cannot be empty".to_string()) + .anyhow())?; + } + + let (dev_server, status) = session + .db() + .await + .create_dev_server( + &request.name, + request.ssh_connection_string.as_deref(), + &hashed_access_token, + session.user_id(), + ) + .await?; + + send_dev_server_projects_update(session.user_id(), status, &session).await; + + response.send(proto::CreateDevServerResponse { + dev_server_id: dev_server.id.0 as u64, + access_token: auth::generate_dev_server_token(dev_server.id.0 as usize, access_token), + name: request.name, + })?; + Ok(()) +} + +async fn regenerate_dev_server_token( + request: proto::RegenerateDevServerToken, + response: Response, + session: UserSession, +) -> Result<()> { + let dev_server_id = DevServerId(request.dev_server_id as i32); + let access_token = auth::random_token(); + let hashed_access_token = auth::hash_access_token(&access_token); + + let connection_id = session + .connection_pool() + .await + .dev_server_connection_id(dev_server_id); + if let Some(connection_id) = connection_id { + shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?; + session + .peer + .send(connection_id, proto::ShutdownDevServer {})?; + let _ = remove_dev_server_connection(dev_server_id, &session).await; + } + + let status = session + .db() + .await + .update_dev_server_token(dev_server_id, &hashed_access_token, session.user_id()) + .await?; + + send_dev_server_projects_update(session.user_id(), status, &session).await; + + response.send(proto::RegenerateDevServerTokenResponse { + dev_server_id: dev_server_id.to_proto(), + access_token: auth::generate_dev_server_token(dev_server_id.0 as usize, access_token), + })?; + Ok(()) +} + +async fn rename_dev_server( + request: proto::RenameDevServer, + response: Response, + session: UserSession, +) -> Result<()> { + if request.name.trim().is_empty() { + return Err(proto::ErrorCode::Forbidden + .message("Dev server name cannot be empty".to_string()) + .anyhow())?; + } + + let dev_server_id = DevServerId(request.dev_server_id as i32); + let dev_server = session.db().await.get_dev_server(dev_server_id).await?; + if dev_server.user_id != session.user_id() { + return Err(anyhow!(ErrorCode::Forbidden))?; + } + + let status = session + .db() + .await + .rename_dev_server(dev_server_id, &request.name, 
session.user_id()) + .await?; + + send_dev_server_projects_update(session.user_id(), status, &session).await; + + response.send(proto::Ack {})?; + Ok(()) +} + +async fn delete_dev_server( + request: proto::DeleteDevServer, + response: Response, + session: UserSession, +) -> Result<()> { + let dev_server_id = DevServerId(request.dev_server_id as i32); + let dev_server = session.db().await.get_dev_server(dev_server_id).await?; + if dev_server.user_id != session.user_id() { + return Err(anyhow!(ErrorCode::Forbidden))?; + } + + let connection_id = session + .connection_pool() + .await + .dev_server_connection_id(dev_server_id); + if let Some(connection_id) = connection_id { + shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?; + session + .peer + .send(connection_id, proto::ShutdownDevServer {})?; + let _ = remove_dev_server_connection(dev_server_id, &session).await; + } + + let status = session + .db() + .await + .delete_dev_server(dev_server_id, session.user_id()) + .await?; + + send_dev_server_projects_update(session.user_id(), status, &session).await; + + response.send(proto::Ack {})?; + Ok(()) +} + +async fn delete_dev_server_project( + request: proto::DeleteDevServerProject, + response: Response, + session: UserSession, +) -> Result<()> { + let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32); + let dev_server_project = session + .db() + .await + .get_dev_server_project(dev_server_project_id) + .await?; + + let dev_server = session + .db() + .await + .get_dev_server(dev_server_project.dev_server_id) + .await?; + if dev_server.user_id != session.user_id() { + return Err(anyhow!(ErrorCode::Forbidden))?; + } + + let dev_server_connection_id = session + .connection_pool() + .await + .dev_server_connection_id(dev_server.id); + + if let Some(dev_server_connection_id) = dev_server_connection_id { + let project = session + .db() + .await + .find_dev_server_project(dev_server_project_id) + .await; + if let Ok(project) = project { + unshare_project_internal( + project.id, + dev_server_connection_id, + Some(session.user_id()), + &session, + ) + .await?; + } + } + + let (projects, status) = session + .db() + .await + .delete_dev_server_project(dev_server_project_id, dev_server.id, session.user_id()) + .await?; + + if let Some(dev_server_connection_id) = dev_server_connection_id { + session.peer.send( + dev_server_connection_id, + proto::DevServerInstructions { projects }, + )?; + } + + send_dev_server_projects_update(session.user_id(), status, &session).await; + + response.send(proto::Ack {})?; + Ok(()) +} + +async fn rejoin_dev_server_projects( + request: proto::RejoinRemoteProjects, + response: Response, + session: UserSession, +) -> Result<()> { + let mut rejoined_projects = { + let db = session.db().await; + db.rejoin_dev_server_projects( + &request.rejoined_projects, + session.user_id(), + session.0.connection_id, + ) + .await? + }; + notify_rejoined_projects(&mut rejoined_projects, &session)?; + + response.send(proto::RejoinRemoteProjectsResponse { + rejoined_projects: rejoined_projects + .into_iter() + .map(|project| project.to_proto()) + .collect(), + }) +} + +async fn reconnect_dev_server( + request: proto::ReconnectDevServer, + response: Response, + session: DevServerSession, +) -> Result<()> { + let reshared_projects = { + let db = session.db().await; + db.reshare_dev_server_projects( + &request.reshared_projects, + session.dev_server_id(), + session.0.connection_id, + ) + .await? 
+ }; + + for project in &reshared_projects { + for collaborator in &project.collaborators { + session + .peer + .send( + collaborator.connection_id, + proto::UpdateProjectCollaborator { + project_id: project.id.to_proto(), + old_peer_id: Some(project.old_connection_id.into()), + new_peer_id: Some(session.connection_id.into()), + }, + ) + .trace_err(); + } + + broadcast( + Some(session.connection_id), + project + .collaborators + .iter() + .map(|collaborator| collaborator.connection_id), + |connection_id| { + session.peer.forward_send( + session.connection_id, + connection_id, + proto::UpdateProject { + project_id: project.id.to_proto(), + worktrees: project.worktrees.clone(), + }, + ) + }, + ); + } + + response.send(proto::ReconnectDevServerResponse { + reshared_projects: reshared_projects + .iter() + .map(|project| proto::ResharedProject { + id: project.id.to_proto(), + collaborators: project + .collaborators + .iter() + .map(|collaborator| collaborator.to_proto()) + .collect(), + }) + .collect(), + })?; + + Ok(()) +} + +async fn shutdown_dev_server( + _: proto::ShutdownDevServer, + response: Response, + session: DevServerSession, +) -> Result<()> { + response.send(proto::Ack {})?; + shutdown_dev_server_internal(session.dev_server_id(), session.connection_id, &session).await?; + remove_dev_server_connection(session.dev_server_id(), &session).await +} + +async fn shutdown_dev_server_internal( + dev_server_id: DevServerId, + connection_id: ConnectionId, + session: &Session, +) -> Result<()> { + let (dev_server_projects, dev_server) = { + let db = session.db().await; + let dev_server_projects = db.get_projects_for_dev_server(dev_server_id).await?; + let dev_server = db.get_dev_server(dev_server_id).await?; + (dev_server_projects, dev_server) + }; + + for project_id in dev_server_projects.iter().filter_map(|p| p.project_id) { + unshare_project_internal( + ProjectId::from_proto(project_id), + connection_id, + None, + session, + ) + .await?; + } + + session + .connection_pool() + .await + .set_dev_server_offline(dev_server_id); + + let status = session + .db() + .await + .dev_server_projects_update(dev_server.user_id) + .await?; + send_dev_server_projects_update(dev_server.user_id, status, &session).await; + + Ok(()) +} + +async fn remove_dev_server_connection(dev_server_id: DevServerId, session: &Session) -> Result<()> { + let dev_server_connection = session + .connection_pool() + .await + .dev_server_connection_id(dev_server_id); + + if let Some(dev_server_connection) = dev_server_connection { + session + .connection_pool() + .await + .remove_connection(dev_server_connection)?; + } + Ok(()) +} + +/// Updates other participants with changes to the project +async fn update_project( + request: proto::UpdateProject, + response: Response, + session: Session, +) -> Result<()> { + let project_id = ProjectId::from_proto(request.project_id); + let (room, guest_connection_ids) = &*session + .db() + .await + .update_project(project_id, session.connection_id, &request.worktrees) + .await?; + broadcast( + Some(session.connection_id), + guest_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone()) + }, + ); + if let Some(room) = room { + room_updated(&room, &session.peer); + } + response.send(proto::Ack {})?; + + Ok(()) +} + +/// Updates other participants with changes to the worktree +async fn update_worktree( + request: proto::UpdateWorktree, + response: Response, + session: Session, +) -> Result<()> { + let 
guest_connection_ids = session + .db() + .await + .update_worktree(&request, session.connection_id) + .await?; + + broadcast( + Some(session.connection_id), + guest_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone()) + }, + ); + response.send(proto::Ack {})?; + Ok(()) +} + +/// Updates other participants with changes to the diagnostics +async fn update_diagnostic_summary( + message: proto::UpdateDiagnosticSummary, + session: Session, +) -> Result<()> { + let guest_connection_ids = session + .db() + .await + .update_diagnostic_summary(&message, session.connection_id) + .await?; + + broadcast( + Some(session.connection_id), + guest_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, message.clone()) + }, + ); + + Ok(()) +} + +/// Updates other participants with changes to the worktree settings +async fn update_worktree_settings( + message: proto::UpdateWorktreeSettings, + session: Session, +) -> Result<()> { + let guest_connection_ids = session + .db() + .await + .update_worktree_settings(&message, session.connection_id) + .await?; + + broadcast( + Some(session.connection_id), + guest_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, message.clone()) + }, + ); + + Ok(()) +} + +/// Notify other participants that a language server has started. +async fn start_language_server( + request: proto::StartLanguageServer, + session: Session, +) -> Result<()> { + let guest_connection_ids = session + .db() + .await + .start_language_server(&request, session.connection_id) + .await?; + + broadcast( + Some(session.connection_id), + guest_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone()) + }, + ); + Ok(()) +} + +/// Notify other participants that a language server has changed. +async fn update_language_server( + request: proto::UpdateLanguageServer, + session: Session, +) -> Result<()> { + let project_id = ProjectId::from_proto(request.project_id); + let project_connection_ids = session + .db() + .await + .project_connection_ids(project_id, session.connection_id, true) + .await?; + broadcast( + Some(session.connection_id), + project_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone()) + }, + ); + Ok(()) +} + +/// forward a project request to the host. These requests should be read only +/// as guests are allowed to send them. +async fn forward_read_only_project_request( + request: T, + response: Response, + session: UserSession, +) -> Result<()> +where + T: EntityMessage + RequestMessage, +{ + let project_id = ProjectId::from_proto(request.remote_entity_id()); + let host_connection_id = session + .db() + .await + .host_for_read_only_project_request(project_id, session.connection_id, session.user_id()) + .await?; + let payload = session + .peer + .forward_request(session.connection_id, host_connection_id, request) + .await?; + response.send(payload)?; + Ok(()) +} + +/// forward a project request to the host. These requests are disallowed +/// for guests. 
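// Every `update_*` handler above has the same shape: load the guest connection
// ids from the database, then fan the message out to everyone but the sender,
// logging per-connection send failures rather than aborting the whole
// broadcast. That is what the `broadcast` helper defined earlier does,
// restated here with plain integer ids and a string error so it stands alone.
fn fan_out(
    sender_id: Option<u32>,
    receiver_ids: impl IntoIterator<Item = u32>,
    mut send: impl FnMut(u32) -> Result<(), String>,
) {
    for receiver_id in receiver_ids {
        if Some(receiver_id) != sender_id {
            if let Err(error) = send(receiver_id) {
                eprintln!("failed to send to {receiver_id}: {error}");
            }
        }
    }
}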
+async fn forward_mutating_project_request( + request: T, + response: Response, + session: UserSession, +) -> Result<()> +where + T: EntityMessage + RequestMessage, +{ + let project_id = ProjectId::from_proto(request.remote_entity_id()); + + let host_connection_id = session + .db() + .await + .host_for_mutating_project_request(project_id, session.connection_id, session.user_id()) + .await?; + let payload = session + .peer + .forward_request(session.connection_id, host_connection_id, request) + .await?; + response.send(payload)?; + Ok(()) +} + +/// forward a project request to the host. These requests are disallowed +/// for guests. +async fn forward_versioned_mutating_project_request( + request: T, + response: Response, + session: UserSession, +) -> Result<()> +where + T: EntityMessage + RequestMessage + VersionedMessage, +{ + let project_id = ProjectId::from_proto(request.remote_entity_id()); + + let host_connection_id = session + .db() + .await + .host_for_mutating_project_request(project_id, session.connection_id, session.user_id()) + .await?; + if let Some(host_version) = session + .connection_pool() + .await + .connection(host_connection_id) + .map(|c| c.zed_version) + { + if let Some(min_required_version) = request.required_host_version() { + if min_required_version > host_version { + return Err(anyhow!(ErrorCode::RemoteUpgradeRequired + .with_tag("required", &min_required_version.to_string())))?; + } + } + } + + let payload = session + .peer + .forward_request(session.connection_id, host_connection_id, request) + .await?; + response.send(payload)?; + Ok(()) +} + +/// Notify other participants that a new buffer has been created +async fn create_buffer_for_peer( + request: proto::CreateBufferForPeer, + session: Session, +) -> Result<()> { + session + .db() + .await + .check_user_is_project_host( + ProjectId::from_proto(request.project_id), + session.connection_id, + ) + .await?; + let peer_id = request.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?; + session + .peer + .forward_send(session.connection_id, peer_id.into(), request)?; + Ok(()) +} + +/// Notify other participants that a buffer has been updated. This is +/// allowed for guests as long as the update is limited to selections. +async fn update_buffer( + request: proto::UpdateBuffer, + response: Response, + session: Session, +) -> Result<()> { + let project_id = ProjectId::from_proto(request.project_id); + let mut capability = Capability::ReadOnly; + + for op in request.operations.iter() { + match op.variant { + None | Some(proto::operation::Variant::UpdateSelections(_)) => {} + Some(_) => capability = Capability::ReadWrite, + } + } + + let host = { + let guard = session + .db() + .await + .connections_for_buffer_update( + project_id, + session.principal_id(), + session.connection_id, + capability, + ) + .await?; + + let (host, guests) = &*guard; + + broadcast( + Some(session.connection_id), + guests.clone(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone()) + }, + ); + + *host + }; + + if host != session.connection_id { + session + .peer + .forward_request(session.connection_id, host, request.clone()) + .await?; + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Notify other participants that a project has been updated. 
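// `update_buffer` above only escalates to read-write capability when the
// operations actually edit the buffer; selection-only updates are allowed from
// read-only guests. The same decision in isolation; the enums are stand-ins
// for `Capability` and `proto::operation::Variant`.
enum Capability {
    ReadOnly,
    ReadWrite,
}

enum Operation {
    UpdateSelections,
    Edit,
}

fn required_capability(operations: &[Option<Operation>]) -> Capability {
    let mut capability = Capability::ReadOnly;
    for op in operations {
        match op {
            // Missing or selection-only operations stay within read-only.
            None | Some(Operation::UpdateSelections) => {}
            // Anything else mutates the buffer and needs write access.
            Some(_) => capability = Capability::ReadWrite,
        }
    }
    capability
}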
+async fn broadcast_project_message_from_host>( + request: T, + session: Session, +) -> Result<()> { + let project_id = ProjectId::from_proto(request.remote_entity_id()); + let project_connection_ids = session + .db() + .await + .project_connection_ids(project_id, session.connection_id, false) + .await?; + + broadcast( + Some(session.connection_id), + project_connection_ids.iter().copied(), + |connection_id| { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone()) + }, + ); + Ok(()) +} + +/// Start following another user in a call. +async fn follow( + request: proto::Follow, + response: Response, + session: UserSession, +) -> Result<()> { + let room_id = RoomId::from_proto(request.room_id); + let project_id = request.project_id.map(ProjectId::from_proto); + let leader_id = request + .leader_id + .ok_or_else(|| anyhow!("invalid leader id"))? + .into(); + let follower_id = session.connection_id; + + session + .db() + .await + .check_room_participants(room_id, leader_id, session.connection_id) + .await?; + + let response_payload = session + .peer + .forward_request(session.connection_id, leader_id, request) + .await?; + response.send(response_payload)?; + + if let Some(project_id) = project_id { + let room = session + .db() + .await + .follow(room_id, project_id, leader_id, follower_id) + .await?; + room_updated(&room, &session.peer); + } + + Ok(()) +} + +/// Stop following another user in a call. +async fn unfollow(request: proto::Unfollow, session: UserSession) -> Result<()> { + let room_id = RoomId::from_proto(request.room_id); + let project_id = request.project_id.map(ProjectId::from_proto); + let leader_id = request + .leader_id + .ok_or_else(|| anyhow!("invalid leader id"))? + .into(); + let follower_id = session.connection_id; + + session + .db() + .await + .check_room_participants(room_id, leader_id, session.connection_id) + .await?; + + session + .peer + .forward_send(session.connection_id, leader_id, request)?; + + if let Some(project_id) = project_id { + let room = session + .db() + .await + .unfollow(room_id, project_id, leader_id, follower_id) + .await?; + room_updated(&room, &session.peer); + } + + Ok(()) +} + +/// Notify everyone following you of your current location. +async fn update_followers(request: proto::UpdateFollowers, session: UserSession) -> Result<()> { + let room_id = RoomId::from_proto(request.room_id); + let database = session.db.lock().await; + + let connection_ids = if let Some(project_id) = request.project_id { + let project_id = ProjectId::from_proto(project_id); + database + .project_connection_ids(project_id, session.connection_id, true) + .await? + } else { + database + .room_connection_ids(room_id, session.connection_id) + .await? + }; + + // For now, don't send view update messages back to that view's current leader. + let peer_id_to_omit = request.variant.as_ref().and_then(|variant| match variant { + proto::update_followers::Variant::UpdateView(payload) => payload.leader_id, + _ => None, + }); + + for connection_id in connection_ids.iter().cloned() { + if Some(connection_id.into()) != peer_id_to_omit && connection_id != session.connection_id { + session + .peer + .forward_send(session.connection_id, connection_id, request.clone())?; + } + } + Ok(()) +} + +/// Get public data about users. 
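// `update_followers` above relays each update to the other connections in the
// project or room, skipping the sender and, for view updates, the view's
// current leader (per the comment above). The filtering step on its own, with
// plain integer ids standing in for connection and peer ids.
fn relay_targets(connection_ids: &[u32], sender_id: u32, leader_to_omit: Option<u32>) -> Vec<u32> {
    connection_ids
        .iter()
        .copied()
        .filter(|&id| Some(id) != leader_to_omit && id != sender_id)
        .collect()
}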
+async fn get_users( + request: proto::GetUsers, + response: Response, + session: Session, +) -> Result<()> { + let user_ids = request + .user_ids + .into_iter() + .map(UserId::from_proto) + .collect(); + let users = session + .db() + .await + .get_users_by_ids(user_ids) + .await? + .into_iter() + .map(|user| proto::User { + id: user.id.to_proto(), + avatar_url: format!("https://github.com/{}.png?size=128", user.github_login), + github_login: user.github_login, + }) + .collect(); + response.send(proto::UsersResponse { users })?; + Ok(()) +} + +/// Search for users (to invite) buy Github login +async fn fuzzy_search_users( + request: proto::FuzzySearchUsers, + response: Response, + session: UserSession, +) -> Result<()> { + let query = request.query; + let users = match query.len() { + 0 => vec![], + 1 | 2 => session + .db() + .await + .get_user_by_github_login(&query) + .await? + .into_iter() + .collect(), + _ => session.db().await.fuzzy_search_users(&query, 10).await?, + }; + let users = users + .into_iter() + .filter(|user| user.id != session.user_id()) + .map(|user| proto::User { + id: user.id.to_proto(), + avatar_url: format!("https://github.com/{}.png?size=128", user.github_login), + github_login: user.github_login, + }) + .collect(); + response.send(proto::UsersResponse { users })?; + Ok(()) +} + +/// Send a contact request to another user. +async fn request_contact( + request: proto::RequestContact, + response: Response, + session: UserSession, +) -> Result<()> { + let requester_id = session.user_id(); + let responder_id = UserId::from_proto(request.responder_id); + if requester_id == responder_id { + return Err(anyhow!("cannot add yourself as a contact"))?; + } + + let notifications = session + .db() + .await + .send_contact_request(requester_id, responder_id) + .await?; + + // Update outgoing contact requests of requester + let mut update = proto::UpdateContacts::default(); + update.outgoing_requests.push(responder_id.to_proto()); + for connection_id in session + .connection_pool() + .await + .user_connection_ids(requester_id) + { + session.peer.send(connection_id, update.clone())?; + } + + // Update incoming contact requests of responder + let mut update = proto::UpdateContacts::default(); + update + .incoming_requests + .push(proto::IncomingContactRequest { + requester_id: requester_id.to_proto(), + }); + let connection_pool = session.connection_pool().await; + for connection_id in connection_pool.user_connection_ids(responder_id) { + session.peer.send(connection_id, update.clone())?; + } + + send_notifications(&connection_pool, &session.peer, notifications); + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Accept or decline a contact request +async fn respond_to_contact_request( + request: proto::RespondToContactRequest, + response: Response, + session: UserSession, +) -> Result<()> { + let responder_id = session.user_id(); + let requester_id = UserId::from_proto(request.requester_id); + let db = session.db().await; + if request.response == proto::ContactRequestResponse::Dismiss as i32 { + db.dismiss_contact_notification(responder_id, requester_id) + .await?; + } else { + let accept = request.response == proto::ContactRequestResponse::Accept as i32; + + let notifications = db + .respond_to_contact_request(responder_id, requester_id, accept) + .await?; + let requester_busy = db.is_user_busy(requester_id).await?; + let responder_busy = db.is_user_busy(responder_id).await?; + + let pool = session.connection_pool().await; + // Update responder with new contact + let mut update 
= proto::UpdateContacts::default(); + if accept { + update + .contacts + .push(contact_for_user(requester_id, requester_busy, &pool)); + } + update + .remove_incoming_requests + .push(requester_id.to_proto()); + for connection_id in pool.user_connection_ids(responder_id) { + session.peer.send(connection_id, update.clone())?; + } + + // Update requester with new contact + let mut update = proto::UpdateContacts::default(); + if accept { + update + .contacts + .push(contact_for_user(responder_id, responder_busy, &pool)); + } + update + .remove_outgoing_requests + .push(responder_id.to_proto()); + + for connection_id in pool.user_connection_ids(requester_id) { + session.peer.send(connection_id, update.clone())?; + } + + send_notifications(&pool, &session.peer, notifications); + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Remove a contact. +async fn remove_contact( + request: proto::RemoveContact, + response: Response, + session: UserSession, +) -> Result<()> { + let requester_id = session.user_id(); + let responder_id = UserId::from_proto(request.user_id); + let db = session.db().await; + let (contact_accepted, deleted_notification_id) = + db.remove_contact(requester_id, responder_id).await?; + + let pool = session.connection_pool().await; + // Update outgoing contact requests of requester + let mut update = proto::UpdateContacts::default(); + if contact_accepted { + update.remove_contacts.push(responder_id.to_proto()); + } else { + update + .remove_outgoing_requests + .push(responder_id.to_proto()); + } + for connection_id in pool.user_connection_ids(requester_id) { + session.peer.send(connection_id, update.clone())?; + } + + // Update incoming contact requests of responder + let mut update = proto::UpdateContacts::default(); + if contact_accepted { + update.remove_contacts.push(requester_id.to_proto()); + } else { + update + .remove_incoming_requests + .push(requester_id.to_proto()); + } + for connection_id in pool.user_connection_ids(responder_id) { + session.peer.send(connection_id, update.clone())?; + if let Some(notification_id) = deleted_notification_id { + session.peer.send( + connection_id, + proto::DeleteNotification { + notification_id: notification_id.to_proto(), + }, + )?; + } + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Creates a new channel. 
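// `remove_contact` above updates both sides differently depending on whether
// the contact request had already been accepted: an accepted contact is
// removed from both contact lists, while a pending one is removed from the
// requester's outgoing and the responder's incoming requests. The decision in
// isolation; the struct fields mirror `proto::UpdateContacts` but use plain ids.
#[derive(Default)]
struct ContactsUpdate {
    remove_contacts: Vec<u64>,
    remove_outgoing_requests: Vec<u64>,
    remove_incoming_requests: Vec<u64>,
}

fn removal_updates(requester_id: u64, responder_id: u64, contact_accepted: bool) -> (ContactsUpdate, ContactsUpdate) {
    let mut for_requester = ContactsUpdate::default();
    let mut for_responder = ContactsUpdate::default();
    if contact_accepted {
        for_requester.remove_contacts.push(responder_id);
        for_responder.remove_contacts.push(requester_id);
    } else {
        for_requester.remove_outgoing_requests.push(responder_id);
        for_responder.remove_incoming_requests.push(requester_id);
    }
    (for_requester, for_responder)
}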
+async fn create_channel( + request: proto::CreateChannel, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + + let parent_id = request.parent_id.map(|id| ChannelId::from_proto(id)); + let (channel, membership) = db + .create_channel(&request.name, parent_id, session.user_id()) + .await?; + + let root_id = channel.root_id(); + let channel = Channel::from_model(channel); + + response.send(proto::CreateChannelResponse { + channel: Some(channel.to_proto()), + parent_id: request.parent_id, + })?; + + let mut connection_pool = session.connection_pool().await; + if let Some(membership) = membership { + connection_pool.subscribe_to_channel( + membership.user_id, + membership.channel_id, + membership.role, + ); + let update = proto::UpdateUserChannels { + channel_memberships: vec![proto::ChannelMembership { + channel_id: membership.channel_id.to_proto(), + role: membership.role.into(), + }], + ..Default::default() + }; + for connection_id in connection_pool.user_connection_ids(membership.user_id) { + session.peer.send(connection_id, update.clone())?; + } + } + + for (connection_id, role) in connection_pool.channel_connection_ids(root_id) { + if !role.can_see_channel(channel.visibility) { + continue; + } + + let update = proto::UpdateChannels { + channels: vec![channel.to_proto()], + ..Default::default() + }; + session.peer.send(connection_id, update.clone())?; + } + + Ok(()) +} + +/// Delete a channel +async fn delete_channel( + request: proto::DeleteChannel, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + + let channel_id = request.channel_id; + let (root_channel, removed_channels) = db + .delete_channel(ChannelId::from_proto(channel_id), session.user_id()) + .await?; + response.send(proto::Ack {})?; + + // Notify members of removed channels + let mut update = proto::UpdateChannels::default(); + update + .delete_channels + .extend(removed_channels.into_iter().map(|id| id.to_proto())); + + let connection_pool = session.connection_pool().await; + for (connection_id, _) in connection_pool.channel_connection_ids(root_channel) { + session.peer.send(connection_id, update.clone())?; + } + + Ok(()) +} + +/// Invite someone to join a channel. 
+async fn invite_channel_member( + request: proto::InviteChannelMember, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let invitee_id = UserId::from_proto(request.user_id); + let InviteMemberResult { + channel, + notifications, + } = db + .invite_channel_member( + channel_id, + invitee_id, + session.user_id(), + request.role().into(), + ) + .await?; + + let update = proto::UpdateChannels { + channel_invitations: vec![channel.to_proto()], + ..Default::default() + }; + + let connection_pool = session.connection_pool().await; + for connection_id in connection_pool.user_connection_ids(invitee_id) { + session.peer.send(connection_id, update.clone())?; + } + + send_notifications(&connection_pool, &session.peer, notifications); + + response.send(proto::Ack {})?; + Ok(()) +} + +/// remove someone from a channel +async fn remove_channel_member( + request: proto::RemoveChannelMember, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let member_id = UserId::from_proto(request.user_id); + + let RemoveChannelMemberResult { + membership_update, + notification_id, + } = db + .remove_channel_member(channel_id, member_id, session.user_id()) + .await?; + + let mut connection_pool = session.connection_pool().await; + notify_membership_updated( + &mut connection_pool, + membership_update, + member_id, + &session.peer, + ); + for connection_id in connection_pool.user_connection_ids(member_id) { + if let Some(notification_id) = notification_id { + session + .peer + .send( + connection_id, + proto::DeleteNotification { + notification_id: notification_id.to_proto(), + }, + ) + .trace_err(); + } + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Toggle the channel between public and private. +/// Care is taken to maintain the invariant that public channels only descend from public channels, +/// (though members-only channels can appear at any point in the hierarchy). +async fn set_channel_visibility( + request: proto::SetChannelVisibility, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let visibility = request.visibility().into(); + + let channel_model = db + .set_channel_visibility(channel_id, visibility, session.user_id()) + .await?; + let root_id = channel_model.root_id(); + let channel = Channel::from_model(channel_model); + + let mut connection_pool = session.connection_pool().await; + for (user_id, role) in connection_pool + .channel_user_ids(root_id) + .collect::>() + .into_iter() + { + let update = if role.can_see_channel(channel.visibility) { + connection_pool.subscribe_to_channel(user_id, channel_id, role); + proto::UpdateChannels { + channels: vec![channel.to_proto()], + ..Default::default() + } + } else { + connection_pool.unsubscribe_from_channel(&user_id, &channel_id); + proto::UpdateChannels { + delete_channels: vec![channel.id.to_proto()], + ..Default::default() + } + }; + + for connection_id in connection_pool.user_connection_ids(user_id) { + session.peer.send(connection_id, update.clone())?; + } + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Alter the role for a user in the channel. 
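// `set_channel_visibility` above re-evaluates every user in the channel's
// tree: users whose role can still see the channel stay subscribed and get the
// updated record, everyone else is unsubscribed and gets a delete. A
// stand-alone sketch of that per-user decision; the enums and the visibility
// rule here are assumptions, the real check is `ChannelRole::can_see_channel`.
#[derive(Clone, Copy)]
enum Visibility {
    Public,
    Members,
}

#[derive(Clone, Copy)]
enum Role {
    Admin,
    Member,
    Guest,
}

enum ChannelUpdate {
    Upsert, // still visible: send the channel in `channels`
    Delete, // no longer visible: send its id in `delete_channels`
}

fn can_see_channel(role: Role, visibility: Visibility) -> bool {
    // Assumed rule for illustration: guests only see public channels.
    !matches!((role, visibility), (Role::Guest, Visibility::Members))
}

fn update_for(role: Role, new_visibility: Visibility) -> ChannelUpdate {
    if can_see_channel(role, new_visibility) {
        ChannelUpdate::Upsert
    } else {
        ChannelUpdate::Delete
    }
}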
+async fn set_channel_member_role( + request: proto::SetChannelMemberRole, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let member_id = UserId::from_proto(request.user_id); + let result = db + .set_channel_member_role( + channel_id, + session.user_id(), + member_id, + request.role().into(), + ) + .await?; + + match result { + db::SetMemberRoleResult::MembershipUpdated(membership_update) => { + let mut connection_pool = session.connection_pool().await; + notify_membership_updated( + &mut connection_pool, + membership_update, + member_id, + &session.peer, + ) + } + db::SetMemberRoleResult::InviteUpdated(channel) => { + let update = proto::UpdateChannels { + channel_invitations: vec![channel.to_proto()], + ..Default::default() + }; + + for connection_id in session + .connection_pool() + .await + .user_connection_ids(member_id) + { + session.peer.send(connection_id, update.clone())?; + } + } + } + + response.send(proto::Ack {})?; + Ok(()) +} + +/// Change the name of a channel +async fn rename_channel( + request: proto::RenameChannel, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let channel_model = db + .rename_channel(channel_id, session.user_id(), &request.name) + .await?; + let root_id = channel_model.root_id(); + let channel = Channel::from_model(channel_model); + + response.send(proto::RenameChannelResponse { + channel: Some(channel.to_proto()), + })?; + + let connection_pool = session.connection_pool().await; + let update = proto::UpdateChannels { + channels: vec![channel.to_proto()], + ..Default::default() + }; + for (connection_id, role) in connection_pool.channel_connection_ids(root_id) { + if role.can_see_channel(channel.visibility) { + session.peer.send(connection_id, update.clone())?; + } + } + + Ok(()) +} + +/// Move a channel to a new parent. +async fn move_channel( + request: proto::MoveChannel, + response: Response, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + let to = ChannelId::from_proto(request.to); + + let (root_id, channels) = session + .db() + .await + .move_channel(channel_id, to, session.user_id()) + .await?; + + let connection_pool = session.connection_pool().await; + for (connection_id, role) in connection_pool.channel_connection_ids(root_id) { + let channels = channels + .iter() + .filter_map(|channel| { + if role.can_see_channel(channel.visibility) { + Some(channel.to_proto()) + } else { + None + } + }) + .collect::>(); + if channels.is_empty() { + continue; + } + + let update = proto::UpdateChannels { + channels, + ..Default::default() + }; + + session.peer.send(connection_id, update.clone())?; + } + + response.send(Ack {})?; + Ok(()) +} + +/// Get the list of channel members +async fn get_channel_members( + request: proto::GetChannelMembers, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let limit = if request.limit == 0 { + u16::MAX as u64 + } else { + request.limit + }; + let (members, users) = db + .get_channel_participant_details(channel_id, &request.query, limit, session.user_id()) + .await?; + response.send(proto::GetChannelMembersResponse { members, users })?; + Ok(()) +} + +/// Accept or decline a channel invitation. 
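Further down, join_channel and the room-joining path both funnel into join_channel_internal through the small JoinChannelInternalResponse trait, so one implementation can answer two different Response envelopes (the concrete type parameters are missing from this rendering of the diff, and the real handler boxes the responder). A toy illustration of that pattern with made-up envelope types; the invite-response handler introduced by the comment above follows after the sketch:

// Hedged sketch: toy envelopes, not Zed's rpc::Response<T>.
struct JoinRoomResponse { room_id: u64 }

struct JoinChannelEnvelope;
struct JoinRoomEnvelope;

// One small trait lets a single internal function answer either RPC envelope.
trait JoinResponder {
    fn send(self, result: JoinRoomResponse);
}

impl JoinResponder for JoinChannelEnvelope {
    fn send(self, result: JoinRoomResponse) {
        println!("JoinChannelResponse carrying room {}", result.room_id);
    }
}

impl JoinResponder for JoinRoomEnvelope {
    fn send(self, result: JoinRoomResponse) {
        println!("JoinRoomResponse carrying room {}", result.room_id);
    }
}

// Mirrors join_channel_internal: it only needs "something that can send a JoinRoomResponse",
// so both the JoinChannel and JoinRoom entry points can reuse it.
fn join_room_or_channel(response: impl JoinResponder) {
    response.send(JoinRoomResponse { room_id: 7 });
}

fn main() {
    join_room_or_channel(JoinChannelEnvelope);
    join_room_or_channel(JoinRoomEnvelope);
}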
+async fn respond_to_channel_invite( + request: proto::RespondToChannelInvite, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + let RespondToChannelInvite { + membership_update, + notifications, + } = db + .respond_to_channel_invite(channel_id, session.user_id(), request.accept) + .await?; + + let mut connection_pool = session.connection_pool().await; + if let Some(membership_update) = membership_update { + notify_membership_updated( + &mut connection_pool, + membership_update, + session.user_id(), + &session.peer, + ); + } else { + let update = proto::UpdateChannels { + remove_channel_invitations: vec![channel_id.to_proto()], + ..Default::default() + }; + + for connection_id in connection_pool.user_connection_ids(session.user_id()) { + session.peer.send(connection_id, update.clone())?; + } + }; + + send_notifications(&connection_pool, &session.peer, notifications); + + response.send(proto::Ack {})?; + + Ok(()) +} + +/// Join the channels' room +async fn join_channel( + request: proto::JoinChannel, + response: Response, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + join_channel_internal(channel_id, Box::new(response), session).await +} + +trait JoinChannelInternalResponse { + fn send(self, result: proto::JoinRoomResponse) -> Result<()>; +} +impl JoinChannelInternalResponse for Response { + fn send(self, result: proto::JoinRoomResponse) -> Result<()> { + Response::::send(self, result) + } +} +impl JoinChannelInternalResponse for Response { + fn send(self, result: proto::JoinRoomResponse) -> Result<()> { + Response::::send(self, result) + } +} + +async fn join_channel_internal( + channel_id: ChannelId, + response: Box, + session: UserSession, +) -> Result<()> { + let joined_room = { + let mut db = session.db().await; + // If zed quits without leaving the room, and the user re-opens zed before the + // RECONNECT_TIMEOUT, we need to make sure that we kick the user out of the previous + // room they were in. + if let Some(connection) = db.stale_room_connection(session.user_id()).await? 
{ + tracing::info!( + stale_connection_id = %connection, + "cleaning up stale connection", + ); + drop(db); + leave_room_for_session(&session, connection).await?; + db = session.db().await; + } + + let (joined_room, membership_updated, role) = db + .join_channel(channel_id, session.user_id(), session.connection_id) + .await?; + + let live_kit_connection_info = session.live_kit_client.as_ref().and_then(|live_kit| { + let (can_publish, token) = if role == ChannelRole::Guest { + ( + false, + live_kit + .guest_token( + &joined_room.room.live_kit_room, + &session.user_id().to_string(), + ) + .trace_err()?, + ) + } else { + ( + true, + live_kit + .room_token( + &joined_room.room.live_kit_room, + &session.user_id().to_string(), + ) + .trace_err()?, + ) + }; + + Some(LiveKitConnectionInfo { + server_url: live_kit.url().into(), + token, + can_publish, + }) + }); + + response.send(proto::JoinRoomResponse { + room: Some(joined_room.room.clone()), + channel_id: joined_room + .channel + .as_ref() + .map(|channel| channel.id.to_proto()), + live_kit_connection_info, + })?; + + let mut connection_pool = session.connection_pool().await; + if let Some(membership_updated) = membership_updated { + notify_membership_updated( + &mut connection_pool, + membership_updated, + session.user_id(), + &session.peer, + ); + } + + room_updated(&joined_room.room, &session.peer); + + joined_room + }; + + channel_updated( + &joined_room + .channel + .ok_or_else(|| anyhow!("channel not returned"))?, + &joined_room.room, + &session.peer, + &*session.connection_pool().await, + ); + + update_user_contacts(session.user_id(), &session).await?; + Ok(()) +} + +/// Start editing the channel notes +async fn join_channel_buffer( + request: proto::JoinChannelBuffer, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + + let open_response = db + .join_channel_buffer(channel_id, session.user_id(), session.connection_id) + .await?; + + let collaborators = open_response.collaborators.clone(); + response.send(open_response)?; + + let update = UpdateChannelBufferCollaborators { + channel_id: channel_id.to_proto(), + collaborators: collaborators.clone(), + }; + channel_buffer_updated( + session.connection_id, + collaborators + .iter() + .filter_map(|collaborator| Some(collaborator.peer_id?.into())), + &update, + &session.peer, + ); + + Ok(()) +} + +/// Edit the channel notes +async fn update_channel_buffer( + request: proto::UpdateChannelBuffer, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + + let (collaborators, epoch, version) = db + .update_channel_buffer(channel_id, session.user_id(), &request.operations) + .await?; + + channel_buffer_updated( + session.connection_id, + collaborators.clone(), + &proto::UpdateChannelBuffer { + channel_id: channel_id.to_proto(), + operations: request.operations, + }, + &session.peer, + ); + + let pool = &*session.connection_pool().await; + + let non_collaborators = + pool.channel_connection_ids(channel_id) + .filter_map(|(connection_id, _)| { + if collaborators.contains(&connection_id) { + None + } else { + Some(connection_id) + } + }); + + broadcast(None, non_collaborators, |peer_id| { + session.peer.send( + peer_id, + proto::UpdateChannels { + latest_channel_buffer_versions: vec![proto::ChannelBufferVersion { + channel_id: channel_id.to_proto(), + epoch: epoch as u64, + version: version.clone(), + }], + 
..Default::default() + }, + ) + }); + + Ok(()) +} + +/// Rejoin the channel notes after a connection blip +async fn rejoin_channel_buffers( + request: proto::RejoinChannelBuffers, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let buffers = db + .rejoin_channel_buffers(&request.buffers, session.user_id(), session.connection_id) + .await?; + + for rejoined_buffer in &buffers { + let collaborators_to_notify = rejoined_buffer + .buffer + .collaborators + .iter() + .filter_map(|c| Some(c.peer_id?.into())); + channel_buffer_updated( + session.connection_id, + collaborators_to_notify, + &proto::UpdateChannelBufferCollaborators { + channel_id: rejoined_buffer.buffer.channel_id, + collaborators: rejoined_buffer.buffer.collaborators.clone(), + }, + &session.peer, + ); + } + + response.send(proto::RejoinChannelBuffersResponse { + buffers: buffers.into_iter().map(|b| b.buffer).collect(), + })?; + + Ok(()) +} + +/// Stop editing the channel notes +async fn leave_channel_buffer( + request: proto::LeaveChannelBuffer, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + + let left_buffer = db + .leave_channel_buffer(channel_id, session.connection_id) + .await?; + + response.send(Ack {})?; + + channel_buffer_updated( + session.connection_id, + left_buffer.connections, + &proto::UpdateChannelBufferCollaborators { + channel_id: channel_id.to_proto(), + collaborators: left_buffer.collaborators, + }, + &session.peer, + ); + + Ok(()) +} + +fn channel_buffer_updated( + sender_id: ConnectionId, + collaborators: impl IntoIterator, + message: &T, + peer: &Peer, +) { + broadcast(Some(sender_id), collaborators, |peer_id| { + peer.send(peer_id, message.clone()) + }); +} + +fn send_notifications( + connection_pool: &ConnectionPool, + peer: &Peer, + notifications: db::NotificationBatch, +) { + for (user_id, notification) in notifications { + for connection_id in connection_pool.user_connection_ids(user_id) { + if let Err(error) = peer.send( + connection_id, + proto::AddNotification { + notification: Some(notification.clone()), + }, + ) { + tracing::error!( + "failed to send notification to {:?} {}", + connection_id, + error + ); + } + } + } +} + +/// Send a message to the channel +async fn send_channel_message( + request: proto::SendChannelMessage, + response: Response, + session: UserSession, +) -> Result<()> { + // Validate the message body. 
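The checks that follow trim the body, reject anything longer than MAX_MESSAGE_LEN, and refuse blank messages, and the TODO notes that mention offsets are not yet re-anchored after trimming. Factored out as a hedged standalone helper (with an assumed MAX_MESSAGE_LEN value, since the real constant is defined elsewhere in collab), the same policy might read:

// Hedged sketch of the validation policy used below; MAX_MESSAGE_LEN's value here is
// assumed purely for illustration.
const MAX_MESSAGE_LEN: usize = 1024;

fn validate_message_body(raw: &str) -> Result<String, &'static str> {
    let body = raw.trim().to_string();
    if body.len() > MAX_MESSAGE_LEN {
        return Err("message is too long");
    }
    if body.is_empty() {
        return Err("message can't be blank");
    }
    // Note: trimming shifts byte offsets, so mention ranges computed against the untrimmed
    // body would need adjusting -- the TODO in the handler below.
    Ok(body)
}

fn main() {
    assert_eq!(validate_message_body("  hi  "), Ok("hi".to_string()));
    assert!(validate_message_body("   ").is_err());
}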
+ let body = request.body.trim().to_string(); + if body.len() > MAX_MESSAGE_LEN { + return Err(anyhow!("message is too long"))?; + } + if body.is_empty() { + return Err(anyhow!("message can't be blank"))?; + } + + // TODO: adjust mentions if body is trimmed + + let timestamp = OffsetDateTime::now_utc(); + let nonce = request + .nonce + .ok_or_else(|| anyhow!("nonce can't be blank"))?; + + let channel_id = ChannelId::from_proto(request.channel_id); + let CreatedChannelMessage { + message_id, + participant_connection_ids, + notifications, + } = session + .db() + .await + .create_channel_message( + channel_id, + session.user_id(), + &body, + &request.mentions, + timestamp, + nonce.clone().into(), + match request.reply_to_message_id { + Some(reply_to_message_id) => Some(MessageId::from_proto(reply_to_message_id)), + None => None, + }, + ) + .await?; + + let message = proto::ChannelMessage { + sender_id: session.user_id().to_proto(), + id: message_id.to_proto(), + body, + mentions: request.mentions, + timestamp: timestamp.unix_timestamp() as u64, + nonce: Some(nonce), + reply_to_message_id: request.reply_to_message_id, + edited_at: None, + }; + broadcast( + Some(session.connection_id), + participant_connection_ids.clone(), + |connection| { + session.peer.send( + connection, + proto::ChannelMessageSent { + channel_id: channel_id.to_proto(), + message: Some(message.clone()), + }, + ) + }, + ); + response.send(proto::SendChannelMessageResponse { + message: Some(message), + })?; + + let pool = &*session.connection_pool().await; + let non_participants = + pool.channel_connection_ids(channel_id) + .filter_map(|(connection_id, _)| { + if participant_connection_ids.contains(&connection_id) { + None + } else { + Some(connection_id) + } + }); + broadcast(None, non_participants, |peer_id| { + session.peer.send( + peer_id, + proto::UpdateChannels { + latest_channel_message_ids: vec![proto::ChannelMessageId { + channel_id: channel_id.to_proto(), + message_id: message_id.to_proto(), + }], + ..Default::default() + }, + ) + }); + send_notifications(pool, &session.peer, notifications); + + Ok(()) +} + +/// Delete a channel message +async fn remove_channel_message( + request: proto::RemoveChannelMessage, + response: Response, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + let message_id = MessageId::from_proto(request.message_id); + let (connection_ids, existing_notification_ids) = session + .db() + .await + .remove_channel_message(channel_id, message_id, session.user_id()) + .await?; + + broadcast( + Some(session.connection_id), + connection_ids, + move |connection| { + session.peer.send(connection, request.clone())?; + + for notification_id in &existing_notification_ids { + session.peer.send( + connection, + proto::DeleteNotification { + notification_id: (*notification_id).to_proto(), + }, + )?; + } + + Ok(()) + }, + ); + response.send(proto::Ack {})?; + Ok(()) +} + +async fn update_channel_message( + request: proto::UpdateChannelMessage, + response: Response, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + let message_id = MessageId::from_proto(request.message_id); + let updated_at = OffsetDateTime::now_utc(); + let UpdatedChannelMessage { + message_id, + participant_connection_ids, + notifications, + reply_to_message_id, + timestamp, + deleted_mention_notification_ids, + updated_mention_notifications, + } = session + .db() + .await + .update_channel_message( + channel_id, + message_id, + 
session.user_id(), + request.body.as_str(), + &request.mentions, + updated_at, + ) + .await?; + + let nonce = request + .nonce + .clone() + .ok_or_else(|| anyhow!("nonce can't be blank"))?; + + let message = proto::ChannelMessage { + sender_id: session.user_id().to_proto(), + id: message_id.to_proto(), + body: request.body.clone(), + mentions: request.mentions.clone(), + timestamp: timestamp.assume_utc().unix_timestamp() as u64, + nonce: Some(nonce), + reply_to_message_id: reply_to_message_id.map(|id| id.to_proto()), + edited_at: Some(updated_at.unix_timestamp() as u64), + }; + + response.send(proto::Ack {})?; + + let pool = &*session.connection_pool().await; + broadcast( + Some(session.connection_id), + participant_connection_ids, + |connection| { + session.peer.send( + connection, + proto::ChannelMessageUpdate { + channel_id: channel_id.to_proto(), + message: Some(message.clone()), + }, + )?; + + for notification_id in &deleted_mention_notification_ids { + session.peer.send( + connection, + proto::DeleteNotification { + notification_id: (*notification_id).to_proto(), + }, + )?; + } + + for notification in &updated_mention_notifications { + session.peer.send( + connection, + proto::UpdateNotification { + notification: Some(notification.clone()), + }, + )?; + } + + Ok(()) + }, + ); + + send_notifications(pool, &session.peer, notifications); + + Ok(()) +} + +/// Mark a channel message as read +async fn acknowledge_channel_message( + request: proto::AckChannelMessage, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + let message_id = MessageId::from_proto(request.message_id); + let notifications = session + .db() + .await + .observe_channel_message(channel_id, session.user_id(), message_id) + .await?; + send_notifications( + &*session.connection_pool().await, + &session.peer, + notifications, + ); + Ok(()) +} + +/// Mark a buffer version as synced +async fn acknowledge_buffer_version( + request: proto::AckBufferOperation, + session: UserSession, +) -> Result<()> { + let buffer_id = BufferId::from_proto(request.buffer_id); + session + .db() + .await + .observe_buffer_version( + buffer_id, + session.user_id(), + request.epoch as i32, + &request.version, + ) + .await?; + Ok(()) +} + +struct CompleteWithLanguageModelRateLimit; + +impl RateLimit for CompleteWithLanguageModelRateLimit { + fn capacity() -> usize { + std::env::var("COMPLETE_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR") + .ok() + .and_then(|v| v.parse().ok()) + .unwrap_or(120) // Picked arbitrarily + } + + fn refill_duration() -> chrono::Duration { + chrono::Duration::hours(1) + } + + fn db_name() -> &'static str { + "complete-with-language-model" + } +} + +async fn complete_with_language_model( + request: proto::CompleteWithLanguageModel, + response: StreamingResponse, + session: Session, + open_ai_api_key: Option>, + google_ai_api_key: Option>, + anthropic_api_key: Option>, +) -> Result<()> { + let Some(session) = session.for_user() else { + return Err(anyhow!("user not found"))?; + }; + authorize_access_to_language_models(&session).await?; + session + .rate_limiter + .check::(session.user_id()) + .await?; + + if request.model.starts_with("gpt") { + let api_key = + open_ai_api_key.ok_or_else(|| anyhow!("no OpenAI API key configured on the server"))?; + complete_with_open_ai(request, response, session, api_key).await?; + } else if request.model.starts_with("gemini") { + let api_key = google_ai_api_key + .ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?; + 
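// (Routing in complete_with_language_model is by model-name prefix: "gpt*" goes to OpenAI,
// "gemini*" to Google AI, and "claude*" to Anthropic; a model name matching none of the
// prefixes falls through and the request completes without streaming any response events.)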
complete_with_google_ai(request, response, session, api_key).await?; + } else if request.model.starts_with("claude") { + let api_key = anthropic_api_key + .ok_or_else(|| anyhow!("no Anthropic AI API key configured on the server"))?; + complete_with_anthropic(request, response, session, api_key).await?; + } + + Ok(()) +} + +async fn complete_with_open_ai( + request: proto::CompleteWithLanguageModel, + response: StreamingResponse, + session: UserSession, + api_key: Arc, +) -> Result<()> { + let mut completion_stream = open_ai::stream_completion( + session.http_client.as_ref(), + OPEN_AI_API_URL, + &api_key, + crate::ai::language_model_request_to_open_ai(request)?, + None, + ) + .await + .context("open_ai::stream_completion request failed within collab")?; + + while let Some(event) = completion_stream.next().await { + let event = event?; + response.send(proto::LanguageModelResponse { + choices: event + .choices + .into_iter() + .map(|choice| proto::LanguageModelChoiceDelta { + index: choice.index, + delta: Some(proto::LanguageModelResponseMessage { + role: choice.delta.role.map(|role| match role { + open_ai::Role::User => LanguageModelRole::LanguageModelUser, + open_ai::Role::Assistant => LanguageModelRole::LanguageModelAssistant, + open_ai::Role::System => LanguageModelRole::LanguageModelSystem, + open_ai::Role::Tool => LanguageModelRole::LanguageModelTool, + } as i32), + content: choice.delta.content, + tool_calls: choice + .delta + .tool_calls + .into_iter() + .map(|delta| proto::ToolCallDelta { + index: delta.index as u32, + id: delta.id, + variant: match delta.function { + Some(function) => { + let name = function.name; + let arguments = function.arguments; + + Some(proto::tool_call_delta::Variant::Function( + proto::tool_call_delta::FunctionCallDelta { + name, + arguments, + }, + )) + } + None => None, + }, + }) + .collect(), + }), + finish_reason: choice.finish_reason, + }) + .collect(), + })?; + } + + Ok(()) +} + +async fn complete_with_google_ai( + request: proto::CompleteWithLanguageModel, + response: StreamingResponse, + session: UserSession, + api_key: Arc, +) -> Result<()> { + let mut stream = google_ai::stream_generate_content( + session.http_client.clone(), + google_ai::API_URL, + api_key.as_ref(), + crate::ai::language_model_request_to_google_ai(request)?, + ) + .await + .context("google_ai::stream_generate_content request failed")?; + + while let Some(event) = stream.next().await { + let event = event?; + response.send(proto::LanguageModelResponse { + choices: event + .candidates + .unwrap_or_default() + .into_iter() + .map(|candidate| proto::LanguageModelChoiceDelta { + index: candidate.index as u32, + delta: Some(proto::LanguageModelResponseMessage { + role: Some(match candidate.content.role { + google_ai::Role::User => LanguageModelRole::LanguageModelUser, + google_ai::Role::Model => LanguageModelRole::LanguageModelAssistant, + } as i32), + content: Some( + candidate + .content + .parts + .into_iter() + .filter_map(|part| match part { + google_ai::Part::TextPart(part) => Some(part.text), + google_ai::Part::InlineDataPart(_) => None, + }) + .collect(), + ), + // Tool calls are not supported for Google + tool_calls: Vec::new(), + }), + finish_reason: candidate.finish_reason.map(|reason| reason.to_string()), + }) + .collect(), + })?; + } + + Ok(()) +} + +async fn complete_with_anthropic( + request: proto::CompleteWithLanguageModel, + response: StreamingResponse, + session: UserSession, + api_key: Arc, +) -> Result<()> { + let model = 
anthropic::Model::from_id(&request.model)?; + + let mut system_message = String::new(); + let messages = request + .messages + .into_iter() + .filter_map(|message| { + match message.role() { + LanguageModelRole::LanguageModelUser => Some(anthropic::RequestMessage { + role: anthropic::Role::User, + content: message.content, + }), + LanguageModelRole::LanguageModelAssistant => Some(anthropic::RequestMessage { + role: anthropic::Role::Assistant, + content: message.content, + }), + // Anthropic's API breaks system instructions out as a separate field rather + // than having a system message role. + LanguageModelRole::LanguageModelSystem => { + if !system_message.is_empty() { + system_message.push_str("\n\n"); + } + system_message.push_str(&message.content); + + None + } + // We don't yet support tool calls for Anthropic + LanguageModelRole::LanguageModelTool => None, + } + }) + .collect(); + + let mut stream = anthropic::stream_completion( + session.http_client.as_ref(), + anthropic::ANTHROPIC_API_URL, + &api_key, + anthropic::Request { + model, + messages, + stream: true, + system: system_message, + max_tokens: 4092, + }, + None, + ) + .await?; + + let mut current_role = proto::LanguageModelRole::LanguageModelAssistant; + + while let Some(event) = stream.next().await { + let event = event?; + + match event { + anthropic::ResponseEvent::MessageStart { message } => { + if let Some(role) = message.role { + if role == "assistant" { + current_role = proto::LanguageModelRole::LanguageModelAssistant; + } else if role == "user" { + current_role = proto::LanguageModelRole::LanguageModelUser; + } + } + } + anthropic::ResponseEvent::ContentBlockStart { content_block, .. } => { + match content_block { + anthropic::ContentBlock::Text { text } => { + if !text.is_empty() { + response.send(proto::LanguageModelResponse { + choices: vec![proto::LanguageModelChoiceDelta { + index: 0, + delta: Some(proto::LanguageModelResponseMessage { + role: Some(current_role as i32), + content: Some(text), + tool_calls: Vec::new(), + }), + finish_reason: None, + }], + })?; + } + } + } + } + anthropic::ResponseEvent::ContentBlockDelta { delta, .. } => match delta { + anthropic::TextDelta::TextDelta { text } => { + response.send(proto::LanguageModelResponse { + choices: vec![proto::LanguageModelChoiceDelta { + index: 0, + delta: Some(proto::LanguageModelResponseMessage { + role: Some(current_role as i32), + content: Some(text), + tool_calls: Vec::new(), + }), + finish_reason: None, + }], + })?; + } + }, + anthropic::ResponseEvent::MessageDelta { delta, .. } => { + if let Some(stop_reason) = delta.stop_reason { + response.send(proto::LanguageModelResponse { + choices: vec![proto::LanguageModelChoiceDelta { + index: 0, + delta: None, + finish_reason: Some(stop_reason), + }], + })?; + } + } + anthropic::ResponseEvent::ContentBlockStop { .. 
} => {} + anthropic::ResponseEvent::MessageStop {} => {} + anthropic::ResponseEvent::Ping {} => {} + } + } + + Ok(()) +} + +struct CountTokensWithLanguageModelRateLimit; + +impl RateLimit for CountTokensWithLanguageModelRateLimit { + fn capacity() -> usize { + std::env::var("COUNT_TOKENS_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR") + .ok() + .and_then(|v| v.parse().ok()) + .unwrap_or(600) // Picked arbitrarily + } + + fn refill_duration() -> chrono::Duration { + chrono::Duration::hours(1) + } + + fn db_name() -> &'static str { + "count-tokens-with-language-model" + } +} + +async fn count_tokens_with_language_model( + request: proto::CountTokensWithLanguageModel, + response: Response, + session: UserSession, + google_ai_api_key: Option>, +) -> Result<()> { + authorize_access_to_language_models(&session).await?; + + if !request.model.starts_with("gemini") { + return Err(anyhow!( + "counting tokens for model: {:?} is not supported", + request.model + ))?; + } + + session + .rate_limiter + .check::(session.user_id()) + .await?; + + let api_key = google_ai_api_key + .ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?; + let tokens_response = google_ai::count_tokens( + session.http_client.as_ref(), + google_ai::API_URL, + &api_key, + crate::ai::count_tokens_request_to_google_ai(request)?, + ) + .await?; + response.send(proto::CountTokensResponse { + token_count: tokens_response.total_tokens as u32, + })?; + Ok(()) +} + +struct ComputeEmbeddingsRateLimit; + +impl RateLimit for ComputeEmbeddingsRateLimit { + fn capacity() -> usize { + std::env::var("EMBED_TEXTS_RATE_LIMIT_PER_HOUR") + .ok() + .and_then(|v| v.parse().ok()) + .unwrap_or(5000) // Picked arbitrarily + } + + fn refill_duration() -> chrono::Duration { + chrono::Duration::hours(1) + } + + fn db_name() -> &'static str { + "compute-embeddings" + } +} + +async fn compute_embeddings( + request: proto::ComputeEmbeddings, + response: Response, + session: UserSession, + api_key: Option>, +) -> Result<()> { + let api_key = api_key.context("no OpenAI API key configured on the server")?; + authorize_access_to_language_models(&session).await?; + + session + .rate_limiter + .check::(session.user_id()) + .await?; + + let embeddings = match request.model.as_str() { + "openai/text-embedding-3-small" => { + open_ai::embed( + session.http_client.as_ref(), + OPEN_AI_API_URL, + &api_key, + OpenAiEmbeddingModel::TextEmbedding3Small, + request.texts.iter().map(|text| text.as_str()), + ) + .await? 
+ } + provider => return Err(anyhow!("unsupported embedding provider {:?}", provider))?, + }; + + let embeddings = request + .texts + .iter() + .map(|text| { + let mut hasher = sha2::Sha256::new(); + hasher.update(text.as_bytes()); + let result = hasher.finalize(); + result.to_vec() + }) + .zip( + embeddings + .data + .into_iter() + .map(|embedding| embedding.embedding), + ) + .collect::>(); + + let db = session.db().await; + db.save_embeddings(&request.model, &embeddings) + .await + .context("failed to save embeddings") + .trace_err(); + + response.send(proto::ComputeEmbeddingsResponse { + embeddings: embeddings + .into_iter() + .map(|(digest, dimensions)| proto::Embedding { digest, dimensions }) + .collect(), + })?; + Ok(()) +} + +async fn get_cached_embeddings( + request: proto::GetCachedEmbeddings, + response: Response, + session: UserSession, +) -> Result<()> { + authorize_access_to_language_models(&session).await?; + + let db = session.db().await; + let embeddings = db.get_embeddings(&request.model, &request.digests).await?; + + response.send(proto::GetCachedEmbeddingsResponse { + embeddings: embeddings + .into_iter() + .map(|(digest, dimensions)| proto::Embedding { digest, dimensions }) + .collect(), + })?; + Ok(()) +} + +async fn authorize_access_to_language_models(session: &UserSession) -> Result<(), Error> { + let db = session.db().await; + let flags = db.get_user_flags(session.user_id()).await?; + if flags.iter().any(|flag| flag == "language-models") { + Ok(()) + } else { + Err(anyhow!("permission denied"))? + } +} + +/// Get a Supermaven API key for the user +async fn get_supermaven_api_key( + _request: proto::GetSupermavenApiKey, + response: Response, + session: UserSession, +) -> Result<()> { + let user_id: String = session.user_id().to_string(); + if !session.is_staff() { + return Err(anyhow!("supermaven not enabled for this account"))?; + } + + let email = session + .email() + .ok_or_else(|| anyhow!("user must have an email"))?; + + let supermaven_admin_api = session + .supermaven_client + .as_ref() + .ok_or_else(|| anyhow!("supermaven not configured"))?; + + let result = supermaven_admin_api + .try_get_or_create_user(CreateExternalUserRequest { id: user_id, email }) + .await?; + + response.send(proto::GetSupermavenApiKeyResponse { + api_key: result.api_key, + })?; + + Ok(()) +} + +/// Start receiving chat updates for a channel +async fn join_channel_chat( + request: proto::JoinChannelChat, + response: Response, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + + let db = session.db().await; + db.join_channel_chat(channel_id, session.connection_id, session.user_id()) + .await?; + let messages = db + .get_channel_messages(channel_id, session.user_id(), MESSAGE_COUNT_PER_PAGE, None) + .await?; + response.send(proto::JoinChannelChatResponse { + done: messages.len() < MESSAGE_COUNT_PER_PAGE, + messages, + })?; + Ok(()) +} + +/// Stop receiving chat updates for a channel +async fn leave_channel_chat(request: proto::LeaveChannelChat, session: UserSession) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + session + .db() + .await + .leave_channel_chat(channel_id, session.connection_id, session.user_id()) + .await?; + Ok(()) +} + +/// Retrieve the chat history for a channel +async fn get_channel_messages( + request: proto::GetChannelMessages, + response: Response, + session: UserSession, +) -> Result<()> { + let channel_id = ChannelId::from_proto(request.channel_id); + let messages = session + 
.db() + .await + .get_channel_messages( + channel_id, + session.user_id(), + MESSAGE_COUNT_PER_PAGE, + Some(MessageId::from_proto(request.before_message_id)), + ) + .await?; + response.send(proto::GetChannelMessagesResponse { + done: messages.len() < MESSAGE_COUNT_PER_PAGE, + messages, + })?; + Ok(()) +} + +/// Retrieve specific chat messages +async fn get_channel_messages_by_id( + request: proto::GetChannelMessagesById, + response: Response, + session: UserSession, +) -> Result<()> { + let message_ids = request + .message_ids + .iter() + .map(|id| MessageId::from_proto(*id)) + .collect::>(); + let messages = session + .db() + .await + .get_channel_messages_by_id(session.user_id(), &message_ids) + .await?; + response.send(proto::GetChannelMessagesResponse { + done: messages.len() < MESSAGE_COUNT_PER_PAGE, + messages, + })?; + Ok(()) +} + +/// Retrieve the current users notifications +async fn get_notifications( + request: proto::GetNotifications, + response: Response, + session: UserSession, +) -> Result<()> { + let notifications = session + .db() + .await + .get_notifications( + session.user_id(), + NOTIFICATION_COUNT_PER_PAGE, + request + .before_id + .map(|id| db::NotificationId::from_proto(id)), + ) + .await?; + response.send(proto::GetNotificationsResponse { + done: notifications.len() < NOTIFICATION_COUNT_PER_PAGE, + notifications, + })?; + Ok(()) +} + +/// Mark notifications as read +async fn mark_notification_as_read( + request: proto::MarkNotificationRead, + response: Response, + session: UserSession, +) -> Result<()> { + let database = &session.db().await; + let notifications = database + .mark_notification_as_read_by_id( + session.user_id(), + NotificationId::from_proto(request.notification_id), + ) + .await?; + send_notifications( + &*session.connection_pool().await, + &session.peer, + notifications, + ); + response.send(proto::Ack {})?; + Ok(()) +} + +/// Get the current users information +async fn get_private_user_info( + _request: proto::GetPrivateUserInfo, + response: Response, + session: UserSession, +) -> Result<()> { + let db = session.db().await; + + let metrics_id = db.get_user_metrics_id(session.user_id()).await?; + let user = db + .get_user_by_id(session.user_id()) + .await? 
+ .ok_or_else(|| anyhow!("user not found"))?; + let flags = db.get_user_flags(session.user_id()).await?; + + response.send(proto::GetPrivateUserInfoResponse { + metrics_id, + staff: user.admin, + flags, + })?; + Ok(()) +} + +fn to_axum_message(message: TungsteniteMessage) -> AxumMessage { + match message { + TungsteniteMessage::Text(payload) => AxumMessage::Text(payload), + TungsteniteMessage::Binary(payload) => AxumMessage::Binary(payload), + TungsteniteMessage::Ping(payload) => AxumMessage::Ping(payload), + TungsteniteMessage::Pong(payload) => AxumMessage::Pong(payload), + TungsteniteMessage::Close(frame) => AxumMessage::Close(frame.map(|frame| AxumCloseFrame { + code: frame.code.into(), + reason: frame.reason, + })), + } +} + +fn to_tungstenite_message(message: AxumMessage) -> TungsteniteMessage { + match message { + AxumMessage::Text(payload) => TungsteniteMessage::Text(payload), + AxumMessage::Binary(payload) => TungsteniteMessage::Binary(payload), + AxumMessage::Ping(payload) => TungsteniteMessage::Ping(payload), + AxumMessage::Pong(payload) => TungsteniteMessage::Pong(payload), + AxumMessage::Close(frame) => { + TungsteniteMessage::Close(frame.map(|frame| TungsteniteCloseFrame { + code: frame.code.into(), + reason: frame.reason, + })) + } + } +} + +fn notify_membership_updated( + connection_pool: &mut ConnectionPool, + result: MembershipUpdated, + user_id: UserId, + peer: &Peer, +) { + for membership in &result.new_channels.channel_memberships { + connection_pool.subscribe_to_channel(user_id, membership.channel_id, membership.role) + } + for channel_id in &result.removed_channels { + connection_pool.unsubscribe_from_channel(&user_id, channel_id) + } + + let user_channels_update = proto::UpdateUserChannels { + channel_memberships: result + .new_channels + .channel_memberships + .iter() + .map(|cm| proto::ChannelMembership { + channel_id: cm.channel_id.to_proto(), + role: cm.role.into(), + }) + .collect(), + ..Default::default() + }; + + let mut update = build_channels_update(result.new_channels, vec![]); + update.delete_channels = result + .removed_channels + .into_iter() + .map(|id| id.to_proto()) + .collect(); + update.remove_channel_invitations = vec![result.channel_id.to_proto()]; + + for connection_id in connection_pool.user_connection_ids(user_id) { + peer.send(connection_id, user_channels_update.clone()) + .trace_err(); + peer.send(connection_id, update.clone()).trace_err(); + } +} + +fn build_update_user_channels(channels: &ChannelsForUser) -> proto::UpdateUserChannels { + proto::UpdateUserChannels { + channel_memberships: channels + .channel_memberships + .iter() + .map(|m| proto::ChannelMembership { + channel_id: m.channel_id.to_proto(), + role: m.role.into(), + }) + .collect(), + observed_channel_buffer_version: channels.observed_buffer_versions.clone(), + observed_channel_message_id: channels.observed_channel_messages.clone(), + } +} + +fn build_channels_update( + channels: ChannelsForUser, + channel_invites: Vec, +) -> proto::UpdateChannels { + let mut update = proto::UpdateChannels::default(); + + for channel in channels.channels { + update.channels.push(channel.to_proto()); + } + + update.latest_channel_buffer_versions = channels.latest_buffer_versions; + update.latest_channel_message_ids = channels.latest_channel_messages; + + for (channel_id, participants) in channels.channel_participants { + update + .channel_participants + .push(proto::ChannelParticipants { + channel_id: channel_id.to_proto(), + participant_user_ids: participants.into_iter().map(|id| 
id.to_proto()).collect(), + }); + } + + for channel in channel_invites { + update.channel_invitations.push(channel.to_proto()); + } + + update.hosted_projects = channels.hosted_projects; + update +} + +fn build_initial_contacts_update( + contacts: Vec, + pool: &ConnectionPool, +) -> proto::UpdateContacts { + let mut update = proto::UpdateContacts::default(); + + for contact in contacts { + match contact { + db::Contact::Accepted { user_id, busy } => { + update.contacts.push(contact_for_user(user_id, busy, &pool)); + } + db::Contact::Outgoing { user_id } => update.outgoing_requests.push(user_id.to_proto()), + db::Contact::Incoming { user_id } => { + update + .incoming_requests + .push(proto::IncomingContactRequest { + requester_id: user_id.to_proto(), + }) + } + } + } + + update +} + +fn contact_for_user(user_id: UserId, busy: bool, pool: &ConnectionPool) -> proto::Contact { + proto::Contact { + user_id: user_id.to_proto(), + online: pool.is_user_online(user_id), + busy, + } +} + +fn room_updated(room: &proto::Room, peer: &Peer) { + broadcast( + None, + room.participants + .iter() + .filter_map(|participant| Some(participant.peer_id?.into())), + |peer_id| { + peer.send( + peer_id, + proto::RoomUpdated { + room: Some(room.clone()), + }, + ) + }, + ); +} + +fn channel_updated( + channel: &db::channel::Model, + room: &proto::Room, + peer: &Peer, + pool: &ConnectionPool, +) { + let participants = room + .participants + .iter() + .map(|p| p.user_id) + .collect::>(); + + broadcast( + None, + pool.channel_connection_ids(channel.root_id()) + .filter_map(|(channel_id, role)| { + role.can_see_channel(channel.visibility).then(|| channel_id) + }), + |peer_id| { + peer.send( + peer_id, + proto::UpdateChannels { + channel_participants: vec![proto::ChannelParticipants { + channel_id: channel.id.to_proto(), + participant_user_ids: participants.clone(), + }], + ..Default::default() + }, + ) + }, + ); +} + +async fn send_dev_server_projects_update( + user_id: UserId, + mut status: proto::DevServerProjectsUpdate, + session: &Session, +) { + let pool = session.connection_pool().await; + for dev_server in &mut status.dev_servers { + dev_server.status = + pool.dev_server_status(DevServerId(dev_server.dev_server_id as i32)) as i32; + } + let connections = pool.user_connection_ids(user_id); + for connection_id in connections { + session.peer.send(connection_id, status.clone()).trace_err(); + } +} + +async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()> { + let db = session.db().await; + + let contacts = db.get_contacts(user_id).await?; + let busy = db.is_user_busy(user_id).await?; + + let pool = session.connection_pool().await; + let updated_contact = contact_for_user(user_id, busy, &pool); + for contact in contacts { + if let db::Contact::Accepted { + user_id: contact_user_id, + .. 
+ } = contact + { + for contact_conn_id in pool.user_connection_ids(contact_user_id) { + session + .peer + .send( + contact_conn_id, + proto::UpdateContacts { + contacts: vec![updated_contact.clone()], + remove_contacts: Default::default(), + incoming_requests: Default::default(), + remove_incoming_requests: Default::default(), + outgoing_requests: Default::default(), + remove_outgoing_requests: Default::default(), + }, + ) + .trace_err(); + } + } + } + Ok(()) +} + +async fn lost_dev_server_connection(session: &DevServerSession) -> Result<()> { + log::info!("lost dev server connection, unsharing projects"); + let project_ids = session + .db() + .await + .get_stale_dev_server_projects(session.connection_id) + .await?; + + for project_id in project_ids { + // not unshare re-checks the connection ids match, so we get away with no transaction + unshare_project_internal(project_id, session.connection_id, None, &session).await?; + } + + let user_id = session.dev_server().user_id; + let update = session + .db() + .await + .dev_server_projects_update(user_id) + .await?; + + send_dev_server_projects_update(user_id, update, session).await; + + Ok(()) +} + +async fn leave_room_for_session(session: &UserSession, connection_id: ConnectionId) -> Result<()> { + let mut contacts_to_update = HashSet::default(); + + let room_id; + let canceled_calls_to_user_ids; + let live_kit_room; + let delete_live_kit_room; + let room; + let channel; + + if let Some(mut left_room) = session.db().await.leave_room(connection_id).await? { + contacts_to_update.insert(session.user_id()); + + for project in left_room.left_projects.values() { + project_left(project, session); + } + + room_id = RoomId::from_proto(left_room.room.id); + canceled_calls_to_user_ids = mem::take(&mut left_room.canceled_calls_to_user_ids); + live_kit_room = mem::take(&mut left_room.room.live_kit_room); + delete_live_kit_room = left_room.deleted; + room = mem::take(&mut left_room.room); + channel = mem::take(&mut left_room.channel); + + room_updated(&room, &session.peer); + } else { + return Ok(()); + } + + if let Some(channel) = channel { + channel_updated( + &channel, + &room, + &session.peer, + &*session.connection_pool().await, + ); + } + + { + let pool = session.connection_pool().await; + for canceled_user_id in canceled_calls_to_user_ids { + for connection_id in pool.user_connection_ids(canceled_user_id) { + session + .peer + .send( + connection_id, + proto::CallCanceled { + room_id: room_id.to_proto(), + }, + ) + .trace_err(); + } + contacts_to_update.insert(canceled_user_id); + } + } + + for contact_user_id in contacts_to_update { + update_user_contacts(contact_user_id, &session).await?; + } + + if let Some(live_kit) = session.live_kit_client.as_ref() { + live_kit + .remove_participant(live_kit_room.clone(), session.user_id().to_string()) + .await + .trace_err(); + + if delete_live_kit_room { + live_kit.delete_room(live_kit_room).await.trace_err(); + } + } + + Ok(()) +} + +async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> { + let left_channel_buffers = session + .db() + .await + .leave_channel_buffers(session.connection_id) + .await?; + + for left_buffer in left_channel_buffers { + channel_buffer_updated( + session.connection_id, + left_buffer.connections, + &proto::UpdateChannelBufferCollaborators { + channel_id: left_buffer.channel_id.to_proto(), + collaborators: left_buffer.collaborators, + }, + &session.peer, + ); + } + + Ok(()) +} + +fn project_left(project: &db::LeftProject, session: &UserSession) { + for 
connection_id in &project.connection_ids { + if project.should_unshare { + session + .peer + .send( + *connection_id, + proto::UnshareProject { + project_id: project.id.to_proto(), + }, + ) + .trace_err(); + } else { + session + .peer + .send( + *connection_id, + proto::RemoveProjectCollaborator { + project_id: project.id.to_proto(), + peer_id: Some(session.connection_id.into()), + }, + ) + .trace_err(); + } + } +} + +pub trait ResultExt { + type Ok; + + fn trace_err(self) -> Option; +} + +impl ResultExt for Result +where + E: std::fmt::Debug, +{ + type Ok = T; + + #[track_caller] + fn trace_err(self) -> Option { + match self { + Ok(value) => Some(value), + Err(error) => { + tracing::error!("{:?}", error); + None + } + } + } +} diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs new file mode 100644 index 0000000..197e82a --- /dev/null +++ b/crates/collab/src/rpc/connection_pool.rs @@ -0,0 +1,334 @@ +use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId}; +use anyhow::{anyhow, Result}; +use collections::{BTreeMap, HashMap, HashSet}; +use rpc::{proto, ConnectionId}; +use semantic_version::SemanticVersion; +use serde::Serialize; +use std::fmt; +use tracing::instrument; + +#[derive(Default, Serialize)] +pub struct ConnectionPool { + connections: BTreeMap, + connected_users: BTreeMap, + connected_dev_servers: BTreeMap, + channels: ChannelPool, + offline_dev_servers: HashSet, +} + +#[derive(Default, Serialize)] +struct ConnectedPrincipal { + connection_ids: HashSet, +} + +#[derive(Copy, Clone, Debug, Serialize, PartialOrd, PartialEq, Eq, Ord)] +pub struct ZedVersion(pub SemanticVersion); + +impl fmt::Display for ZedVersion { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl ZedVersion { + pub fn can_collaborate(&self) -> bool { + self.0 >= SemanticVersion::new(0, 129, 2) + } + + pub fn with_save_as() -> ZedVersion { + ZedVersion(SemanticVersion::new(0, 134, 0)) + } +} + +pub trait VersionedMessage { + fn required_host_version(&self) -> Option { + None + } +} + +impl VersionedMessage for proto::SaveBuffer { + fn required_host_version(&self) -> Option { + if self.new_path.is_some() { + Some(ZedVersion::with_save_as()) + } else { + None + } + } +} + +impl VersionedMessage for proto::OpenNewBuffer { + fn required_host_version(&self) -> Option { + Some(ZedVersion::with_save_as()) + } +} + +#[derive(Serialize)] +pub struct Connection { + pub principal_id: PrincipalId, + pub admin: bool, + pub zed_version: ZedVersion, +} + +impl ConnectionPool { + pub fn reset(&mut self) { + self.connections.clear(); + self.connected_users.clear(); + self.channels.clear(); + } + + pub fn connection(&mut self, connection_id: ConnectionId) -> Option<&Connection> { + self.connections.get(&connection_id) + } + + #[instrument(skip(self))] + pub fn add_connection( + &mut self, + connection_id: ConnectionId, + user_id: UserId, + admin: bool, + zed_version: ZedVersion, + ) { + self.connections.insert( + connection_id, + Connection { + principal_id: PrincipalId::UserId(user_id), + admin, + zed_version, + }, + ); + let connected_user = self.connected_users.entry(user_id).or_default(); + connected_user.connection_ids.insert(connection_id); + } + + pub fn add_dev_server( + &mut self, + connection_id: ConnectionId, + dev_server_id: DevServerId, + zed_version: ZedVersion, + ) { + self.connections.insert( + connection_id, + Connection { + principal_id: PrincipalId::DevServerId(dev_server_id), + admin: false, + 
zed_version, + }, + ); + + self.connected_dev_servers + .insert(dev_server_id, connection_id); + } + + #[instrument(skip(self))] + pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> { + let connection = self + .connections + .get_mut(&connection_id) + .ok_or_else(|| anyhow!("no such connection"))?; + + match connection.principal_id { + PrincipalId::UserId(user_id) => { + let connected_user = self.connected_users.get_mut(&user_id).unwrap(); + connected_user.connection_ids.remove(&connection_id); + if connected_user.connection_ids.is_empty() { + self.connected_users.remove(&user_id); + self.channels.remove_user(&user_id); + } + } + PrincipalId::DevServerId(dev_server_id) => { + self.connected_dev_servers.remove(&dev_server_id); + self.offline_dev_servers.remove(&dev_server_id); + } + } + self.connections.remove(&connection_id).unwrap(); + Ok(()) + } + + pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) { + self.offline_dev_servers.insert(dev_server_id); + } + + pub fn connections(&self) -> impl Iterator { + self.connections.values() + } + + pub fn user_connections(&self, user_id: UserId) -> impl Iterator + '_ { + self.connected_users + .get(&user_id) + .into_iter() + .flat_map(|state| { + state + .connection_ids + .iter() + .flat_map(|cid| self.connections.get(cid)) + }) + } + + pub fn user_connection_ids(&self, user_id: UserId) -> impl Iterator + '_ { + self.connected_users + .get(&user_id) + .into_iter() + .flat_map(|state| &state.connection_ids) + .copied() + } + + pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus { + if self.dev_server_connection_id(dev_server_id).is_some() + && !self.offline_dev_servers.contains(&dev_server_id) + { + proto::DevServerStatus::Online + } else { + proto::DevServerStatus::Offline + } + } + + pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option { + self.connected_dev_servers.get(&dev_server_id).copied() + } + + pub fn channel_user_ids( + &self, + channel_id: ChannelId, + ) -> impl Iterator + '_ { + self.channels.users_to_notify(channel_id) + } + + pub fn channel_connection_ids( + &self, + channel_id: ChannelId, + ) -> impl Iterator + '_ { + self.channels + .users_to_notify(channel_id) + .flat_map(|(user_id, role)| { + self.user_connection_ids(user_id) + .map(move |connection_id| (connection_id, role)) + }) + } + + pub fn subscribe_to_channel( + &mut self, + user_id: UserId, + channel_id: ChannelId, + role: ChannelRole, + ) { + self.channels.subscribe(user_id, channel_id, role); + } + + pub fn unsubscribe_from_channel(&mut self, user_id: &UserId, channel_id: &ChannelId) { + self.channels.unsubscribe(user_id, channel_id); + } + + pub fn is_user_online(&self, user_id: UserId) -> bool { + !self + .connected_users + .get(&user_id) + .unwrap_or(&Default::default()) + .connection_ids + .is_empty() + } + + #[cfg(test)] + pub fn check_invariants(&self) { + for (connection_id, connection) in &self.connections { + match &connection.principal_id { + PrincipalId::UserId(user_id) => { + assert!(self + .connected_users + .get(user_id) + .unwrap() + .connection_ids + .contains(connection_id)); + } + PrincipalId::DevServerId(dev_server_id) => { + assert_eq!( + self.connected_dev_servers.get(&dev_server_id).unwrap(), + connection_id + ); + } + } + } + + for (user_id, state) in &self.connected_users { + for connection_id in &state.connection_ids { + assert_eq!( + self.connections.get(connection_id).unwrap().principal_id, + PrincipalId::UserId(*user_id) + ); + } + } + + for 
(dev_server_id, connection_id) in &self.connected_dev_servers { + assert_eq!( + self.connections.get(connection_id).unwrap().principal_id, + PrincipalId::DevServerId(*dev_server_id) + ); + } + } +} + +#[derive(Default, Serialize)] +pub struct ChannelPool { + by_user: HashMap>, + by_channel: HashMap>, +} + +impl ChannelPool { + pub fn clear(&mut self) { + self.by_user.clear(); + self.by_channel.clear(); + } + + pub fn subscribe(&mut self, user_id: UserId, channel_id: ChannelId, role: ChannelRole) { + self.by_user + .entry(user_id) + .or_default() + .insert(channel_id, role); + self.by_channel + .entry(channel_id) + .or_default() + .insert(user_id); + } + + pub fn unsubscribe(&mut self, user_id: &UserId, channel_id: &ChannelId) { + if let Some(channels) = self.by_user.get_mut(user_id) { + channels.remove(channel_id); + if channels.is_empty() { + self.by_user.remove(user_id); + } + } + if let Some(users) = self.by_channel.get_mut(channel_id) { + users.remove(user_id); + if users.is_empty() { + self.by_channel.remove(channel_id); + } + } + } + + pub fn remove_user(&mut self, user_id: &UserId) { + if let Some(channels) = self.by_user.remove(&user_id) { + for channel_id in channels.keys() { + self.unsubscribe(user_id, &channel_id) + } + } + } + + pub fn users_to_notify( + &self, + channel_id: ChannelId, + ) -> impl '_ + Iterator { + self.by_channel + .get(&channel_id) + .into_iter() + .flat_map(move |users| { + users.iter().flat_map(move |user_id| { + Some(( + *user_id, + self.by_user + .get(user_id) + .and_then(|channels| channels.get(&channel_id)) + .copied()?, + )) + }) + }) + } +} diff --git a/crates/collab/src/seed.rs b/crates/collab/src/seed.rs new file mode 100644 index 0000000..b851aa2 --- /dev/null +++ b/crates/collab/src/seed.rs @@ -0,0 +1,137 @@ +use crate::db::{self, ChannelRole, NewUserParams}; + +use anyhow::Context; +use db::Database; +use serde::{de::DeserializeOwned, Deserialize}; +use std::{fmt::Write, fs, path::Path}; + +use crate::Config; + +#[derive(Debug, Deserialize)] +struct GitHubUser { + id: i32, + login: String, + email: Option, +} + +#[derive(Deserialize)] +struct SeedConfig { + // Which users to create as admins. 
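// (The seed file itself is JSON; a purely hypothetical example of the shape these three
// fields describe: {"admins": ["some-github-login"], "channels": ["a-channel-name"],
// "number_of_users": 100}.)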
+ admins: Vec, + // Which channels to create (all admins are invited to all channels) + channels: Vec, + // Number of random users to create from the Github API + number_of_users: Option, +} + +pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result<()> { + let client = reqwest::Client::new(); + + if !db.get_all_users(0, 1).await?.is_empty() && !force { + return Ok(()); + } + + let seed_path = config + .seed_path + .as_ref() + .context("called seed with no SEED_PATH")?; + + let seed_config = load_admins(seed_path) + .context(format!("failed to load {}", seed_path.to_string_lossy()))?; + + let mut first_user = None; + let mut others = vec![]; + + for admin_login in seed_config.admins { + let user = fetch_github::( + &client, + &format!("https://api.github.com/users/{admin_login}"), + ) + .await; + let user = db + .create_user( + &user.email.unwrap_or(format!("{admin_login}@example.com")), + true, + NewUserParams { + github_login: user.login, + github_user_id: user.id, + }, + ) + .await + .context("failed to create admin user")?; + if first_user.is_none() { + first_user = Some(user.user_id); + } else { + others.push(user.user_id) + } + } + + for channel in seed_config.channels { + let (channel, _) = db + .create_channel(&channel, None, first_user.unwrap()) + .await + .context("failed to create channel")?; + + for user_id in &others { + db.invite_channel_member( + channel.id, + *user_id, + first_user.unwrap(), + ChannelRole::Admin, + ) + .await + .context("failed to add user to channel")?; + } + } + + if let Some(number_of_users) = seed_config.number_of_users { + // Fetch 100 other random users from GitHub and insert them into the database + // (for testing autocompleters, etc.) + let mut user_count = db + .get_all_users(0, 200) + .await + .expect("failed to load users from db") + .len(); + let mut last_user_id = None; + while user_count < number_of_users { + let mut uri = "https://api.github.com/users?per_page=100".to_string(); + if let Some(last_user_id) = last_user_id { + write!(&mut uri, "&since={}", last_user_id).unwrap(); + } + let users = fetch_github::>(&client, &uri).await; + + for github_user in users { + last_user_id = Some(github_user.id); + user_count += 1; + db.get_or_create_user_by_github_account( + &github_user.login, + Some(github_user.id), + github_user.email.as_deref(), + None, + ) + .await + .expect("failed to insert user"); + } + } + } + + Ok(()) +} + +fn load_admins(path: impl AsRef) -> anyhow::Result { + let file_content = fs::read_to_string(path)?; + Ok(serde_json::from_str(&file_content)?) 
+} + +async fn fetch_github(client: &reqwest::Client, url: &str) -> T { + let response = client + .get(url) + .header("user-agent", "zed") + .send() + .await + .unwrap_or_else(|_| panic!("failed to fetch '{}'", url)); + response + .json() + .await + .unwrap_or_else(|_| panic!("failed to deserialize github user from '{}'", url)) +} diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs new file mode 100644 index 0000000..bb9ea43 --- /dev/null +++ b/crates/collab/src/tests.rs @@ -0,0 +1,67 @@ +use std::sync::Arc; + +use call::Room; +use client::ChannelId; +use gpui::{Model, TestAppContext}; + +mod channel_buffer_tests; +mod channel_guest_tests; +mod channel_message_tests; +mod channel_tests; +mod dev_server_tests; +mod editor_tests; +mod following_tests; +mod integration_tests; +mod notification_tests; +mod random_channel_buffer_tests; +mod random_project_collaboration_tests; +mod randomized_test_helpers; +mod test_server; + +use language::{tree_sitter_rust, Language, LanguageConfig, LanguageMatcher}; +pub use randomized_test_helpers::{ + run_randomized_test, save_randomized_test_plan, RandomizedTest, TestError, UserTestPlan, +}; +pub use test_server::{TestClient, TestServer}; + +#[derive(Debug, Eq, PartialEq)] +struct RoomParticipants { + remote: Vec, + pending: Vec, +} + +fn room_participants(room: &Model, cx: &mut TestAppContext) -> RoomParticipants { + room.read_with(cx, |room, _| { + let mut remote = room + .remote_participants() + .iter() + .map(|(_, participant)| participant.user.github_login.clone()) + .collect::>(); + let mut pending = room + .pending_participants() + .iter() + .map(|user| user.github_login.clone()) + .collect::>(); + remote.sort(); + pending.sort(); + RoomParticipants { remote, pending } + }) +} + +fn channel_id(room: &Model, cx: &mut TestAppContext) -> Option { + cx.read(|cx| room.read(cx).channel_id()) +} + +fn rust_lang() -> Arc { + Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )) +} diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs new file mode 100644 index 0000000..9b006e4 --- /dev/null +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -0,0 +1,694 @@ +use crate::{ + rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}, + tests::{test_server::open_channel_notes, TestServer}, +}; +use call::ActiveCall; +use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL; +use client::{Collaborator, ParticipantIndex, UserId}; +use collab_ui::channel_view::ChannelView; +use collections::HashMap; +use editor::{Anchor, Editor, ToOffset}; +use futures::future; +use gpui::{BackgroundExecutor, Model, TestAppContext, ViewContext}; +use rpc::{proto::PeerId, RECEIVE_TIMEOUT}; +use serde_json::json; +use std::ops::Range; + +#[gpui::test] +async fn test_core_channel_buffers( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel("zed", None, (&client_a, cx_a), &mut [(&client_b, cx_b)]) + .await; + + // Client A joins the channel buffer + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + 
// Client A edits the buffer + let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer()); + buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(0..0, "hello world")], None, cx) + }); + buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(5..5, ", cruel")], None, cx) + }); + buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(0..5, "goodbye")], None, cx) + }); + buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx)); + assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world"); + executor.run_until_parked(); + + // Client B joins the channel buffer + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators( + buffer.collaborators(), + &[client_a.user_id(), client_b.user_id()], + ); + }); + + // Client B sees the correct text, and then edits it + let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer()); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()), + buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id()) + ); + assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world"); + buffer_b.update(cx_b, |buffer, cx| { + buffer.edit([(7..12, "beautiful")], None, cx) + }); + + // Both A and B see the new edit + executor.run_until_parked(); + assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world"); + assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world"); + + // Client A closes the channel buffer. + cx_a.update(|_| drop(channel_buffer_a)); + executor.run_until_parked(); + + // Client B sees that client A is gone from the channel buffer. + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]); + }); + + // Client A rejoins the channel buffer + let _channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + executor.run_until_parked(); + + // Sanity test, make sure we saw A rejoining + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators( + &buffer.collaborators(), + &[client_a.user_id(), client_b.user_id()], + ); + }); + + // Client A loses connection. 
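+    // Simulate a dropped connection: the server refuses new connections and
+    // the clock advances past the receive/reconnect timeouts, so the server
+    // treats client A as gone.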
+ server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + // Client B observes A disconnect + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]); + }); + + // TODO: + // - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects + // - Test interaction with channel deletion while buffer is open +} + +#[gpui::test] +async fn test_channel_notes_participant_indices( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + cx_c.update(editor::init); + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + client_a + .fs() + .insert_tree("/root", json!({"file.txt": "123"})) + .await; + let (project_a, worktree_id_a) = client_a.build_local_project("/root", cx_a).await; + let project_b = client_b.build_empty_local_project(cx_b); + let project_c = client_c.build_empty_local_project(cx_c); + + let (workspace_a, mut cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, mut cx_b) = client_b.build_workspace(&project_b, cx_b); + let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c); + + // Clients A, B, and C open the channel notes + let channel_view_a = cx_a + .update(|cx| ChannelView::open(channel_id, None, workspace_a.clone(), cx)) + .await + .unwrap(); + let channel_view_b = cx_b + .update(|cx| ChannelView::open(channel_id, None, workspace_b.clone(), cx)) + .await + .unwrap(); + let channel_view_c = cx_c + .update(|cx| ChannelView::open(channel_id, None, workspace_c.clone(), cx)) + .await + .unwrap(); + + // Clients A, B, and C all insert and select some text + channel_view_a.update(cx_a, |notes, cx| { + notes.editor.update(cx, |editor, cx| { + editor.insert("a", cx); + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![0..1]); + }); + }); + }); + executor.run_until_parked(); + channel_view_b.update(cx_b, |notes, cx| { + notes.editor.update(cx, |editor, cx| { + editor.move_down(&Default::default(), cx); + editor.insert("b", cx); + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![1..2]); + }); + }); + }); + executor.run_until_parked(); + channel_view_c.update(cx_c, |notes, cx| { + notes.editor.update(cx, |editor, cx| { + editor.move_down(&Default::default(), cx); + editor.insert("c", cx); + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![2..3]); + }); + }); + }); + + // Client A sees clients B and C without assigned colors, because they aren't + // in a call together. + executor.run_until_parked(); + channel_view_a.update(cx_a, |notes, cx| { + notes.editor.update(cx, |editor, cx| { + assert_remote_selections(editor, &[(None, 1..2), (None, 2..3)], cx); + }); + }); + + // Clients A and B join the same call. 
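+    // Joining the same channel call is what assigns each participant an
+    // index (and therefore a color) in the shared notes.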
+ for (call, cx) in [(&active_call_a, &mut cx_a), (&active_call_b, &mut cx_b)] { + call.update(*cx, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + } + + // Clients A and B see each other with two different assigned colors. Client C + // still doesn't have a color. + executor.run_until_parked(); + channel_view_a.update(cx_a, |notes, cx| { + notes.editor.update(cx, |editor, cx| { + assert_remote_selections( + editor, + &[(Some(ParticipantIndex(1)), 1..2), (None, 2..3)], + cx, + ); + }); + }); + channel_view_b.update(cx_b, |notes, cx| { + notes.editor.update(cx, |editor, cx| { + assert_remote_selections( + editor, + &[(Some(ParticipantIndex(0)), 0..1), (None, 2..3)], + cx, + ); + }); + }); + + // Client A shares a project, and client B joins. + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + // Clients A and B open the same file. + let editor_a = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id_a, "file.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id_a, "file.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + editor_a.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![0..1]); + }); + }); + editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![2..3]); + }); + }); + executor.run_until_parked(); + + // Clients A and B see each other with the same colors as in the channel notes. 
+ editor_a.update(cx_a, |editor, cx| { + assert_remote_selections(editor, &[(Some(ParticipantIndex(1)), 2..3)], cx); + }); + editor_b.update(cx_b, |editor, cx| { + assert_remote_selections(editor, &[(Some(ParticipantIndex(0)), 0..1)], cx); + }); +} + +#[track_caller] +fn assert_remote_selections( + editor: &mut Editor, + expected_selections: &[(Option, Range)], + cx: &mut ViewContext, +) { + let snapshot = editor.snapshot(cx); + let range = Anchor::min()..Anchor::max(); + let remote_selections = snapshot + .remote_selections_in_range(&range, editor.collaboration_hub().unwrap(), cx) + .map(|s| { + let start = s.selection.start.to_offset(&snapshot.buffer_snapshot); + let end = s.selection.end.to_offset(&snapshot.buffer_snapshot); + (s.participant_index, start..end) + }) + .collect::>(); + assert_eq!( + remote_selections, expected_selections, + "incorrect remote selections" + ); +} + +#[gpui::test] +async fn test_multiple_handles_to_channel_buffer( + deterministic: BackgroundExecutor, + cx_a: &mut TestAppContext, +) { + let mut server = TestServer::start(deterministic.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + + let channel_id = server + .make_channel("the-channel", None, (&client_a, cx_a), &mut []) + .await; + + let channel_buffer_1 = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)); + let channel_buffer_2 = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)); + let channel_buffer_3 = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)); + + // All concurrent tasks for opening a channel buffer return the same model handle. + let (channel_buffer, channel_buffer_2, channel_buffer_3) = + future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3) + .await + .unwrap(); + let channel_buffer_model_id = channel_buffer.entity_id(); + assert_eq!(channel_buffer, channel_buffer_2); + assert_eq!(channel_buffer, channel_buffer_3); + + channel_buffer.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "hello")], None, cx); + }) + }); + deterministic.run_until_parked(); + + cx_a.update(|_| { + drop(channel_buffer); + drop(channel_buffer_2); + drop(channel_buffer_3); + }); + deterministic.run_until_parked(); + + // The channel buffer can be reopened after dropping it. 
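+    // Reopening creates a fresh model (note the different entity id below),
+    // while the buffer's contents are preserved on the server.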
+ let channel_buffer = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + assert_ne!(channel_buffer.entity_id(), channel_buffer_model_id); + channel_buffer.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, _| { + assert_eq!(buffer.text(), "hello"); + }) + }); +} + +#[gpui::test] +async fn test_channel_buffer_disconnect( + deterministic: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(deterministic.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + channel_buffer_a.update(cx_a, |buffer, cx| { + assert_eq!(buffer.channel(cx).unwrap().name, "the-channel"); + assert!(!buffer.is_connected()); + }); + + deterministic.run_until_parked(); + + server.allow_connections(); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + deterministic.run_until_parked(); + + client_a + .channel_store() + .update(cx_a, |channel_store, _| { + channel_store.remove_channel(channel_id) + }) + .await + .unwrap(); + deterministic.run_until_parked(); + + // Channel buffer observed the deletion + channel_buffer_b.update(cx_b, |buffer, cx| { + assert!(buffer.channel(cx).is_none()); + assert!(!buffer.is_connected()); + }); +} + +#[gpui::test] +async fn test_rejoin_channel_buffer( + deterministic: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(deterministic.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "1")], None, cx); + }) + }); + deterministic.run_until_parked(); + + // Client A disconnects. + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + + // Both clients make an edit. + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(1..1, "2")], None, cx); + }) + }); + channel_buffer_b.update(cx_b, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "0")], None, cx); + }) + }); + + // Both clients see their own edit. 
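+    // While A is offline the two replicas diverge; they converge again once
+    // A reconnects and the missing operations are exchanged.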
+ deterministic.run_until_parked(); + channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "12"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "01"); + }); + + // Client A reconnects. Both clients see each other's edits, and see + // the same collaborators. + server.allow_connections(); + deterministic.advance_clock(RECEIVE_TIMEOUT); + channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "012"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "012"); + }); + + channel_buffer_a.read_with(cx_a, |buffer_a, _| { + channel_buffer_b.read_with(cx_b, |buffer_b, _| { + assert_eq!(buffer_a.collaborators(), buffer_b.collaborators()); + }); + }); +} + +#[gpui::test] +async fn test_channel_buffers_and_server_restarts( + deterministic: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(deterministic.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + let _channel_buffer_c = client_c + .channel_store() + .update(cx_c, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "1")], None, cx); + }) + }); + deterministic.run_until_parked(); + + // Client C can't reconnect. + client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending())); + + // Server stops. + server.reset().await; + deterministic.advance_clock(RECEIVE_TIMEOUT); + + // While the server is down, both clients make an edit. + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(1..1, "2")], None, cx); + }) + }); + channel_buffer_b.update(cx_b, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "0")], None, cx); + }) + }); + + // Server restarts. + server.start().await.unwrap(); + deterministic.advance_clock(CLEANUP_TIMEOUT); + + // Clients reconnects. Clients A and B see each other's edits, and see + // that client C has disconnected. 
+ channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "012"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "012"); + }); + + channel_buffer_a.read_with(cx_a, |buffer_a, _| { + channel_buffer_b.read_with(cx_b, |buffer_b, _| { + assert_collaborators( + buffer_a.collaborators(), + &[client_a.user_id(), client_b.user_id()], + ); + assert_eq!(buffer_a.collaborators(), buffer_b.collaborators()); + }); + }); +} + +#[gpui::test] +async fn test_channel_buffer_changes( + deterministic: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let (server, client_a, client_b, channel_id) = TestServer::start2(cx_a, cx_b).await; + let (_, cx_a) = client_a.build_test_workspace(cx_a).await; + let (workspace_b, cx_b) = client_b.build_test_workspace(cx_b).await; + let channel_store_b = client_b.channel_store().clone(); + + // Editing the channel notes should set them to dirty + open_channel_notes(channel_id, cx_a).await.unwrap(); + cx_a.simulate_keystrokes("1"); + channel_store_b.read_with(cx_b, |channel_store, _| { + assert!(channel_store.has_channel_buffer_changed(channel_id)) + }); + + // Opening the buffer should clear the changed flag. + open_channel_notes(channel_id, cx_b).await.unwrap(); + channel_store_b.read_with(cx_b, |channel_store, _| { + assert!(!channel_store.has_channel_buffer_changed(channel_id)) + }); + + // Editing the channel while the buffer is open should not show that the buffer has changed. + cx_a.simulate_keystrokes("2"); + channel_store_b.read_with(cx_b, |channel_store, _| { + assert!(!channel_store.has_channel_buffer_changed(channel_id)) + }); + + // Test that the server is tracking things correctly, and we retain our 'not changed' + // state across a disconnect + deterministic.advance_clock(ACKNOWLEDGE_DEBOUNCE_INTERVAL); + server + .simulate_long_connection_interruption(client_b.peer_id().unwrap(), deterministic.clone()); + channel_store_b.read_with(cx_b, |channel_store, _| { + assert!(!channel_store.has_channel_buffer_changed(channel_id)) + }); + + // Closing the buffer should re-enable change tracking + cx_b.update(|cx| { + workspace_b.update(cx, |workspace, cx| { + workspace.close_all_items_and_panes(&Default::default(), cx) + }); + }); + deterministic.run_until_parked(); + + cx_a.simulate_keystrokes("3"); + channel_store_b.read_with(cx_b, |channel_store, _| { + assert!(channel_store.has_channel_buffer_changed(channel_id)) + }); +} + +#[gpui::test] +async fn test_channel_buffer_changes_persist( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_b2: &mut TestAppContext, +) { + let (mut server, client_a, client_b, channel_id) = TestServer::start2(cx_a, cx_b).await; + let (_, cx_a) = client_a.build_test_workspace(cx_a).await; + let (_, cx_b) = client_b.build_test_workspace(cx_b).await; + + // a) edits the notes + open_channel_notes(channel_id, cx_a).await.unwrap(); + cx_a.simulate_keystrokes("1"); + // b) opens them to observe the current version + open_channel_notes(channel_id, cx_b).await.unwrap(); + + // On boot the client should get the correct state. 
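+    // A second client for the same user simulates a fresh boot; it should
+    // pick up the persisted "already observed" state for the channel notes.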
+ let client_b2 = server.create_client(cx_b2, "user_b").await; + let channel_store_b2 = client_b2.channel_store().clone(); + channel_store_b2.read_with(cx_b2, |channel_store, _| { + assert!(!channel_store.has_channel_buffer_changed(channel_id)) + }); +} + +#[track_caller] +fn assert_collaborators(collaborators: &HashMap, ids: &[Option]) { + let mut user_ids = collaborators + .values() + .map(|collaborator| collaborator.user_id) + .collect::>(); + user_ids.sort(); + assert_eq!( + user_ids, + ids.into_iter().map(|id| id.unwrap()).collect::>() + ); +} + +fn buffer_text(channel_buffer: &Model, cx: &mut TestAppContext) -> String { + channel_buffer.read_with(cx, |buffer, _| buffer.text()) +} diff --git a/crates/collab/src/tests/channel_guest_tests.rs b/crates/collab/src/tests/channel_guest_tests.rs new file mode 100644 index 0000000..1d92691 --- /dev/null +++ b/crates/collab/src/tests/channel_guest_tests.rs @@ -0,0 +1,288 @@ +use crate::{db::ChannelId, tests::TestServer}; +use call::ActiveCall; +use editor::Editor; +use gpui::{BackgroundExecutor, TestAppContext}; +use rpc::proto; + +#[gpui::test] +async fn test_channel_guests( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let active_call_a = cx_a.read(ActiveCall::global); + + let channel_id = server + .make_public_channel("the-channel", &client_a, cx_a) + .await; + + // Client A shares a project in the channel + let project_a = client_a.build_test_project(cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + cx_a.executor().run_until_parked(); + + // Client B joins channel A as a guest + cx_b.update(|cx| workspace::join_channel(channel_id, client_b.app_state.clone(), None, cx)) + .await + .unwrap(); + + // b should be following a in the shared project. 
+ // B is a guest, + executor.run_until_parked(); + + let active_call_b = cx_b.read(ActiveCall::global); + let project_b = + active_call_b.read_with(cx_b, |call, _| call.location().unwrap().upgrade().unwrap()); + let room_b = active_call_b.update(cx_b, |call, _| call.room().unwrap().clone()); + + assert_eq!( + project_b.read_with(cx_b, |project, _| project.remote_id()), + Some(project_id), + ); + assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); + assert!(project_b + .update(cx_b, |project, cx| { + let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + project.create_entry((worktree_id, "b.txt"), false, cx) + }) + .await + .is_err()); + assert!(room_b.read_with(cx_b, |room, _| room.is_muted())); +} + +#[gpui::test] +async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let active_call_a = cx_a.read(ActiveCall::global); + + let channel_id = server + .make_public_channel("the-channel", &client_a, cx_a) + .await; + + let project_a = client_a.build_test_project(cx_a).await; + cx_a.update(|cx| workspace::join_channel(channel_id, client_a.app_state.clone(), None, cx)) + .await + .unwrap(); + + // Client A shares a project in the channel + active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + + // Client B joins channel A as a guest + cx_b.update(|cx| workspace::join_channel(channel_id, client_b.app_state.clone(), None, cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + + // client B opens 1.txt as a guest + let (workspace_b, cx_b) = client_b.active_workspace(cx_b); + let room_b = cx_b + .read(ActiveCall::global) + .update(cx_b, |call, _| call.room().unwrap().clone()); + cx_b.simulate_keystrokes("cmd-p 1 enter"); + + let (project_b, editor_b) = workspace_b.update(cx_b, |workspace, cx| { + ( + workspace.project().clone(), + workspace.active_item_as::(cx).unwrap(), + ) + }); + assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); + assert!(editor_b.update(cx_b, |e, cx| e.read_only(cx))); + assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone())); + assert!(room_b + .update(cx_b, |room, cx| room.share_microphone(cx)) + .await + .is_err()); + + // B is promoted + active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_participant_role( + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + cx, + ) + }) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + // project and buffers are now editable + assert!(project_b.read_with(cx_b, |project, _| !project.is_read_only())); + assert!(editor_b.update(cx_b, |editor, cx| !editor.read_only(cx))); + + // B sees themselves as muted, and can unmute. 
+ assert!(room_b.read_with(cx_b, |room, _| room.can_use_microphone())); + room_b.read_with(cx_b, |room, _| assert!(room.is_muted())); + room_b.update(cx_b, |room, cx| room.toggle_mute(cx)); + cx_a.run_until_parked(); + room_b.read_with(cx_b, |room, _| assert!(!room.is_muted())); + + // B is demoted + active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_participant_role( + client_b.user_id().unwrap(), + proto::ChannelRole::Guest, + cx, + ) + }) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + // project and buffers are no longer editable + assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); + assert!(editor_b.update(cx_b, |editor, cx| editor.read_only(cx))); + assert!(room_b + .update(cx_b, |room, cx| room.share_microphone(cx)) + .await + .is_err()); +} + +#[gpui::test] +async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + + server + .app_state + .db + .get_or_create_user_by_github_account("user_b", Some(100), None, None) + .await + .unwrap(); + + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + // Create a parent channel that requires the Zed CLA + let parent_channel_id = server + .make_channel("the-channel", None, (&client_a, cx_a), &mut []) + .await; + server + .app_state + .db + .set_channel_requires_zed_cla(ChannelId::from_proto(parent_channel_id.0), true) + .await + .unwrap(); + + // Create a public channel that is a child of the parent channel. + let channel_id = client_a + .channel_store() + .update(cx_a, |store, cx| { + store.create_channel("the-sub-channel", Some(parent_channel_id), cx) + }) + .await + .unwrap(); + client_a + .channel_store() + .update(cx_a, |store, cx| { + store.set_channel_visibility(parent_channel_id, proto::ChannelVisibility::Public, cx) + }) + .await + .unwrap(); + client_a + .channel_store() + .update(cx_a, |store, cx| { + store.set_channel_visibility(channel_id, proto::ChannelVisibility::Public, cx) + }) + .await + .unwrap(); + + // Users A and B join the channel. B is a guest. + active_call_a + .update(cx_a, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + active_call_b + .update(cx_b, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + let room_b = cx_b + .read(ActiveCall::global) + .update(cx_b, |call, _| call.room().unwrap().clone()); + assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone())); + + // A tries to grant write access to B, but cannot because B has not + // yet signed the zed CLA. + active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_participant_role( + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + cx, + ) + }) + }) + .await + .unwrap_err(); + cx_a.run_until_parked(); + assert!(room_b.read_with(cx_b, |room, _| !room.can_share_projects())); + assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone())); + + // A tries to grant write access to B, but cannot because B has not + // yet signed the zed CLA. 
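+    // Granting the Talker role, by contrast, does not require the CLA, so
+    // this succeeds and B gains microphone access without being able to
+    // share projects.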
+ active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_participant_role( + client_b.user_id().unwrap(), + proto::ChannelRole::Talker, + cx, + ) + }) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + assert!(room_b.read_with(cx_b, |room, _| !room.can_share_projects())); + assert!(room_b.read_with(cx_b, |room, _| room.can_use_microphone())); + + // User B signs the zed CLA. + server + .app_state + .db + .add_contributor("user_b", Some(100), None, None) + .await + .unwrap(); + + // A can now grant write access to B. + active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_participant_role( + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + cx, + ) + }) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + assert!(room_b.read_with(cx_b, |room, _| room.can_share_projects())); + assert!(room_b.read_with(cx_b, |room, _| room.can_use_microphone())); +} diff --git a/crates/collab/src/tests/channel_message_tests.rs b/crates/collab/src/tests/channel_message_tests.rs new file mode 100644 index 0000000..4596044 --- /dev/null +++ b/crates/collab/src/tests/channel_message_tests.rs @@ -0,0 +1,725 @@ +use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer}; +use channel::{ChannelChat, ChannelMessageId, MessageParams}; +use collab_ui::chat_panel::ChatPanel; +use gpui::{BackgroundExecutor, Model, TestAppContext}; +use rpc::Notification; +use workspace::dock::Panel; + +#[gpui::test] +async fn test_basic_channel_messages( + executor: BackgroundExecutor, + mut cx_a: &mut TestAppContext, + mut cx_b: &mut TestAppContext, + mut cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let channel_chat_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + let channel_chat_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + let message_id = channel_chat_a + .update(cx_a, |c, cx| { + c.send_message( + MessageParams { + text: "hi @user_c!".into(), + mentions: vec![(3..10, client_c.id())], + reply_to_message_id: None, + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + channel_chat_a + .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap()) + .await + .unwrap(); + + executor.run_until_parked(); + channel_chat_b + .update(cx_b, |c, cx| c.send_message("three".into(), cx).unwrap()) + .await + .unwrap(); + + executor.run_until_parked(); + + let channel_chat_c = client_c + .channel_store() + .update(cx_c, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + for (chat, cx) in [ + (&channel_chat_a, &mut cx_a), + (&channel_chat_b, &mut cx_b), + (&channel_chat_c, &mut cx_c), + ] { + chat.update(*cx, |c, _| { + assert_eq!( + c.messages() + .iter() + .map(|m| (m.body.as_str(), m.mentions.as_slice())) + .collect::>(), + vec![ + ("hi @user_c!", [(3..10, client_c.id())].as_slice()), + ("two", &[]), + ("three", &[]) + ], + "results for user {}", + c.client().id(), + ); + }); + } + + client_c.notification_store().update(cx_c, |store, _| { + 
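+        // Client C should have two notifications: the mention in A's first
+        // message and the original channel invitation.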
assert_eq!(store.notification_count(), 2); + assert_eq!(store.unread_notification_count(), 1); + assert_eq!( + store.notification_at(0).unwrap().notification, + Notification::ChannelMessageMention { + message_id, + sender_id: client_a.id(), + channel_id: channel_id.0, + } + ); + assert_eq!( + store.notification_at(1).unwrap().notification, + Notification::ChannelInvitation { + channel_id: channel_id.0, + channel_name: "the-channel".to_string(), + inviter_id: client_a.id() + } + ); + }); +} + +#[gpui::test] +async fn test_rejoin_channel_chat( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + let channel_chat_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + let channel_chat_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + channel_chat_a + .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap()) + .await + .unwrap(); + channel_chat_b + .update(cx_b, |c, cx| c.send_message("two".into(), cx).unwrap()) + .await + .unwrap(); + + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + + // While client A is disconnected, clients A and B both send new messages. + channel_chat_a + .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap()) + .await + .unwrap_err(); + channel_chat_a + .update(cx_a, |c, cx| c.send_message("four".into(), cx).unwrap()) + .await + .unwrap_err(); + channel_chat_b + .update(cx_b, |c, cx| c.send_message("five".into(), cx).unwrap()) + .await + .unwrap(); + channel_chat_b + .update(cx_b, |c, cx| c.send_message("six".into(), cx).unwrap()) + .await + .unwrap(); + + // Client A reconnects. + server.allow_connections(); + executor.advance_clock(RECONNECT_TIMEOUT); + + // Client A fetches the messages that were sent while they were disconnected + // and resends their own messages which failed to send. + let expected_messages = &["one", "two", "five", "six", "three", "four"]; + assert_messages(&channel_chat_a, expected_messages, cx_a); + assert_messages(&channel_chat_b, expected_messages, cx_b); +} + +#[gpui::test] +async fn test_remove_channel_message( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let channel_chat_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + let channel_chat_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + // Client A sends some messages. 
+ channel_chat_a + .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap()) + .await + .unwrap(); + let msg_id_2 = channel_chat_a + .update(cx_a, |c, cx| { + c.send_message( + MessageParams { + text: "two @user_b".to_string(), + mentions: vec![(4..12, client_b.id())], + reply_to_message_id: None, + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + channel_chat_a + .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap()) + .await + .unwrap(); + + // Clients A and B see all of the messages. + executor.run_until_parked(); + let expected_messages = &["one", "two @user_b", "three"]; + assert_messages(&channel_chat_a, expected_messages, cx_a); + assert_messages(&channel_chat_b, expected_messages, cx_b); + + // Ensure that client B received a notification for the mention. + client_b.notification_store().read_with(cx_b, |store, _| { + assert_eq!(store.notification_count(), 2); + let entry = store.notification_at(0).unwrap(); + assert_eq!( + entry.notification, + Notification::ChannelMessageMention { + message_id: msg_id_2, + sender_id: client_a.id(), + channel_id: channel_id.0, + } + ); + }); + + // Client A deletes one of their messages. + channel_chat_a + .update(cx_a, |c, cx| { + let ChannelMessageId::Saved(id) = c.message(1).id else { + panic!("message not saved") + }; + c.remove_message(id, cx) + }) + .await + .unwrap(); + + // Client B sees that the message is gone. + executor.run_until_parked(); + let expected_messages = &["one", "three"]; + assert_messages(&channel_chat_a, expected_messages, cx_a); + assert_messages(&channel_chat_b, expected_messages, cx_b); + + // Client C joins the channel chat, and does not see the deleted message. + let channel_chat_c = client_c + .channel_store() + .update(cx_c, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + assert_messages(&channel_chat_c, expected_messages, cx_c); + + // Ensure we remove the notifications when the message is removed + client_b.notification_store().read_with(cx_b, |store, _| { + // First notification is the channel invitation, second would be the mention + // notification, which should now be removed. + assert_eq!(store.notification_count(), 1); + }); +} + +#[track_caller] +fn assert_messages(chat: &Model, messages: &[&str], cx: &mut TestAppContext) { + assert_eq!( + chat.read_with(cx, |chat, _| { + chat.messages() + .iter() + .map(|m| m.body.clone()) + .collect::>() + }), + messages + ); +} + +#[gpui::test] +async fn test_channel_message_changes( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + // Client A sends a message, client B should see that there is a new message. + let channel_chat_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + channel_chat_a + .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap()) + .await + .unwrap(); + + executor.run_until_parked(); + + let b_has_messages = cx_b.update(|cx| { + client_b + .channel_store() + .read(cx) + .has_new_messages(channel_id) + }); + + assert!(b_has_messages); + + // Opening the chat should clear the changed flag. 
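+    // The chat panel must be built, activated, and pointed at the channel
+    // before the unread indicator is cleared.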
+ cx_b.update(|cx| { + collab_ui::init(&client_b.app_state, cx); + }); + let project_b = client_b.build_empty_local_project(cx_b); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let chat_panel_b = workspace_b.update(cx_b, |workspace, cx| ChatPanel::new(workspace, cx)); + chat_panel_b + .update(cx_b, |chat_panel, cx| { + chat_panel.set_active(true, cx); + chat_panel.select_channel(channel_id, None, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let b_has_messages = cx_b.update(|cx| { + client_b + .channel_store() + .read(cx) + .has_new_messages(channel_id) + }); + + assert!(!b_has_messages); + + // Sending a message while the chat is open should not change the flag. + channel_chat_a + .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap()) + .await + .unwrap(); + + executor.run_until_parked(); + + let b_has_messages = cx_b.update(|cx| { + client_b + .channel_store() + .read(cx) + .has_new_messages(channel_id) + }); + + assert!(!b_has_messages); + + // Sending a message while the chat is closed should change the flag. + chat_panel_b.update(cx_b, |chat_panel, cx| { + chat_panel.set_active(false, cx); + }); + + // Sending a message while the chat is open should not change the flag. + channel_chat_a + .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap()) + .await + .unwrap(); + + executor.run_until_parked(); + + let b_has_messages = cx_b.update(|cx| { + client_b + .channel_store() + .read(cx) + .has_new_messages(channel_id) + }); + + assert!(b_has_messages); + + // Closing the chat should re-enable change tracking + cx_b.update(|_| drop(chat_panel_b)); + + channel_chat_a + .update(cx_a, |c, cx| c.send_message("four".into(), cx).unwrap()) + .await + .unwrap(); + + executor.run_until_parked(); + + let b_has_messages = cx_b.update(|cx| { + client_b + .channel_store() + .read(cx) + .has_new_messages(channel_id) + }); + + assert!(b_has_messages); +} + +#[gpui::test] +async fn test_chat_replies(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + // Client A sends a message, client B should see that there is a new message. 
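+    // This test exercises reply threading via `reply_to_message_id`.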
+ let channel_chat_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + let channel_chat_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + let msg_id = channel_chat_a + .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap()) + .await + .unwrap(); + + cx_a.run_until_parked(); + + let reply_id = channel_chat_b + .update(cx_b, |c, cx| { + c.send_message( + MessageParams { + text: "reply".into(), + reply_to_message_id: Some(msg_id), + mentions: Vec::new(), + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + + channel_chat_a.update(cx_a, |channel_chat, _| { + assert_eq!( + channel_chat + .find_loaded_message(reply_id) + .unwrap() + .reply_to_message_id, + Some(msg_id), + ) + }); +} + +#[gpui::test] +async fn test_chat_editing(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + // Client A sends a message, client B should see that there is a new message. + let channel_chat_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + let channel_chat_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx)) + .await + .unwrap(); + + let msg_id = channel_chat_a + .update(cx_a, |c, cx| { + c.send_message( + MessageParams { + text: "Initial message".into(), + reply_to_message_id: None, + mentions: Vec::new(), + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + + channel_chat_a + .update(cx_a, |c, cx| { + c.update_message( + msg_id, + MessageParams { + text: "Updated body".into(), + reply_to_message_id: None, + mentions: Vec::new(), + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + channel_chat_a.update(cx_a, |channel_chat, _| { + let update_message = channel_chat.find_loaded_message(msg_id).unwrap(); + + assert_eq!(update_message.body, "Updated body"); + assert_eq!(update_message.mentions, Vec::new()); + }); + channel_chat_b.update(cx_b, |channel_chat, _| { + let update_message = channel_chat.find_loaded_message(msg_id).unwrap(); + + assert_eq!(update_message.body, "Updated body"); + assert_eq!(update_message.mentions, Vec::new()); + }); + + // test mentions are updated correctly + + client_b.notification_store().read_with(cx_b, |store, _| { + assert_eq!(store.notification_count(), 1); + let entry = store.notification_at(0).unwrap(); + assert!(matches!( + entry.notification, + Notification::ChannelInvitation { .. 
} + ),); + }); + + channel_chat_a + .update(cx_a, |c, cx| { + c.update_message( + msg_id, + MessageParams { + text: "Updated body including a mention for @user_b".into(), + reply_to_message_id: None, + mentions: vec![(37..45, client_b.id())], + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + channel_chat_a.update(cx_a, |channel_chat, _| { + assert_eq!( + channel_chat.find_loaded_message(msg_id).unwrap().body, + "Updated body including a mention for @user_b", + ) + }); + channel_chat_b.update(cx_b, |channel_chat, _| { + assert_eq!( + channel_chat.find_loaded_message(msg_id).unwrap().body, + "Updated body including a mention for @user_b", + ) + }); + client_b.notification_store().read_with(cx_b, |store, _| { + assert_eq!(store.notification_count(), 2); + let entry = store.notification_at(0).unwrap(); + assert_eq!( + entry.notification, + Notification::ChannelMessageMention { + message_id: msg_id, + sender_id: client_a.id(), + channel_id: channel_id.0, + } + ); + }); + + // Test update message and keep the mention and check that the body is updated correctly + + channel_chat_a + .update(cx_a, |c, cx| { + c.update_message( + msg_id, + MessageParams { + text: "Updated body v2 including a mention for @user_b".into(), + reply_to_message_id: None, + mentions: vec![(37..45, client_b.id())], + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + channel_chat_a.update(cx_a, |channel_chat, _| { + assert_eq!( + channel_chat.find_loaded_message(msg_id).unwrap().body, + "Updated body v2 including a mention for @user_b", + ) + }); + channel_chat_b.update(cx_b, |channel_chat, _| { + assert_eq!( + channel_chat.find_loaded_message(msg_id).unwrap().body, + "Updated body v2 including a mention for @user_b", + ) + }); + + client_b.notification_store().read_with(cx_b, |store, _| { + let message = store.channel_message_for_id(msg_id); + assert!(message.is_some()); + assert_eq!( + message.unwrap().body, + "Updated body v2 including a mention for @user_b" + ); + assert_eq!(store.notification_count(), 2); + let entry = store.notification_at(0).unwrap(); + assert_eq!( + entry.notification, + Notification::ChannelMessageMention { + message_id: msg_id, + sender_id: client_a.id(), + channel_id: channel_id.0, + } + ); + }); + + // If we remove a mention from a message the corresponding mention notification + // should also be removed. + + channel_chat_a + .update(cx_a, |c, cx| { + c.update_message( + msg_id, + MessageParams { + text: "Updated body without a mention".into(), + reply_to_message_id: None, + mentions: vec![], + }, + cx, + ) + .unwrap() + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + channel_chat_a.update(cx_a, |channel_chat, _| { + assert_eq!( + channel_chat.find_loaded_message(msg_id).unwrap().body, + "Updated body without a mention", + ) + }); + channel_chat_b.update(cx_b, |channel_chat, _| { + assert_eq!( + channel_chat.find_loaded_message(msg_id).unwrap().body, + "Updated body without a mention", + ) + }); + client_b.notification_store().read_with(cx_b, |store, _| { + // First notification is the channel invitation, second would be the mention + // notification, which should now be removed. 
+ assert_eq!(store.notification_count(), 1); + }); +} diff --git a/crates/collab/src/tests/channel_tests.rs b/crates/collab/src/tests/channel_tests.rs new file mode 100644 index 0000000..d9fdab7 --- /dev/null +++ b/crates/collab/src/tests/channel_tests.rs @@ -0,0 +1,1460 @@ +use crate::{ + db::{self, UserId}, + rpc::RECONNECT_TIMEOUT, + tests::{room_participants, RoomParticipants, TestServer}, +}; +use call::ActiveCall; +use channel::{ChannelMembership, ChannelStore}; +use client::{ChannelId, User}; +use futures::future::try_join_all; +use gpui::{BackgroundExecutor, Model, SharedString, TestAppContext}; +use rpc::{ + proto::{self, ChannelRole}, + RECEIVE_TIMEOUT, +}; +use std::sync::Arc; + +#[gpui::test] +async fn test_core_channels( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_a_id = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("channel-a", None, cx) + }) + .await + .unwrap(); + let channel_b_id = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("channel-b", Some(channel_a_id), cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_channels( + client_a.channel_store(), + cx_a, + &[ + ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }, + ExpectedChannel { + id: channel_b_id, + name: "channel-b".into(), + depth: 1, + }, + ], + ); + + cx_b.read(|cx| { + client_b.channel_store().read_with(cx, |channels, _| { + assert!(channels.ordered_channels().collect::>().is_empty()) + }) + }); + + // Invite client B to channel A as client A. + client_a + .channel_store() + .update(cx_a, |store, cx| { + assert!(!store.has_pending_channel_invite(channel_a_id, client_b.user_id().unwrap())); + + let invite = store.invite_member( + channel_a_id, + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + cx, + ); + + // Make sure we're synchronously storing the pending invite + assert!(store.has_pending_channel_invite(channel_a_id, client_b.user_id().unwrap())); + invite + }) + .await + .unwrap(); + + // Client A sees that B has been invited. + executor.run_until_parked(); + assert_channel_invitations( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }], + ); + + let members = client_a + .channel_store() + .update(cx_a, |store, cx| { + assert!(!store.has_pending_channel_invite(channel_a_id, client_b.user_id().unwrap())); + store.fuzzy_search_members(channel_a_id, "".to_string(), 10, cx) + }) + .await + .unwrap(); + assert_members_eq( + &members, + &[ + ( + client_a.user_id().unwrap(), + proto::ChannelRole::Admin, + proto::channel_member::Kind::Member, + ), + ( + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + proto::channel_member::Kind::Invitee, + ), + ], + ); + + // Client B accepts the invitation. + client_b + .channel_store() + .update(cx_b, |channels, cx| { + channels.respond_to_channel_invite(channel_a_id, true, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + + // Client B now sees that they are a member of channel A and its existing subchannels. 
+ assert_channel_invitations(client_b.channel_store(), cx_b, &[]); + assert_channels( + client_b.channel_store(), + cx_b, + &[ + ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }, + ExpectedChannel { + id: channel_b_id, + name: "channel-b".into(), + depth: 1, + }, + ], + ); + + let channel_c_id = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("channel-c", Some(channel_b_id), cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_channels( + client_b.channel_store(), + cx_b, + &[ + ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }, + ExpectedChannel { + id: channel_b_id, + name: "channel-b".into(), + depth: 1, + }, + ExpectedChannel { + id: channel_c_id, + name: "channel-c".into(), + depth: 2, + }, + ], + ); + + // Update client B's membership to channel A to be an admin. + client_a + .channel_store() + .update(cx_a, |store, cx| { + store.set_member_role( + channel_a_id, + client_b.user_id().unwrap(), + proto::ChannelRole::Admin, + cx, + ) + }) + .await + .unwrap(); + executor.run_until_parked(); + + // Observe that client B is now an admin of channel A, and that + // their admin privileges extend to subchannels of channel A. + assert_channel_invitations(client_b.channel_store(), cx_b, &[]); + assert_channels( + client_b.channel_store(), + cx_b, + &[ + ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }, + ExpectedChannel { + id: channel_b_id, + name: "channel-b".into(), + depth: 1, + }, + ExpectedChannel { + id: channel_c_id, + name: "channel-c".into(), + depth: 2, + }, + ], + ); + + // Client A deletes the channel, deletion also deletes subchannels. + client_a + .channel_store() + .update(cx_a, |channel_store, _| { + channel_store.remove_channel(channel_b_id) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_channels( + client_a.channel_store(), + cx_a, + &[ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }], + ); + assert_channels( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }], + ); + + // Remove client B + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.remove_member(channel_a_id, client_b.user_id().unwrap(), cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // Client A still has their channel + assert_channels( + client_a.channel_store(), + cx_a, + &[ExpectedChannel { + id: channel_a_id, + name: "channel-a".into(), + depth: 0, + }], + ); + + // Client B no longer has access to the channel + assert_channels(client_b.channel_store(), cx_b, &[]); + + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + server + .app_state + .db + .rename_channel( + db::ChannelId::from_proto(channel_a_id.0), + UserId::from_proto(client_a.id()), + "channel-a-renamed", + ) + .await + .unwrap(); + + server.allow_connections(); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + assert_channels( + client_a.channel_store(), + cx_a, + &[ExpectedChannel { + id: channel_a_id, + name: "channel-a-renamed".into(), + depth: 0, + }], + ); +} + +#[track_caller] +fn assert_participants_eq(participants: &[Arc], expected_partitipants: &[u64]) { + assert_eq!( + participants.iter().map(|p| p.id).collect::>(), + expected_partitipants + ); +} + +#[track_caller] +fn 
assert_members_eq( + members: &[ChannelMembership], + expected_members: &[(u64, proto::ChannelRole, proto::channel_member::Kind)], +) { + assert_eq!( + members + .iter() + .map(|member| (member.user.id, member.role, member.kind)) + .collect::>(), + expected_members + ); +} + +#[gpui::test] +async fn test_joining_channel_ancestor_member( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let parent_id = server + .make_channel("parent", None, (&client_a, cx_a), &mut [(&client_b, cx_b)]) + .await; + + let sub_id = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("sub_channel", Some(parent_id), cx) + }) + .await + .unwrap(); + + let active_call_b = cx_b.read(ActiveCall::global); + + assert!(active_call_b + .update(cx_b, |active_call, cx| active_call.join_channel(sub_id, cx)) + .await + .is_ok()); +} + +#[gpui::test] +async fn test_channel_room( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let zed_id = server + .make_channel( + "zed", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + active_call_a + .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx)) + .await + .unwrap(); + + // Give everyone a chance to observe user A joining + executor.run_until_parked(); + let room_a = + cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone())); + cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected()))); + + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_a.user_id().unwrap()], + ); + }) + }); + + assert_channels( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + id: zed_id, + name: "zed".into(), + depth: 0, + }], + ); + cx_b.read(|cx| { + client_b.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_a.user_id().unwrap()], + ); + }) + }); + + cx_c.read(|cx| { + client_c.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_a.user_id().unwrap()], + ); + }) + }); + + active_call_b + .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx)) + .await + .unwrap(); + + executor.run_until_parked(); + + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_a.user_id().unwrap(), client_b.user_id().unwrap()], + ); + }) + }); + + cx_b.read(|cx| { + client_b.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_a.user_id().unwrap(), client_b.user_id().unwrap()], + ); + }) + }); + + cx_c.read(|cx| { + client_c.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + 
channels.channel_participants(zed_id), + &[client_a.user_id().unwrap(), client_b.user_id().unwrap()], + ); + }) + }); + + let room_a = + cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone())); + cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected()))); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec![] + } + ); + + let room_b = + cx_b.read(|cx| active_call_b.read_with(cx, |call, _| call.room().unwrap().clone())); + cx_b.read(|cx| room_b.read_with(cx, |room, _| assert!(room.is_connected()))); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec![] + } + ); + + // Make sure that leaving and rejoining works + + active_call_a + .update(cx_a, |active_call, cx| active_call.hang_up(cx)) + .await + .unwrap(); + + executor.run_until_parked(); + + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_b.user_id().unwrap()], + ); + }) + }); + + cx_b.read(|cx| { + client_b.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_b.user_id().unwrap()], + ); + }) + }); + + cx_c.read(|cx| { + client_c.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_b.user_id().unwrap()], + ); + }) + }); + + active_call_b + .update(cx_b, |active_call, cx| active_call.hang_up(cx)) + .await + .unwrap(); + + executor.run_until_parked(); + + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq(channels.channel_participants(zed_id), &[]); + }) + }); + + cx_b.read(|cx| { + client_b.channel_store().read_with(cx, |channels, _| { + assert_participants_eq(channels.channel_participants(zed_id), &[]); + }) + }); + + cx_c.read(|cx| { + client_c.channel_store().read_with(cx, |channels, _| { + assert_participants_eq(channels.channel_participants(zed_id), &[]); + }) + }); + + active_call_a + .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx)) + .await + .unwrap(); + + active_call_b + .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx)) + .await + .unwrap(); + + executor.run_until_parked(); + + let room_a = + cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone())); + cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected()))); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec![] + } + ); + + let room_b = + cx_b.read(|cx| active_call_b.read_with(cx, |call, _| call.room().unwrap().clone())); + cx_b.read(|cx| room_b.read_with(cx, |room, _| assert!(room.is_connected()))); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec![] + } + ); +} + +#[gpui::test] +async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppContext) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + + let zed_id = server + .make_channel("zed", None, (&client_a, cx_a), &mut []) + .await; + let rust_id = server + .make_channel("rust", None, (&client_a, cx_a), &mut []) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + + active_call_a + .update(cx_a, |active_call, cx| 
active_call.join_channel(zed_id, cx)) + .await + .unwrap(); + + // Give everything a chance to observe user A joining + executor.run_until_parked(); + + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(zed_id), + &[client_a.user_id().unwrap()], + ); + assert_participants_eq(channels.channel_participants(rust_id), &[]); + }) + }); + + active_call_a + .update(cx_a, |active_call, cx| { + active_call.join_channel(rust_id, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq(channels.channel_participants(zed_id), &[]); + assert_participants_eq( + channels.channel_participants(rust_id), + &[client_a.user_id().unwrap()], + ); + }) + }); +} + +#[gpui::test] +async fn test_permissions_update_while_invited( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let rust_id = server + .make_channel("rust", None, (&client_a, cx_a), &mut []) + .await; + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.invite_member( + rust_id, + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + assert_channel_invitations( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + depth: 0, + id: rust_id, + name: "rust".into(), + }], + ); + assert_channels(client_b.channel_store(), cx_b, &[]); + + // Update B's invite before they've accepted it + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.set_member_role( + rust_id, + client_b.user_id().unwrap(), + proto::ChannelRole::Admin, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + assert_channel_invitations( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + depth: 0, + id: rust_id, + name: "rust".into(), + }], + ); + assert_channels(client_b.channel_store(), cx_b, &[]); +} + +#[gpui::test] +async fn test_channel_rename( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let rust_id = server + .make_channel("rust", None, (&client_a, cx_a), &mut [(&client_b, cx_b)]) + .await; + + // Rename the channel + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.rename(rust_id, "#rust-archive", cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // Client A sees the channel with its new name. + assert_channels( + client_a.channel_store(), + cx_a, + &[ExpectedChannel { + depth: 0, + id: rust_id, + name: "rust-archive".into(), + }], + ); + + // Client B sees the channel with its new name. 
+ assert_channels( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + depth: 0, + id: rust_id, + name: "rust-archive".into(), + }], + ); +} + +#[gpui::test] +async fn test_call_from_channel( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let channel_id = server + .make_channel( + "x", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + active_call_a + .update(cx_a, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + + // Client A calls client B while in the channel. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + // Client B accepts the call. + executor.run_until_parked(); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + // Client B sees that they are now in the channel + executor.run_until_parked(); + cx_b.read(|cx| { + active_call_b.read_with(cx, |call, cx| { + assert_eq!(call.channel_id(cx), Some(channel_id)); + }) + }); + cx_b.read(|cx| { + client_b.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(channel_id), + &[client_a.user_id().unwrap(), client_b.user_id().unwrap()], + ); + }) + }); + + // Clients A and C also see that client B is in the channel. + cx_a.read(|cx| { + client_a.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(channel_id), + &[client_a.user_id().unwrap(), client_b.user_id().unwrap()], + ); + }) + }); + cx_c.read(|cx| { + client_c.channel_store().read_with(cx, |channels, _| { + assert_participants_eq( + channels.channel_participants(channel_id), + &[client_a.user_id().unwrap(), client_b.user_id().unwrap()], + ); + }) + }); +} + +#[gpui::test] +async fn test_lost_channel_creation( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let channel_id = server + .make_channel("x", None, (&client_a, cx_a), &mut []) + .await; + + // Invite a member + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.invite_member( + channel_id, + client_b.user_id().unwrap(), + proto::ChannelRole::Member, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // Sanity check, B has the invitation + assert_channel_invitations( + client_b.channel_store(), + cx_b, + &[ExpectedChannel { + depth: 0, + id: channel_id, + name: "x".into(), + }], + ); + + // A creates a subchannel while the invite is still pending. 
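+ // B's membership is still pending at this point, so the creation is easy to "lose": once B accepts the invite below, B must still end up seeing the subchannel.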
+ let subchannel_id = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("subchannel", Some(channel_id), cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // Make sure A sees their new channel + assert_channels( + client_a.channel_store(), + cx_a, + &[ + ExpectedChannel { + depth: 0, + id: channel_id, + name: "x".into(), + }, + ExpectedChannel { + depth: 1, + id: subchannel_id, + name: "subchannel".into(), + }, + ], + ); + + // Client B accepts the invite + client_b + .channel_store() + .update(cx_b, |channel_store, cx| { + channel_store.respond_to_channel_invite(channel_id, true, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // Client B should now see the channel + assert_channels( + client_b.channel_store(), + cx_b, + &[ + ExpectedChannel { + depth: 0, + id: channel_id, + name: "x".into(), + }, + ExpectedChannel { + depth: 1, + id: subchannel_id, + name: "subchannel".into(), + }, + ], + ); +} + +#[gpui::test] +async fn test_channel_link_notifications( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let user_b = client_b.user_id().unwrap(); + let user_c = client_c.user_id().unwrap(); + + let channels = server + .make_channel_tree(&[("zed", None)], (&client_a, cx_a)) + .await; + let zed_channel = channels[0]; + + try_join_all(client_a.channel_store().update(cx_a, |channel_store, cx| { + [ + channel_store.set_channel_visibility(zed_channel, proto::ChannelVisibility::Public, cx), + channel_store.invite_member(zed_channel, user_b, proto::ChannelRole::Member, cx), + channel_store.invite_member(zed_channel, user_c, proto::ChannelRole::Guest, cx), + ] + })) + .await + .unwrap(); + + executor.run_until_parked(); + + client_b + .channel_store() + .update(cx_b, |channel_store, cx| { + channel_store.respond_to_channel_invite(zed_channel, true, cx) + }) + .await + .unwrap(); + + client_c + .channel_store() + .update(cx_c, |channel_store, cx| { + channel_store.respond_to_channel_invite(zed_channel, true, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // we have an admin (a), member (b) and guest (c) all part of the zed channel. 
+ + // create a new private channel, make it public, and move it under the previous one, and verify it shows for b and not c + let active_channel = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("active", Some(zed_channel), cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // the new channel shows for b and not c + assert_channels_list_shape( + client_a.channel_store(), + cx_a, + &[(zed_channel, 0), (active_channel, 1)], + ); + assert_channels_list_shape( + client_b.channel_store(), + cx_b, + &[(zed_channel, 0), (active_channel, 1)], + ); + assert_channels_list_shape(client_c.channel_store(), cx_c, &[(zed_channel, 0)]); + + let vim_channel = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("vim", Some(zed_channel), cx) + }) + .await + .unwrap(); + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.set_channel_visibility(vim_channel, proto::ChannelVisibility::Public, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // the new channel shows for b and c + assert_channels_list_shape( + client_a.channel_store(), + cx_a, + &[(zed_channel, 0), (active_channel, 1), (vim_channel, 1)], + ); + assert_channels_list_shape( + client_b.channel_store(), + cx_b, + &[(zed_channel, 0), (active_channel, 1), (vim_channel, 1)], + ); + assert_channels_list_shape( + client_c.channel_store(), + cx_c, + &[(zed_channel, 0), (vim_channel, 1)], + ); + + let helix_channel = client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.create_channel("helix", Some(zed_channel), cx) + }) + .await + .unwrap(); + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.move_channel(helix_channel, vim_channel, cx) + }) + .await + .unwrap(); + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.set_channel_visibility( + helix_channel, + proto::ChannelVisibility::Public, + cx, + ) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + // the new channel shows for b and c + assert_channels_list_shape( + client_b.channel_store(), + cx_b, + &[ + (zed_channel, 0), + (active_channel, 1), + (vim_channel, 1), + (helix_channel, 2), + ], + ); + assert_channels_list_shape( + client_c.channel_store(), + cx_c, + &[(zed_channel, 0), (vim_channel, 1), (helix_channel, 2)], + ); +} + +#[gpui::test] +async fn test_channel_membership_notifications( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_c").await; + + let user_b = client_b.user_id().unwrap(); + + let channels = server + .make_channel_tree( + &[("zed", None), ("vim", Some("zed")), ("opensource", None)], + (&client_a, cx_a), + ) + .await; + let zed_channel = channels[0]; + let vim_channel = channels[1]; + let opensource_channel = channels[2]; + + try_join_all(client_a.channel_store().update(cx_a, |channel_store, cx| { + [ + channel_store.set_channel_visibility(zed_channel, proto::ChannelVisibility::Public, cx), + channel_store.set_channel_visibility(vim_channel, proto::ChannelVisibility::Public, cx), + channel_store.invite_member(zed_channel, user_b, proto::ChannelRole::Admin, cx), + channel_store.invite_member(opensource_channel, user_b, proto::ChannelRole::Member, cx), + ] + })) + .await + .unwrap(); + + 
executor.run_until_parked(); + + client_b + .channel_store() + .update(cx_b, |channel_store, cx| { + channel_store.respond_to_channel_invite(zed_channel, true, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // we have an admin (a), and a guest (b) with access to all of zed, and membership in vim. + assert_channels( + client_b.channel_store(), + cx_b, + &[ + ExpectedChannel { + depth: 0, + id: zed_channel, + name: "zed".into(), + }, + ExpectedChannel { + depth: 1, + id: vim_channel, + name: "vim".into(), + }, + ], + ); + + client_b.channel_store().update(cx_b, |channel_store, _| { + channel_store.is_channel_admin(zed_channel) + }); + + client_b + .channel_store() + .update(cx_b, |channel_store, cx| { + channel_store.respond_to_channel_invite(opensource_channel, true, cx) + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.set_member_role(opensource_channel, user_b, ChannelRole::Admin, cx) + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + + client_b.channel_store().update(cx_b, |channel_store, _| { + channel_store.is_channel_admin(opensource_channel) + }); +} + +#[gpui::test] +async fn test_guest_access( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channels = server + .make_channel_tree( + &[("channel-a", None), ("channel-b", Some("channel-a"))], + (&client_a, cx_a), + ) + .await; + let channel_a = channels[0]; + let channel_b = channels[1]; + + let active_call_b = cx_b.read(ActiveCall::global); + + // Non-members should not be allowed to join + assert!(active_call_b + .update(cx_b, |call, cx| call.join_channel(channel_a, cx)) + .await + .is_err()); + + // Make channels A and B public + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.set_channel_visibility(channel_a, proto::ChannelVisibility::Public, cx) + }) + .await + .unwrap(); + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.set_channel_visibility(channel_b, proto::ChannelVisibility::Public, cx) + }) + .await + .unwrap(); + + // Client B joins channel A as a guest + active_call_b + .update(cx_b, |call, cx| call.join_channel(channel_a, cx)) + .await + .unwrap(); + + executor.run_until_parked(); + assert_channels_list_shape( + client_a.channel_store(), + cx_a, + &[(channel_a, 0), (channel_b, 1)], + ); + assert_channels_list_shape( + client_b.channel_store(), + cx_b, + &[(channel_a, 0), (channel_b, 1)], + ); + + client_a.channel_store().update(cx_a, |channel_store, _| { + let participants = channel_store.channel_participants(channel_a); + assert_eq!(participants.len(), 1); + assert_eq!(participants[0].id, client_b.user_id().unwrap()); + }); +} + +#[gpui::test] +async fn test_invite_access( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channels = server + .make_channel_tree( + &[("channel-a", None), ("channel-b", Some("channel-a"))], + (&client_a, cx_a), + ) + .await; + let channel_a_id = channels[0]; + let channel_b_id = channels[0]; + + let active_call_b = 
cx_b.read(ActiveCall::global); + + // should not be allowed to join + assert!(active_call_b + .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx)) + .await + .is_err()); + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.invite_member( + channel_a_id, + client_b.user_id().unwrap(), + ChannelRole::Member, + cx, + ) + }) + .await + .unwrap(); + + active_call_b + .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx)) + .await + .unwrap(); + + executor.run_until_parked(); + + client_b.channel_store().update(cx_b, |channel_store, _| { + assert!(channel_store.channel_for_id(channel_b_id).is_some()); + assert!(channel_store.channel_for_id(channel_a_id).is_some()); + }); + + client_a.channel_store().update(cx_a, |channel_store, _| { + let participants = channel_store.channel_participants(channel_b_id); + assert_eq!(participants.len(), 1); + assert_eq!(participants[0].id, client_b.user_id().unwrap()); + }) +} + +#[gpui::test] +async fn test_leave_channel(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let (_server, _client_a, client_b, channel_id) = TestServer::start2(cx_a, cx_b).await; + + client_b + .channel_store() + .update(cx_b, |channel_store, cx| { + channel_store.remove_member(channel_id, client_b.user_id().unwrap(), cx) + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + + assert_eq!( + client_b + .channel_store() + .read_with(cx_b, |store, _| store.channels().count()), + 0 + ); +} + +#[gpui::test] +async fn test_channel_moving( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + _cx_b: &mut TestAppContext, + _cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + + let channels = server + .make_channel_tree( + &[ + ("channel-a", None), + ("channel-b", Some("channel-a")), + ("channel-c", Some("channel-b")), + ("channel-d", Some("channel-c")), + ], + (&client_a, cx_a), + ) + .await; + let channel_a_id = channels[0]; + let channel_b_id = channels[1]; + let channel_c_id = channels[2]; + let channel_d_id = channels[3]; + + // Current shape: + // a - b - c - d + assert_channels_list_shape( + client_a.channel_store(), + cx_a, + &[ + (channel_a_id, 0), + (channel_b_id, 1), + (channel_c_id, 2), + (channel_d_id, 3), + ], + ); + + client_a + .channel_store() + .update(cx_a, |channel_store, cx| { + channel_store.move_channel(channel_d_id, channel_b_id, cx) + }) + .await + .unwrap(); + + // Current shape: + // /- d + // a - b -- c + assert_channels_list_shape( + client_a.channel_store(), + cx_a, + &[ + (channel_a_id, 0), + (channel_b_id, 1), + (channel_c_id, 2), + (channel_d_id, 2), + ], + ); +} + +#[derive(Debug, PartialEq)] +struct ExpectedChannel { + depth: usize, + id: ChannelId, + name: SharedString, +} + +#[track_caller] +fn assert_channel_invitations( + channel_store: &Model, + cx: &TestAppContext, + expected_channels: &[ExpectedChannel], +) { + let actual = cx.read(|cx| { + channel_store.read_with(cx, |store, _| { + store + .channel_invitations() + .iter() + .map(|channel| ExpectedChannel { + depth: 0, + name: channel.name.clone(), + id: channel.id, + }) + .collect::>() + }) + }); + assert_eq!(actual, expected_channels); +} + +#[track_caller] +fn assert_channels( + channel_store: &Model, + cx: &TestAppContext, + expected_channels: &[ExpectedChannel], +) { + let actual = cx.read(|cx| { + channel_store.read_with(cx, |store, _| { + store + .ordered_channels() + .map(|(depth, channel)| ExpectedChannel { + depth, + name: 
channel.name.clone(), + id: channel.id, + }) + .collect::>() + }) + }); + pretty_assertions::assert_eq!(actual, expected_channels); +} + +#[track_caller] +fn assert_channels_list_shape( + channel_store: &Model, + cx: &TestAppContext, + expected_channels: &[(ChannelId, usize)], +) { + let actual = cx.read(|cx| { + channel_store.read_with(cx, |store, _| { + store + .ordered_channels() + .map(|(depth, channel)| (channel.id, depth)) + .collect::>() + }) + }); + pretty_assertions::assert_eq!(actual, expected_channels); +} diff --git a/crates/collab/src/tests/dev_server_tests.rs b/crates/collab/src/tests/dev_server_tests.rs new file mode 100644 index 0000000..c0b8f55 --- /dev/null +++ b/crates/collab/src/tests/dev_server_tests.rs @@ -0,0 +1,615 @@ +use std::{path::Path, sync::Arc}; + +use call::ActiveCall; +use editor::Editor; +use fs::Fs; +use gpui::{TestAppContext, VisualTestContext, WindowHandle}; +use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt}; +use serde_json::json; +use workspace::{AppState, Workspace}; + +use crate::tests::{following_tests::join_channel, TestServer}; + +use super::TestClient; + +#[gpui::test] +async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { + let (server, client) = TestServer::start1(cx).await; + + let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone()); + + let resp = store + .update(cx, |store, cx| { + store.create_dev_server("server-1".to_string(), None, cx) + }) + .await + .unwrap(); + + store.update(cx, |store, _| { + assert_eq!(store.dev_servers().len(), 1); + assert_eq!(store.dev_servers()[0].name, "server-1"); + assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline); + }); + + let dev_server = server.create_dev_server(resp.access_token, cx2).await; + cx.executor().run_until_parked(); + store.update(cx, |store, _| { + assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online); + }); + + dev_server + .fs() + .insert_tree( + "/remote", + json!({ + "1.txt": "remote\nremote\nremote", + "2.js": "function two() { return 2; }", + "3.rs": "mod test", + }), + ) + .await; + + store + .update(cx, |store, cx| { + store.create_dev_server_project( + client::DevServerId(resp.dev_server_id), + "/remote".to_string(), + cx, + ) + }) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + let remote_workspace = store + .update(cx, |store, cx| { + let projects = store.dev_server_projects(); + assert_eq!(projects.len(), 1); + assert_eq!(projects[0].path, "/remote"); + workspace::join_dev_server_project( + projects[0].project_id.unwrap(), + client.app_state.clone(), + None, + cx, + ) + }) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut(); + cx.simulate_keystrokes("cmd-p 1 enter"); + + let editor = remote_workspace + .update(cx, |ws, cx| { + ws.active_item_as::(cx).unwrap().clone() + }) + .unwrap(); + editor.update(cx, |ed, cx| { + assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote"); + }); + cx.simulate_input("wow!"); + cx.simulate_keystrokes("cmd-s"); + + let content = dev_server + .fs() + .load(&Path::new("/remote/1.txt")) + .await + .unwrap(); + assert_eq!(content, "wow!remote\nremote\nremote\n"); +} + +#[gpui::test] +async fn test_dev_server_env_files( + cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, +) { + let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; + + let (_dev_server, remote_workspace) = + 
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; + + cx1.executor().run_until_parked(); + + let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut(); + cx1.simulate_keystrokes("cmd-p . e enter"); + + let editor = remote_workspace + .update(cx1, |ws, cx| { + ws.active_item_as::(cx).unwrap().clone() + }) + .unwrap(); + editor.update(cx1, |ed, cx| { + assert_eq!(ed.text(cx).to_string(), "SECRET"); + }); + + cx1.update(|cx| { + workspace::join_channel( + channel_id, + client1.app_state.clone(), + Some(remote_workspace), + cx, + ) + }) + .await + .unwrap(); + cx1.executor().run_until_parked(); + + remote_workspace + .update(cx1, |ws, cx| { + assert!(ws.project().read(cx).is_shared()); + }) + .unwrap(); + + join_channel(channel_id, &client2, cx2).await.unwrap(); + cx2.executor().run_until_parked(); + + let (workspace2, cx2) = client2.active_workspace(cx2); + let editor = workspace2.update(cx2, |ws, cx| { + ws.active_item_as::(cx).unwrap().clone() + }); + // TODO: it'd be nice to hide .env files from other people + editor.update(cx2, |ed, cx| { + assert_eq!(ed.text(cx).to_string(), "SECRET"); + }); +} + +async fn create_dev_server_project( + server: &TestServer, + client_app_state: Arc, + cx: &mut TestAppContext, + cx_devserver: &mut TestAppContext, +) -> (TestClient, WindowHandle) { + let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone()); + + let resp = store + .update(cx, |store, cx| { + store.create_dev_server("server-1".to_string(), None, cx) + }) + .await + .unwrap(); + let dev_server = server + .create_dev_server(resp.access_token, cx_devserver) + .await; + + cx.executor().run_until_parked(); + + dev_server + .fs() + .insert_tree( + "/remote", + json!({ + "1.txt": "remote\nremote\nremote", + ".env": "SECRET", + }), + ) + .await; + + store + .update(cx, |store, cx| { + store.create_dev_server_project( + client::DevServerId(resp.dev_server_id), + "/remote".to_string(), + cx, + ) + }) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + let workspace = store + .update(cx, |store, cx| { + let projects = store.dev_server_projects(); + assert_eq!(projects.len(), 1); + assert_eq!(projects[0].path, "/remote"); + workspace::join_dev_server_project( + projects[0].project_id.unwrap(), + client_app_state, + None, + cx, + ) + }) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + (dev_server, workspace) +} + +#[gpui::test] +async fn test_dev_server_leave_room( + cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, +) { + let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; + + let (_dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; + + cx1.update(|cx| { + workspace::join_channel( + channel_id, + client1.app_state.clone(), + Some(remote_workspace), + cx, + ) + }) + .await + .unwrap(); + cx1.executor().run_until_parked(); + + remote_workspace + .update(cx1, |ws, cx| { + assert!(ws.project().read(cx).is_shared()); + }) + .unwrap(); + + join_channel(channel_id, &client2, cx2).await.unwrap(); + cx2.executor().run_until_parked(); + + cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx))) + .await + .unwrap(); + + cx1.executor().run_until_parked(); + + let (workspace, cx2) = client2.active_workspace(cx2); + cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected())); +} + +#[gpui::test] +async fn test_dev_server_delete( 
+ cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, +) { + let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; + + let (_dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; + + cx1.update(|cx| { + workspace::join_channel( + channel_id, + client1.app_state.clone(), + Some(remote_workspace), + cx, + ) + }) + .await + .unwrap(); + cx1.executor().run_until_parked(); + + remote_workspace + .update(cx1, |ws, cx| { + assert!(ws.project().read(cx).is_shared()); + }) + .unwrap(); + + join_channel(channel_id, &client2, cx2).await.unwrap(); + cx2.executor().run_until_parked(); + + cx1.update(|cx| { + dev_server_projects::Store::global(cx).update(cx, |store, cx| { + store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx) + }) + }) + .await + .unwrap(); + + cx1.executor().run_until_parked(); + + let (workspace, cx2) = client2.active_workspace(cx2); + cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected())); + + cx1.update(|cx| { + dev_server_projects::Store::global(cx).update(cx, |store, _| { + assert_eq!(store.dev_server_projects().len(), 0); + }) + }) +} + +#[gpui::test] +async fn test_dev_server_rename( + cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, +) { + let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; + + let (_dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; + + cx1.update(|cx| { + workspace::join_channel( + channel_id, + client1.app_state.clone(), + Some(remote_workspace), + cx, + ) + }) + .await + .unwrap(); + cx1.executor().run_until_parked(); + + remote_workspace + .update(cx1, |ws, cx| { + assert!(ws.project().read(cx).is_shared()); + }) + .unwrap(); + + join_channel(channel_id, &client2, cx2).await.unwrap(); + cx2.executor().run_until_parked(); + + cx1.update(|cx| { + dev_server_projects::Store::global(cx).update(cx, |store, cx| { + store.rename_dev_server( + store.dev_servers().first().unwrap().id, + "name-edited".to_string(), + cx, + ) + }) + }) + .await + .unwrap(); + + cx1.executor().run_until_parked(); + + cx1.update(|cx| { + dev_server_projects::Store::global(cx).update(cx, |store, _| { + assert_eq!(store.dev_servers().first().unwrap().name, "name-edited"); + }) + }) +} + +#[gpui::test] +async fn test_dev_server_refresh_access_token( + cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, + cx4: &mut gpui::TestAppContext, +) { + let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; + + let (_dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; + + cx1.update(|cx| { + workspace::join_channel( + channel_id, + client1.app_state.clone(), + Some(remote_workspace), + cx, + ) + }) + .await + .unwrap(); + cx1.executor().run_until_parked(); + + remote_workspace + .update(cx1, |ws, cx| { + assert!(ws.project().read(cx).is_shared()); + }) + .unwrap(); + + join_channel(channel_id, &client2, cx2).await.unwrap(); + cx2.executor().run_until_parked(); + + // Regenerate the access token + let new_token_response = cx1 + .update(|cx| { + dev_server_projects::Store::global(cx).update(cx, |store, cx| { + store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx) + }) + }) + .await + .unwrap(); + + 
cx1.executor().run_until_parked(); + + // Assert that the other client was disconnected + let (workspace, cx2) = client2.active_workspace(cx2); + cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected())); + + // Assert that the owner of the dev server does not see the dev server as online anymore + let (workspace, cx1) = client1.active_workspace(cx1); + cx1.update(|cx| { + assert!(workspace.read(cx).project().read(cx).is_disconnected()); + dev_server_projects::Store::global(cx).update(cx, |store, _| { + assert_eq!( + store.dev_servers().first().unwrap().status, + DevServerStatus::Offline + ); + }) + }); + + // Reconnect the dev server with the new token + let _dev_server = server + .create_dev_server(new_token_response.access_token, cx4) + .await; + + cx1.executor().run_until_parked(); + + // Assert that the dev server is online again + cx1.update(|cx| { + dev_server_projects::Store::global(cx).update(cx, |store, _| { + assert_eq!(store.dev_servers().len(), 1); + assert_eq!( + store.dev_servers().first().unwrap().status, + DevServerStatus::Online + ); + }) + }); +} + +#[gpui::test] +async fn test_dev_server_reconnect( + cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, +) { + let (mut server, client1) = TestServer::start1(cx1).await; + let channel_id = server + .make_channel("test", None, (&client1, cx1), &mut []) + .await; + + let (_dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; + + cx1.update(|cx| { + workspace::join_channel( + channel_id, + client1.app_state.clone(), + Some(remote_workspace), + cx, + ) + }) + .await + .unwrap(); + cx1.executor().run_until_parked(); + + remote_workspace + .update(cx1, |ws, cx| { + assert!(ws.project().read(cx).is_shared()); + }) + .unwrap(); + + drop(client1); + + let client2 = server.create_client(cx2, "user_a").await; + + let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone()); + + store + .update(cx2, |store, cx| { + let projects = store.dev_server_projects(); + workspace::join_dev_server_project( + projects[0].project_id.unwrap(), + client2.app_state.clone(), + None, + cx, + ) + }) + .await + .unwrap(); +} + +#[gpui::test] +async fn test_create_dev_server_project_path_validation( + cx1: &mut gpui::TestAppContext, + cx2: &mut gpui::TestAppContext, + cx3: &mut gpui::TestAppContext, +) { + let (server, client1) = TestServer::start1(cx1).await; + let _channel_id = server + .make_channel("test", None, (&client1, cx1), &mut []) + .await; + + // Creating a project with a path that does exist should not fail + let (_dev_server, _) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await; + + cx1.executor().run_until_parked(); + + let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone()); + + let resp = store + .update(cx1, |store, cx| { + store.create_dev_server("server-2".to_string(), None, cx) + }) + .await + .unwrap(); + + cx1.executor().run_until_parked(); + + let _dev_server = server.create_dev_server(resp.access_token, cx3).await; + + cx1.executor().run_until_parked(); + + // Creating a remote project with a path that does not exist should fail + let result = store + .update(cx1, |store, cx| { + store.create_dev_server_project( + client::DevServerId(resp.dev_server_id), + "/notfound".to_string(), + cx, + ) + }) + .await; + + cx1.executor().run_until_parked(); + + let error = result.unwrap_err(); + assert!(matches!( + error.error_code(), + 
ErrorCode::DevServerProjectPathDoesNotExist + )); +} + +#[gpui::test] +async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { + let (server, client1) = TestServer::start1(cx1).await; + + // Creating a project with a path that does exist should not fail + let (dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await; + + let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1); + + cx.simulate_keystrokes("cmd-p 1 enter"); + cx.simulate_keystrokes("cmd-shift-s"); + cx.simulate_input("2.txt"); + cx.simulate_keystrokes("enter"); + + cx.executor().run_until_parked(); + + let title = remote_workspace + .update(&mut cx, |ws, cx| { + ws.active_item(cx).unwrap().tab_description(0, &cx).unwrap() + }) + .unwrap(); + + assert_eq!(title, "2.txt"); + + let path = Path::new("/remote/2.txt"); + assert_eq!( + dev_server.fs().load(&path).await.unwrap(), + "remote\nremote\nremote" + ); +} + +#[gpui::test] +async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { + let (server, client1) = TestServer::start1(cx1).await; + + // Creating a project with a path that does exist should not fail + let (dev_server, remote_workspace) = + create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await; + + let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1); + + cx.simulate_keystrokes("cmd-n"); + cx.simulate_input("new!"); + cx.simulate_keystrokes("cmd-shift-s"); + cx.simulate_input("2.txt"); + cx.simulate_keystrokes("enter"); + + cx.executor().run_until_parked(); + + let title = remote_workspace + .update(&mut cx, |ws, cx| { + ws.active_item(cx).unwrap().tab_description(0, &cx).unwrap() + }) + .unwrap(); + + assert_eq!(title, "2.txt"); + + let path = Path::new("/remote/2.txt"); + assert_eq!(dev_server.fs().load(&path).await.unwrap(), "new!"); +} diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs new file mode 100644 index 0000000..88146ba --- /dev/null +++ b/crates/collab/src/tests/editor_tests.rs @@ -0,0 +1,2462 @@ +use crate::{ + rpc::RECONNECT_TIMEOUT, + tests::{rust_lang, TestServer}, +}; +use call::ActiveCall; +use collections::HashMap; +use editor::{ + actions::{ + ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename, + RevertSelectedHunks, ToggleCodeActions, Undo, + }, + display_map::DisplayRow, + test::{ + editor_hunks, + editor_test_context::{AssertionContextManager, EditorTestContext}, + expanded_hunks, expanded_hunks_background_highlights, + }, + Editor, +}; +use futures::StreamExt; +use git::diff::DiffHunkStatus; +use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; +use indoc::indoc; +use language::{ + language_settings::{AllLanguageSettings, InlayHintSettings}, + FakeLspAdapter, +}; +use multi_buffer::MultiBufferRow; +use project::{ + project_settings::{InlineBlameSettings, ProjectSettings}, + SERVER_PROGRESS_DEBOUNCE_TIMEOUT, +}; +use rpc::RECEIVE_TIMEOUT; +use serde_json::json; +use settings::SettingsStore; +use std::{ + ops::Range, + path::Path, + sync::{ + atomic::{self, AtomicBool, AtomicUsize}, + Arc, + }, +}; +use text::Point; +use workspace::{Workspace, WorkspaceId}; + +#[gpui::test(iterations = 10)] +async fn test_host_disconnect( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = 
server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + "/a", + serde_json::json!({ + "a.txt": "a-contents", + "b.txt": "b-contents", + }), + ) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + + let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + cx_a.background_executor.run_until_parked(); + + assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared())); + + let workspace_b = cx_b.add_window(|cx| { + Workspace::new( + WorkspaceId::default(), + project_b.clone(), + client_b.app_state.clone(), + cx, + ) + }); + let cx_b = &mut VisualTestContext::from_window(*workspace_b, cx_b); + let workspace_b_view = workspace_b.root_view(cx_b).unwrap(); + + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "b.txt"), None, true, cx) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); + + //TODO: focus + assert!(cx_b.update_view(&editor_b, |editor, cx| editor.is_focused(cx))); + editor_b.update(cx_b, |editor, cx| editor.insert("X", cx)); + + cx_b.update(|cx| { + assert!(workspace_b_view.read(cx).is_edited()); + }); + + // Drop client A's connection. Collaborators should disappear and the project should not be shown as shared. + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + cx_a.background_executor + .advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + project_a.read_with(cx_a, |project, _| project.collaborators().is_empty()); + + project_a.read_with(cx_a, |project, _| assert!(!project.is_shared())); + + project_b.read_with(cx_b, |project, _| project.is_read_only()); + + assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared())); + + // Ensure client B's edited state is reset and that the whole window is blurred. + + workspace_b + .update(cx_b, |workspace, cx| { + assert_eq!(cx.focused(), None); + assert!(!workspace.is_edited()) + }) + .unwrap(); + + // Ensure client B is not prompted to save edits when closing window after disconnecting. + let can_close = workspace_b + .update(cx_b, |workspace, cx| workspace.prepare_to_close(true, cx)) + .unwrap() + .await + .unwrap(); + assert!(can_close); + + // Allow client A to reconnect to the server. + server.allow_connections(); + cx_a.background_executor.advance_clock(RECEIVE_TIMEOUT); + + // Client B calls client A again after they reconnected. + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| { + call.invite(client_a.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + cx_a.background_executor.run_until_parked(); + active_call_a + .update(cx_a, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Drop client A's connection again. We should still unshare it successfully. 
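+ // Advancing the clock past RECEIVE_TIMEOUT + RECONNECT_TIMEOUT makes the server give up on the dropped connection instead of waiting for a graceful reconnect.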
+ server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + cx_a.background_executor + .advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + project_a.read_with(cx_a, |project, _| assert!(!project.is_shared())); +} + +#[gpui::test] +async fn test_newline_above_or_below_does_not_move_guest_cursor( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let executor = cx_a.executor(); + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/dir", json!({ "a.txt": "Some text\n" })) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a buffer as client A + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + let cx_a = cx_a.add_empty_window(); + let editor_a = cx_a.new_view(|cx| Editor::for_buffer(buffer_a, Some(project_a), cx)); + + let mut editor_cx_a = EditorTestContext { + cx: cx_a.clone(), + window: cx_a.handle(), + editor: editor_a, + assertion_cx: AssertionContextManager::new(), + }; + + let cx_b = cx_b.add_empty_window(); + // Open a buffer as client B + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + let editor_b = cx_b.new_view(|cx| Editor::for_buffer(buffer_b, Some(project_b), cx)); + + let mut editor_cx_b = EditorTestContext { + cx: cx_b.clone(), + window: cx_b.handle(), + editor: editor_b, + assertion_cx: AssertionContextManager::new(), + }; + + // Test newline above + editor_cx_a.set_selections_state(indoc! {" + Some textˇ + "}); + editor_cx_b.set_selections_state(indoc! {" + Some textˇ + "}); + editor_cx_a + .update_editor(|editor, cx| editor.newline_above(&editor::actions::NewlineAbove, cx)); + executor.run_until_parked(); + editor_cx_a.assert_editor_state(indoc! {" + ˇ + Some text + "}); + editor_cx_b.assert_editor_state(indoc! {" + + Some textˇ + "}); + + // Test newline below + editor_cx_a.set_selections_state(indoc! {" + + Some textˇ + "}); + editor_cx_b.set_selections_state(indoc! {" + + Some textˇ + "}); + editor_cx_a + .update_editor(|editor, cx| editor.newline_below(&editor::actions::NewlineBelow, cx)); + executor.run_until_parked(); + editor_cx_a.assert_editor_state(indoc! {" + + Some text + ˇ + "}); + editor_cx_b.assert_editor_state(indoc! 
{" + + Some textˇ + + "}); +} + +#[gpui::test(iterations = 10)] +async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string()]), + resolve_provider: Some(true), + ..Default::default() + }), + ..Default::default() + }, + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a }", + "other.rs": "", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a file in an editor as the guest. + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + let cx_b = cx_b.add_empty_window(); + let editor_b = + cx_b.new_view(|cx| Editor::for_buffer(buffer_b.clone(), Some(project_b.clone()), cx)); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + cx_a.background_executor.run_until_parked(); + + buffer_b.read_with(cx_b, |buffer, _| { + assert!(!buffer.completion_triggers().is_empty()) + }); + + // Type a completion trigger character as the guest. + editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input(".", cx); + }); + cx_b.focus_view(&editor_b); + + // Receive a completion request as the host's language server. + // Return some completions from the host's language server. + cx_a.executor().start_waiting(); + fake_language_server + .handle_request::(|params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 14), + ); + + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "first_method(…)".into(), + detail: Some("fn(&mut self, B) -> C".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "first_method($1)".to_string(), + range: lsp::Range::new( + lsp::Position::new(0, 14), + lsp::Position::new(0, 14), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + }, + lsp::CompletionItem { + label: "second_method(…)".into(), + detail: Some("fn(&mut self, C) -> D".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "second_method()".to_string(), + range: lsp::Range::new( + lsp::Position::new(0, 14), + lsp::Position::new(0, 14), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + }, + ]))) + }) + .next() + .await + .unwrap(); + cx_a.executor().finish_waiting(); + + // Open the buffer on the host. 
+ let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + cx_a.executor().run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a. }") + }); + + // Confirm a completion on the guest. + editor_b.update(cx_b, |editor, cx| { + assert!(editor.context_menu_visible()); + editor.confirm_completion(&ConfirmCompletion { item_ix: Some(0) }, cx); + assert_eq!(editor.text(cx), "fn main() { a.first_method() }"); + }); + + // Return a resolved completion from the host's language server. + // The resolved completion has an additional text edit. + fake_language_server.handle_request::( + |params, _| async move { + assert_eq!(params.label, "first_method(…)"); + Ok(lsp::CompletionItem { + label: "first_method(…)".into(), + detail: Some("fn(&mut self, B) -> C".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "first_method($1)".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 14), lsp::Position::new(0, 14)), + })), + additional_text_edits: Some(vec![lsp::TextEdit { + new_text: "use d::SomeTrait;\n".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), + }]), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + }) + }, + ); + + // The additional edit is applied. + cx_a.executor().run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!( + buffer.text(), + "use d::SomeTrait;\nfn main() { a.first_method() }" + ); + }); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.text(), + "use d::SomeTrait;\nfn main() { a.first_method() }" + ); + }); + + // Now we do a second completion, this time to ensure that documentation/snippets are + // resolved + editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([46..46])); + editor.handle_input("; a", cx); + editor.handle_input(".", cx); + }); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.text(), + "use d::SomeTrait;\nfn main() { a.first_method(); a. 
}" + ); + }); + + let mut completion_response = fake_language_server + .handle_request::(|params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(1, 32), + ); + + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "third_method(…)".into(), + detail: Some("fn(&mut self, B, C, D) -> E".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + // no snippet placehodlers + new_text: "third_method".to_string(), + range: lsp::Range::new( + lsp::Position::new(1, 32), + lsp::Position::new(1, 32), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + documentation: None, + ..Default::default() + }, + ]))) + }); + + // The completion now gets a new `text_edit.new_text` when resolving the completion item + let mut resolve_completion_response = fake_language_server + .handle_request::(|params, _| async move { + assert_eq!(params.label, "third_method(…)"); + Ok(lsp::CompletionItem { + label: "third_method(…)".into(), + detail: Some("fn(&mut self, B, C, D) -> E".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + // Now it's a snippet + new_text: "third_method($1, $2, $3)".to_string(), + range: lsp::Range::new(lsp::Position::new(1, 32), lsp::Position::new(1, 32)), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + documentation: Some(lsp::Documentation::String( + "this is the documentation".into(), + )), + ..Default::default() + }) + }); + + cx_b.executor().run_until_parked(); + + completion_response.next().await.unwrap(); + + editor_b.update(cx_b, |editor, cx| { + assert!(editor.context_menu_visible()); + editor.context_menu_first(&ContextMenuFirst {}, cx); + }); + + resolve_completion_response.next().await.unwrap(); + cx_b.executor().run_until_parked(); + + // When accepting the completion, the snippet is insert. + editor_b.update(cx_b, |editor, cx| { + assert!(editor.context_menu_visible()); + editor.confirm_completion(&ConfirmCompletion { item_ix: Some(0) }, cx); + assert_eq!( + editor.text(cx), + "use d::SomeTrait;\nfn main() { a.first_method(); a.third_method(, , ) }" + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_collaborating_with_code_actions( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + // + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + cx_b.update(editor::init); + + // Set up a fake language server. + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", FakeLspAdapter::default()); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": "mod other;\nfn main() { let foo = other::foo(); }", + "other.rs": "pub fn foo() -> usize { 4 }", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Join the project as client B. 
+ let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let mut fake_language_server = fake_language_servers.next().await.unwrap(); + let mut requests = fake_language_server + .handle_request::(|params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!(params.range.start, lsp::Position::new(0, 0)); + assert_eq!(params.range.end, lsp::Position::new(0, 0)); + Ok(None) + }); + cx_a.background_executor + .advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2); + requests.next().await; + + // Move cursor to a location that contains code actions. + editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(1, 31)..Point::new(1, 31)]) + }); + }); + cx_b.focus_view(&editor_b); + + let mut requests = fake_language_server + .handle_request::(|params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!(params.range.start, lsp::Position::new(1, 31)); + assert_eq!(params.range.end, lsp::Position::new(1, 31)); + + Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction( + lsp::CodeAction { + title: "Inline into all callers".to_string(), + edit: Some(lsp::WorkspaceEdit { + changes: Some( + [ + ( + lsp::Url::from_file_path("/a/main.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(1, 22), + lsp::Position::new(1, 34), + ), + "4".to_string(), + )], + ), + ( + lsp::Url::from_file_path("/a/other.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(0, 27), + ), + "".to_string(), + )], + ), + ] + .into_iter() + .collect(), + ), + ..Default::default() + }), + data: Some(json!({ + "codeActionParams": { + "range": { + "start": {"line": 1, "column": 31}, + "end": {"line": 1, "column": 31}, + } + } + })), + ..Default::default() + }, + )])) + }); + cx_a.background_executor + .advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2); + requests.next().await; + + // Toggle code actions and wait for them to display. + editor_b.update(cx_b, |editor, cx| { + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from_indicator: None, + }, + cx, + ); + }); + cx_a.background_executor.run_until_parked(); + + editor_b.update(cx_b, |editor, _| assert!(editor.context_menu_visible())); + + fake_language_server.remove_request_handler::(); + + // Confirming the code action will trigger a resolve request. 
+ let confirm_action = editor_b + .update(cx_b, |editor, cx| { + Editor::confirm_code_action(editor, &ConfirmCodeAction { item_ix: Some(0) }, cx) + }) + .unwrap(); + fake_language_server.handle_request::( + |_, _| async move { + Ok(lsp::CodeAction { + title: "Inline into all callers".to_string(), + edit: Some(lsp::WorkspaceEdit { + changes: Some( + [ + ( + lsp::Url::from_file_path("/a/main.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(1, 22), + lsp::Position::new(1, 34), + ), + "4".to_string(), + )], + ), + ( + lsp::Url::from_file_path("/a/other.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(0, 27), + ), + "".to_string(), + )], + ), + ] + .into_iter() + .collect(), + ), + ..Default::default() + }), + ..Default::default() + }) + }, + ); + + // After the action is confirmed, an editor containing both modified files is opened. + confirm_action.await.unwrap(); + + let code_action_editor = workspace_b.update(cx_b, |workspace, cx| { + workspace + .active_item(cx) + .unwrap() + .downcast::() + .unwrap() + }); + code_action_editor.update(cx_b, |editor, cx| { + assert_eq!(editor.text(cx), "mod other;\nfn main() { let foo = 4; }\n"); + editor.undo(&Undo, cx); + assert_eq!( + editor.text(cx), + "mod other;\nfn main() { let foo = other::foo(); }\npub fn foo() -> usize { 4 }" + ); + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "mod other;\nfn main() { let foo = 4; }\n"); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + cx_b.update(editor::init); + + // Set up a fake language server. + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions { + prepare_provider: Some(true), + work_done_progress_options: Default::default(), + })), + ..Default::default() + }, + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + "one.rs": "const ONE: usize = 1;", + "two.rs": "const TWO: usize = one::ONE + one::ONE;" + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "one.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let fake_language_server = fake_language_servers.next().await.unwrap(); + + // Move cursor to a location that can be renamed. 
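+    // Offset 7 sits inside the identifier `ONE` in "const ONE: usize = 1;", so the
+    // prepare-rename response below is expected to cover the whole word (offsets 6..9).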
+ let prepare_rename = editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([7..7])); + editor.rename(&Rename, cx).unwrap() + }); + + fake_language_server + .handle_request::(|params, _| async move { + assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs"); + assert_eq!(params.position, lsp::Position::new(0, 7)); + Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new( + lsp::Position::new(0, 6), + lsp::Position::new(0, 9), + )))) + }) + .next() + .await + .unwrap(); + prepare_rename.await.unwrap(); + editor_b.update(cx_b, |editor, cx| { + use editor::ToOffset; + let rename = editor.pending_rename().unwrap(); + let buffer = editor.buffer().read(cx).snapshot(cx); + assert_eq!( + rename.range.start.to_offset(&buffer)..rename.range.end.to_offset(&buffer), + 6..9 + ); + rename.editor.update(cx, |rename_editor, cx| { + let rename_selection = rename_editor.selections.newest::(cx); + assert_eq!( + rename_selection.range(), + 0..3, + "Rename that was triggered from zero selection caret, should propose the whole word." + ); + rename_editor.buffer().update(cx, |rename_buffer, cx| { + rename_buffer.edit([(0..3, "THREE")], None, cx); + }); + }); + }); + + // Cancel the rename, and repeat the same, but use selections instead of cursor movement + editor_b.update(cx_b, |editor, cx| { + editor.cancel(&editor::actions::Cancel, cx); + }); + let prepare_rename = editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([7..8])); + editor.rename(&Rename, cx).unwrap() + }); + + fake_language_server + .handle_request::(|params, _| async move { + assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs"); + assert_eq!(params.position, lsp::Position::new(0, 8)); + Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new( + lsp::Position::new(0, 6), + lsp::Position::new(0, 9), + )))) + }) + .next() + .await + .unwrap(); + prepare_rename.await.unwrap(); + editor_b.update(cx_b, |editor, cx| { + use editor::ToOffset; + let rename = editor.pending_rename().unwrap(); + let buffer = editor.buffer().read(cx).snapshot(cx); + let lsp_rename_start = rename.range.start.to_offset(&buffer); + let lsp_rename_end = rename.range.end.to_offset(&buffer); + assert_eq!(lsp_rename_start..lsp_rename_end, 6..9); + rename.editor.update(cx, |rename_editor, cx| { + let rename_selection = rename_editor.selections.newest::(cx); + assert_eq!( + rename_selection.range(), + 1..2, + "Rename that was triggered from a selection, should have the same selection range in the rename proposal" + ); + rename_editor.buffer().update(cx, |rename_buffer, cx| { + rename_buffer.edit([(0..lsp_rename_end - lsp_rename_start, "THREE")], None, cx); + }); + }); + }); + + let confirm_rename = editor_b.update(cx_b, |editor, cx| { + Editor::confirm_rename(editor, &ConfirmRename, cx).unwrap() + }); + fake_language_server + .handle_request::(|params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri.as_str(), + "file:///dir/one.rs" + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 6) + ); + assert_eq!(params.new_name, "THREE"); + Ok(Some(lsp::WorkspaceEdit { + changes: Some( + [ + ( + lsp::Url::from_file_path("/dir/one.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), + "THREE".to_string(), + )], + ), + ( + lsp::Url::from_file_path("/dir/two.rs").unwrap(), + vec![ + lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(0, 24), + 
lsp::Position::new(0, 27), + ), + "THREE".to_string(), + ), + lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(0, 35), + lsp::Position::new(0, 38), + ), + "THREE".to_string(), + ), + ], + ), + ] + .into_iter() + .collect(), + ), + ..Default::default() + })) + }) + .next() + .await + .unwrap(); + confirm_rename.await.unwrap(); + + let rename_editor = workspace_b.update(cx_b, |workspace, cx| { + workspace.active_item_as::(cx).unwrap() + }); + + rename_editor.update(cx_b, |editor, cx| { + assert_eq!( + editor.text(cx), + "const THREE: usize = 1;\nconst TWO: usize = one::THREE + one::THREE;" + ); + editor.undo(&Undo, cx); + assert_eq!( + editor.text(cx), + "const ONE: usize = 1;\nconst TWO: usize = one::ONE + one::ONE;" + ); + editor.redo(&Redo, cx); + assert_eq!( + editor.text(cx), + "const THREE: usize = 1;\nconst TWO: usize = one::THREE + one::THREE;" + ); + }); + + // Ensure temporary rename edits cannot be undone/redone. + editor_b.update(cx_b, |editor, cx| { + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "const ONE: usize = 1;"); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "const ONE: usize = 1;"); + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "const THREE: usize = 1;"); + }) +} + +#[gpui::test(iterations = 10)] +async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let executor = cx_a.executor(); + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + cx_b.update(editor::init); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: "the-language-server", + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + "main.rs": "const ONE: usize = 1;", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + + let _buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.start_progress("the-token").await; + fake_language_server.notify::(lsp::ProgressParams { + token: lsp::NumberOrString::String("the-token".to_string()), + value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report( + lsp::WorkDoneProgressReport { + message: Some("the-message".to_string()), + ..Default::default() + }, + )), + }); + executor.advance_clock(SERVER_PROGRESS_DEBOUNCE_TIMEOUT); + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + let status = project.language_server_statuses().next().unwrap(); + assert_eq!(status.name, "the-language-server"); + assert_eq!(status.pending_work.len(), 1); + assert_eq!( + status.pending_work["the-token"].message.as_ref().unwrap(), + "the-message" + ); + }); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + executor.run_until_parked(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + project_b.read_with(cx_b, |project, _| { + let status = project.language_server_statuses().next().unwrap(); + assert_eq!(status.name, "the-language-server"); + }); + + 
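+    // A second progress report is published after client B has joined; both the host
+    // and the guest should observe the updated message below.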
fake_language_server.notify::(lsp::ProgressParams { + token: lsp::NumberOrString::String("the-token".to_string()), + value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report( + lsp::WorkDoneProgressReport { + message: Some("the-message-2".to_string()), + ..Default::default() + }, + )), + }); + executor.advance_clock(SERVER_PROGRESS_DEBOUNCE_TIMEOUT); + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + let status = project.language_server_statuses().next().unwrap(); + assert_eq!(status.name, "the-language-server"); + assert_eq!(status.pending_work.len(), 1); + assert_eq!( + status.pending_work["the-token"].message.as_ref().unwrap(), + "the-message-2" + ); + }); + + project_b.read_with(cx_b, |project, _| { + let status = project.language_server_statuses().next().unwrap(); + assert_eq!(status.name, "the-language-server"); + assert_eq!(status.pending_work.len(), 1); + assert_eq!( + status.pending_work["the-token"].message.as_ref().unwrap(), + "the-message-2" + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_share_project( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let executor = cx_a.executor(); + let cx_b = cx_b.add_empty_window(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + ".gitignore": "ignored-dir", + "a.txt": "a-contents", + "b.txt": "b-contents", + "ignored-dir": { + "c.txt": "", + "d.txt": "", + } + }), + ) + .await; + + // Invite client B to collaborate on a project + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), Some(project_a.clone()), cx) + }) + .await + .unwrap(); + + // Join that project as client B + + let incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + executor.run_until_parked(); + let call = incoming_call_b.borrow().clone().unwrap(); + assert_eq!(call.calling_user.github_login, "user_a"); + let initial_project = call.initial_project.unwrap(); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + let client_b_peer_id = client_b.peer_id().unwrap(); + let project_b = client_b + .build_dev_server_project(initial_project.id, cx_b) + .await; + + let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id()); + + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + let client_b_collaborator = project.collaborators().get(&client_b_peer_id).unwrap(); + assert_eq!(client_b_collaborator.replica_id, replica_id_b); + }); + + project_b.read_with(cx_b, |project, cx| { + let worktree = project.worktrees().next().unwrap().read(cx); + assert_eq!( + worktree.paths().map(AsRef::as_ref).collect::>(), + [ + Path::new(".gitignore"), + Path::new("a.txt"), + Path::new("b.txt"), + Path::new("ignored-dir"), + ] + ); + }); + + project_b + .update(cx_b, |project, cx| { + let worktree = project.worktrees().next().unwrap(); + let entry = 
worktree.read(cx).entry_for_path("ignored-dir").unwrap(); + project.expand_entry(worktree_id, entry.id, cx).unwrap() + }) + .await + .unwrap(); + + project_b.read_with(cx_b, |project, cx| { + let worktree = project.worktrees().next().unwrap().read(cx); + assert_eq!( + worktree.paths().map(AsRef::as_ref).collect::>(), + [ + Path::new(".gitignore"), + Path::new("a.txt"), + Path::new("b.txt"), + Path::new("ignored-dir"), + Path::new("ignored-dir/c.txt"), + Path::new("ignored-dir/d.txt"), + ] + ); + }); + + // Open the same file as client B and client A. + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) + .await + .unwrap(); + + buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), "b-contents")); + + project_a.read_with(cx_a, |project, cx| { + assert!(project.has_open_buffer((worktree_id, "b.txt"), cx)) + }); + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) + .await + .unwrap(); + + let editor_b = cx_b.new_view(|cx| Editor::for_buffer(buffer_b, None, cx)); + + // Client A sees client B's selection + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + buffer + .snapshot() + .remote_selections_in_range(text::Anchor::MIN..text::Anchor::MAX) + .count() + == 1 + }); + + // Edit the buffer as client B and see that edit as client A. + editor_b.update(cx_b, |editor, cx| editor.handle_input("ok, ", cx)); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.text(), "ok, b-contents") + }); + + // Client B can invite client C on a project shared by client A. + active_call_b + .update(cx_b, |call, cx| { + call.invite(client_c.user_id().unwrap(), Some(project_b.clone()), cx) + }) + .await + .unwrap(); + + let incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming()); + executor.run_until_parked(); + let call = incoming_call_c.borrow().clone().unwrap(); + assert_eq!(call.calling_user.github_login, "user_b"); + let initial_project = call.initial_project.unwrap(); + active_call_c + .update(cx_c, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + let _project_c = client_c + .build_dev_server_project(initial_project.id, cx_c) + .await; + + // Client B closes the editor, and client A sees client B's selections removed. 
+ cx_b.update(move |_| drop(editor_b)); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + buffer + .snapshot() + .remote_selections_in_range(text::Anchor::MIN..text::Anchor::MAX) + .count() + == 0 + }); +} + +#[gpui::test(iterations = 10)] +async fn test_on_input_format_from_host_to_guest( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let executor = cx_a.executor(); + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_on_type_formatting_provider: Some(lsp::DocumentOnTypeFormattingOptions { + first_trigger_character: ":".to_string(), + more_trigger_character: Some(vec![">".to_string()]), + }), + ..Default::default() + }, + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a }", + "other.rs": "// Test file", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a file in an editor as the host. + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + let cx_a = cx_a.add_empty_window(); + let editor_a = cx_a.new_view(|cx| Editor::for_buffer(buffer_a, Some(project_a.clone()), cx)); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + executor.run_until_parked(); + + // Receive an OnTypeFormatting request as the host's language server. + // Return some formatting from the host's language server. + fake_language_server.handle_request::( + |params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 14), + ); + + Ok(Some(vec![lsp::TextEdit { + new_text: "~<".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 14), lsp::Position::new(0, 14)), + }])) + }, + ); + + // Open the buffer on the guest and see that the formatting worked + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + + // Type a on type formatting trigger character as the guest. 
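+    // (In this host-to-guest test the trigger character is actually typed on the host
+    // side, in `editor_a`; the guest then observes the formatted buffer.)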
+ cx_a.focus_view(&editor_a); + editor_a.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input(">", cx); + }); + + executor.run_until_parked(); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a>~< }") + }); + + // Undo should remove LSP edits first + editor_a.update(cx_a, |editor, cx| { + assert_eq!(editor.text(cx), "fn main() { a>~< }"); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "fn main() { a> }"); + }); + executor.run_until_parked(); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a> }") + }); + + editor_a.update(cx_a, |editor, cx| { + assert_eq!(editor.text(cx), "fn main() { a> }"); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "fn main() { a }"); + }); + executor.run_until_parked(); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a }") + }); +} + +#[gpui::test(iterations = 10)] +async fn test_on_input_format_from_guest_to_host( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let executor = cx_a.executor(); + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_on_type_formatting_provider: Some(lsp::DocumentOnTypeFormattingOptions { + first_trigger_character: ":".to_string(), + more_trigger_character: Some(vec![">".to_string()]), + }), + ..Default::default() + }, + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a }", + "other.rs": "// Test file", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a file in an editor as the guest. + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + let cx_b = cx_b.add_empty_window(); + let editor_b = cx_b.new_view(|cx| Editor::for_buffer(buffer_b, Some(project_b.clone()), cx)); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + executor.run_until_parked(); + + // Type a on type formatting trigger character as the guest. + cx_b.focus_view(&editor_b); + editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input(":", cx); + }); + + // Receive an OnTypeFormatting request as the host's language server. + // Return some formatting from the host's language server. 
+ executor.start_waiting(); + fake_language_server + .handle_request::(|params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 14), + ); + + Ok(Some(vec![lsp::TextEdit { + new_text: "~:".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 14), lsp::Position::new(0, 14)), + }])) + }) + .next() + .await + .unwrap(); + executor.finish_waiting(); + + // Open the buffer on the host and see that the formatting worked + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .await + .unwrap(); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a:~: }") + }); + + // Undo should remove LSP edits first + editor_b.update(cx_b, |editor, cx| { + assert_eq!(editor.text(cx), "fn main() { a:~: }"); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "fn main() { a: }"); + }); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a: }") + }); + + editor_b.update(cx_b, |editor, cx| { + assert_eq!(editor.text(cx), "fn main() { a: }"); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "fn main() { a }"); + }); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.text(), "fn main() { a }") + }); +} + +#[gpui::test(iterations = 10)] +async fn test_mutual_editor_inlay_hint_cache_update( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let executor = cx_a.executor(); + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: false, + show_other_hints: true, + }) + }); + }); + }); + cx_b.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: false, + show_other_hints: true, + }) + }); + }); + }); + + client_a.language_registry().add(rust_lang()); + client_b.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + // Client A opens a project. 
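+    // Both sides enable inlay hints with zero debounce above, so hint updates in this
+    // test take effect without waiting on debounce timers.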
+ client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a } // and some long comment to ensure inlay hints are not trimmed out", + "other.rs": "// Test file", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Client B joins the project + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + executor.start_waiting(); + + // The host opens a rust file. + let _buffer_a = project_a + .update(cx_a, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + let fake_language_server = fake_language_servers.next().await.unwrap(); + let editor_a = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // Set up the language server to return an additional inlay hint on each request. + let edits_made = Arc::new(AtomicUsize::new(0)); + let closure_edits_made = Arc::clone(&edits_made); + fake_language_server + .handle_request::(move |params, _| { + let task_edits_made = Arc::clone(&closure_edits_made); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + let edits_made = task_edits_made.load(atomic::Ordering::Acquire); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, edits_made as u32), + label: lsp::InlayHintLabel::String(edits_made.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await + .unwrap(); + + executor.run_until_parked(); + + let initial_edit = edits_made.load(atomic::Ordering::Acquire); + editor_a.update(cx_a, |editor, _| { + assert_eq!( + vec![initial_edit.to_string()], + extract_hint_labels(editor), + "Host should get its first hints when opens an editor" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 1, + "Host editor update the cache version after every cache/view change", + ); + }); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + executor.run_until_parked(); + editor_b.update(cx_b, |editor, _| { + assert_eq!( + vec![initial_edit.to_string()], + extract_hint_labels(editor), + "Client should get its first hints when opens an editor" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 1, + "Guest editor update the cache version after every cache/view change" + ); + }); + + let after_client_edit = edits_made.fetch_add(1, atomic::Ordering::Release) + 1; + editor_b.update(cx_b, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13].clone())); + editor.handle_input(":", cx); + }); + cx_b.focus_view(&editor_b); + + executor.run_until_parked(); + editor_a.update(cx_a, |editor, _| { + assert_eq!( + vec![after_client_edit.to_string()], + 
extract_hint_labels(editor), + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!(inlay_cache.version(), 2); + }); + editor_b.update(cx_b, |editor, _| { + assert_eq!( + vec![after_client_edit.to_string()], + extract_hint_labels(editor), + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!(inlay_cache.version(), 2); + }); + + let after_host_edit = edits_made.fetch_add(1, atomic::Ordering::Release) + 1; + editor_a.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input("a change to increment both buffers' versions", cx); + }); + cx_a.focus_view(&editor_a); + + executor.run_until_parked(); + editor_a.update(cx_a, |editor, _| { + assert_eq!( + vec![after_host_edit.to_string()], + extract_hint_labels(editor), + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!(inlay_cache.version(), 3); + }); + editor_b.update(cx_b, |editor, _| { + assert_eq!( + vec![after_host_edit.to_string()], + extract_hint_labels(editor), + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!(inlay_cache.version(), 3); + }); + + let after_special_edit_for_refresh = edits_made.fetch_add(1, atomic::Ordering::Release) + 1; + fake_language_server + .request::(()) + .await + .expect("inlay refresh request failed"); + + executor.run_until_parked(); + editor_a.update(cx_a, |editor, _| { + assert_eq!( + vec![after_special_edit_for_refresh.to_string()], + extract_hint_labels(editor), + "Host should react to /refresh LSP request" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 4, + "Host should accepted all edits and bump its cache version every time" + ); + }); + editor_b.update(cx_b, |editor, _| { + assert_eq!( + vec![after_special_edit_for_refresh.to_string()], + extract_hint_labels(editor), + "Guest should get a /refresh LSP request propagated by host" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 4, + "Guest should accepted all edits and bump its cache version every time" + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_inlay_hint_refresh_is_forwarded( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let executor = cx_a.executor(); + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: false, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: false, + show_parameter_hints: false, + show_other_hints: false, + }) + }); + }); + }); + cx_b.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + }); + }); + + client_a.language_registry().add(rust_lang()); + client_b.language_registry().add(rust_lang()); + let mut fake_language_servers = 
client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a } // and some long comment to ensure inlay hints are not trimmed out", + "other.rs": "// Test file", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + cx_a.background_executor.start_waiting(); + + let editor_a = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let other_hints = Arc::new(AtomicBool::new(false)); + let fake_language_server = fake_language_servers.next().await.unwrap(); + let closure_other_hints = Arc::clone(&other_hints); + fake_language_server + .handle_request::(move |params, _| { + let task_other_hints = Arc::clone(&closure_other_hints); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + let other_hints = task_other_hints.load(atomic::Ordering::Acquire); + let character = if other_hints { 0 } else { 2 }; + let label = if other_hints { + "other hint" + } else { + "initial hint" + }; + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, character), + label: lsp::InlayHintLabel::String(label.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await + .unwrap(); + executor.finish_waiting(); + + executor.run_until_parked(); + editor_a.update(cx_a, |editor, _| { + assert!( + extract_hint_labels(editor).is_empty(), + "Host should get no hints due to them turned off" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 0, + "Turned off hints should not generate version updates" + ); + }); + + executor.run_until_parked(); + editor_b.update(cx_b, |editor, _| { + assert_eq!( + vec!["initial hint".to_string()], + extract_hint_labels(editor), + "Client should get its first hints when opens an editor" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 1, + "Should update cache version after first hints" + ); + }); + + other_hints.fetch_or(true, atomic::Ordering::Release); + fake_language_server + .request::(()) + .await + .expect("inlay refresh request failed"); + executor.run_until_parked(); + editor_a.update(cx_a, |editor, _| { + assert!( + extract_hint_labels(editor).is_empty(), + "Host should get nop hints due to them turned off, even after the /refresh" + ); + let inlay_cache = editor.inlay_hint_cache(); + 
assert_eq!( + inlay_cache.version(), + 0, + "Turned off hints should not generate version updates, again" + ); + }); + + executor.run_until_parked(); + editor_b.update(cx_b, |editor, _| { + assert_eq!( + vec!["other hint".to_string()], + extract_hint_labels(editor), + "Guest should get a /refresh LSP request propagated by host despite host hints are off" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.version(), + 2, + "Guest should accepted all edits and bump its cache version every time" + ); + }); +} + +#[gpui::test] +async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + client_a.language_registry().add(rust_lang()); + client_b.language_registry().add(rust_lang()); + + let base_text = indoc! {r#"struct Row; +struct Row1; +struct Row2; + +struct Row4; +struct Row5; +struct Row6; + +struct Row8; +struct Row9; +struct Row10;"#}; + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "main.rs": base_text, + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let editor_a = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let mut editor_cx_a = EditorTestContext { + cx: cx_a.clone(), + window: cx_a.handle(), + editor: editor_a, + assertion_cx: AssertionContextManager::new(), + }; + let mut editor_cx_b = EditorTestContext { + cx: cx_b.clone(), + window: cx_b.handle(), + editor: editor_b, + assertion_cx: AssertionContextManager::new(), + }; + + // host edits the file, that differs from the base text, producing diff hunks + editor_cx_a.set_state(indoc! 
{r#"struct Row; + struct Row0.1; + struct Row0.2; + struct Row1; + + struct Row4; + struct Row5444; + struct Row6; + + struct Row9; + struct Row1220;ˇ"#}); + editor_cx_a.update_editor(|editor, cx| { + editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_diff_base(Some(base_text.into()), cx); + }); + }); + editor_cx_b.update_editor(|editor, cx| { + editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_diff_base(Some(base_text.into()), cx); + }); + }); + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + + // the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection + // the host does not see the diffs toggled + editor_cx_b.set_selections_state(indoc! {r#"«ˇstruct Row; + struct Row0.1; + struct Row0.2; + struct Row1; + + struct Row4; + struct Row5444; + struct Row6; + + struct R»ow9; + struct Row1220;"#}); + editor_cx_b + .update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx)); + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + editor_cx_a.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!( + all_hunks, + vec![ + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(1)..DisplayRow(3) + ), + ( + "struct Row2;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(4)..DisplayRow(4) + ), + ( + "struct Row5;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(7) + ), + ( + "struct Row8;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(9)..DisplayRow(9) + ), + ( + "struct Row10;".to_string(), + DiffHunkStatus::Modified, + DisplayRow(10)..DisplayRow(10), + ), + ] + ); + assert_eq!(all_expanded_hunks, Vec::new()); + }); + editor_cx_b.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)], + ); + assert_eq!( + all_hunks, + vec![ + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(1)..DisplayRow(3) + ), + ( + "struct Row2;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(5)..DisplayRow(5) + ), + ( + "struct Row5;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(8)..DisplayRow(9) + ), + ( + "struct Row8;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(12)..DisplayRow(12) + ), + ( + "struct Row10;".to_string(), + DiffHunkStatus::Modified, + DisplayRow(13)..DisplayRow(13), + ), + ] + ); + assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]); + }); + + // the client reverts the hunks, removing the expanded diffs too + // both host and the client observe the reverted state (with one hunk left, not covered by client's selection) + editor_cx_b.update_editor(|editor, cx| { + editor.revert_selected_hunks(&RevertSelectedHunks, cx); + }); + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + editor_cx_a.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + 
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!( + all_hunks, + vec![( + "struct Row10;".to_string(), + DiffHunkStatus::Modified, + DisplayRow(10)..DisplayRow(10), + )] + ); + assert_eq!(all_expanded_hunks, Vec::new()); + }); + editor_cx_b.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(5)..=DisplayRow(5)] + ); + assert_eq!( + all_hunks, + vec![( + "struct Row10;".to_string(), + DiffHunkStatus::Modified, + DisplayRow(10)..DisplayRow(10), + )] + ); + assert_eq!(all_expanded_hunks, Vec::new()); + }); + editor_cx_a.assert_editor_state(indoc! {r#"struct Row; + struct Row1; + struct Row2; + + struct Row4; + struct Row5; + struct Row6; + + struct Row8; + struct Row9; + struct Row1220;ˇ"#}); + editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row; + struct Row1; + struct Row2; + + struct Row4; + struct Row5; + struct Row6; + + struct Row8; + struct R»ow9; + struct Row1220;"#}); +} + +#[gpui::test(iterations = 10)] +async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + // Turn inline-blame-off by default so no state is transferred without us explicitly doing so + let inline_blame_off_settings = Some(InlineBlameSettings { + enabled: false, + delay_ms: None, + min_column: None, + }); + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.git.inline_blame = inline_blame_off_settings; + }); + }); + }); + cx_b.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.git.inline_blame = inline_blame_off_settings; + }); + }); + }); + + client_a + .fs() + .insert_tree( + "/my-repo", + json!({ + ".git": {}, + "file.txt": "line1\nline2\nline3\nline\n", + }), + ) + .await; + + let blame = git::blame::Blame { + entries: vec![ + blame_entry("1b1b1b", 0..1), + blame_entry("0d0d0d", 1..2), + blame_entry("3a3a3a", 2..3), + blame_entry("4c4c4c", 3..4), + ], + permalinks: HashMap::default(), // This field is deprecrated + messages: [ + ("1b1b1b", "message for idx-0"), + ("0d0d0d", "message for idx-1"), + ("3a3a3a", "message for idx-2"), + ("4c4c4c", "message for idx-3"), + ] + .into_iter() + .map(|(sha, message)| (sha.parse().unwrap(), message.into())) + .collect(), + remote_url: Some("git@github.com:zed-industries/zed.git".to_string()), + }; + client_a.fs().set_blame_for_repo( + Path::new("/my-repo/.git"), + vec![(Path::new("file.txt"), blame)], + ); + + let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Create editor_a + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let editor_a = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "file.txt"), None, true, cx) + }) + .await + .unwrap() + 
.downcast::() + .unwrap(); + + // Join the project as client B. + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let editor_b = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "file.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // client_b now requests git blame for the open buffer + editor_b.update(cx_b, |editor_b, cx| { + assert!(editor_b.blame().is_none()); + editor_b.toggle_git_blame(&editor::actions::ToggleGitBlame {}, cx); + }); + + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + + editor_b.update(cx_b, |editor_b, cx| { + let blame = editor_b.blame().expect("editor_b should have blame now"); + let entries = blame.update(cx, |blame, cx| { + blame + .blame_for_rows((0..4).map(MultiBufferRow).map(Some), cx) + .collect::>() + }); + + assert_eq!( + entries, + vec![ + Some(blame_entry("1b1b1b", 0..1)), + Some(blame_entry("0d0d0d", 1..2)), + Some(blame_entry("3a3a3a", 2..3)), + Some(blame_entry("4c4c4c", 3..4)), + ] + ); + + blame.update(cx, |blame, _| { + for (idx, entry) in entries.iter().flatten().enumerate() { + let details = blame.details_for_entry(entry).unwrap(); + assert_eq!(details.message, format!("message for idx-{}", idx)); + assert_eq!( + details.permalink.unwrap().to_string(), + format!("https://github.com/zed-industries/zed/commit/{}", entry.sha) + ); + } + }); + }); + + // editor_b updates the file, which gets sent to client_a, which updates git blame, + // which gets back to client_b. + editor_b.update(cx_b, |editor_b, cx| { + editor_b.edit([(Point::new(0, 3)..Point::new(0, 3), "FOO")], cx); + }); + + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + + editor_b.update(cx_b, |editor_b, cx| { + let blame = editor_b.blame().expect("editor_b should have blame now"); + let entries = blame.update(cx, |blame, cx| { + blame + .blame_for_rows((0..4).map(MultiBufferRow).map(Some), cx) + .collect::>() + }); + + assert_eq!( + entries, + vec![ + None, + Some(blame_entry("0d0d0d", 1..2)), + Some(blame_entry("3a3a3a", 2..3)), + Some(blame_entry("4c4c4c", 3..4)), + ] + ); + }); + + // Now editor_a also updates the file + editor_a.update(cx_a, |editor_a, cx| { + editor_a.edit([(Point::new(1, 3)..Point::new(1, 3), "FOO")], cx); + }); + + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + + editor_b.update(cx_b, |editor_b, cx| { + let blame = editor_b.blame().expect("editor_b should have blame now"); + let entries = blame.update(cx, |blame, cx| { + blame + .blame_for_rows((0..4).map(MultiBufferRow).map(Some), cx) + .collect::>() + }); + + assert_eq!( + entries, + vec![ + None, + None, + Some(blame_entry("3a3a3a", 2..3)), + Some(blame_entry("4c4c4c", 3..4)), + ] + ); + }); +} + +fn extract_hint_labels(editor: &Editor) -> Vec { + let mut labels = Vec::new(); + for hint in editor.inlay_hint_cache().hints() { + match hint.label { + project::InlayHintLabel::String(s) => labels.push(s), + _ => unreachable!(), + } + } + labels +} + +fn blame_entry(sha: &str, range: Range) -> git::blame::BlameEntry { + git::blame::BlameEntry { + sha: sha.parse().unwrap(), + range, + ..Default::default() + } +} diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs new file mode 100644 index 0000000..e6904ed --- /dev/null +++ b/crates/collab/src/tests/following_tests.rs @@ -0,0 +1,2150 @@ +use crate::{rpc::RECONNECT_TIMEOUT, 
tests::TestServer}; +use call::{ActiveCall, ParticipantLocation}; +use client::ChannelId; +use collab_ui::{ + channel_view::ChannelView, + notifications::project_shared_notification::ProjectSharedNotification, +}; +use editor::{Editor, ExcerptRange, MultiBuffer}; +use gpui::{ + point, BackgroundExecutor, BorrowAppContext, Context, Entity, SharedString, TestAppContext, + View, VisualContext, VisualTestContext, +}; +use language::Capability; +use live_kit_client::MacOSDisplay; +use project::WorktreeSettings; +use rpc::proto::PeerId; +use serde_json::json; +use settings::SettingsStore; +use workspace::{ + dock::{test::TestPanel, DockPosition}, + item::{test::TestItem, ItemHandle as _}, + shared_screen::SharedScreen, + SplitDirection, Workspace, +}; + +use super::TestClient; + +#[gpui::test(iterations = 10)] +async fn test_basic_following( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, + cx_d: &mut TestAppContext, +) { + let executor = cx_a.executor(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + let client_d = server.create_client(cx_d, "user_d").await; + server + .create_room(&mut [ + (&client_a, cx_a), + (&client_b, cx_b), + (&client_c, cx_c), + (&client_d, cx_d), + ]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "1.txt": "one\none\none", + "2.txt": "two\ntwo\ntwo", + "3.txt": "three\nthree\nthree", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + cx_b.update(|cx| { + assert!(cx.is_window_active()); + }); + + // Client A opens some editors. + let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone()); + let editor_a1 = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let editor_a2 = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "2.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // Client B opens an editor. 
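+    // Client B opens 1.txt on its own; when B later follows client A, this existing
+    // editor is reused for A's replicated selections rather than opening a new view.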
+ let editor_b1 = workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let peer_id_a = client_a.peer_id().unwrap(); + let peer_id_b = client_b.peer_id().unwrap(); + let peer_id_c = client_c.peer_id().unwrap(); + let peer_id_d = client_d.peer_id().unwrap(); + + // Client A updates their selections in those editors + editor_a1.update(cx_a, |editor, cx| { + editor.handle_input("a", cx); + editor.handle_input("b", cx); + editor.handle_input("c", cx); + editor.select_left(&Default::default(), cx); + assert_eq!(editor.selections.ranges(cx), vec![3..2]); + }); + editor_a2.update(cx_a, |editor, cx| { + editor.handle_input("d", cx); + editor.handle_input("e", cx); + editor.select_left(&Default::default(), cx); + assert_eq!(editor.selections.ranges(cx), vec![2..1]); + }); + + // When client B starts following client A, all visible view states are replicated to client B. + workspace_b.update(cx_b, |workspace, cx| workspace.follow(peer_id_a, cx)); + + cx_c.executor().run_until_parked(); + let editor_b2 = workspace_b.update(cx_b, |workspace, cx| { + workspace + .active_item(cx) + .unwrap() + .downcast::() + .unwrap() + }); + assert_eq!( + cx_b.read(|cx| editor_b2.project_path(cx)), + Some((worktree_id, "2.txt").into()) + ); + assert_eq!( + editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)), + vec![2..1] + ); + assert_eq!( + editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)), + vec![3..2] + ); + + executor.run_until_parked(); + let active_call_c = cx_c.read(ActiveCall::global); + let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c); + active_call_c + .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx)) + .await + .unwrap(); + drop(project_c); + + // Client C also follows client A. + workspace_c.update(cx_c, |workspace, cx| workspace.follow(peer_id_a, cx)); + + cx_d.executor().run_until_parked(); + let active_call_d = cx_d.read(ActiveCall::global); + let project_d = client_d.build_dev_server_project(project_id, cx_d).await; + let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d); + active_call_d + .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx)) + .await + .unwrap(); + drop(project_d); + + // All clients see that clients B and C are following client A. + cx_c.executor().run_until_parked(); + for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { + assert_eq!( + followers_by_leader(project_id, cx), + &[(peer_id_a, vec![peer_id_b, peer_id_c])], + "followers seen by {name}" + ); + } + + // Client C unfollows client A. + workspace_c.update(cx_c, |workspace, cx| { + workspace.unfollow(&workspace.active_pane().clone(), cx); + }); + + // All clients see that clients B is following client A. + cx_c.executor().run_until_parked(); + for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { + assert_eq!( + followers_by_leader(project_id, cx), + &[(peer_id_a, vec![peer_id_b])], + "followers seen by {name}" + ); + } + + // Client C re-follows client A. + workspace_c.update(cx_c, |workspace, cx| workspace.follow(peer_id_a, cx)); + + // All clients see that clients B and C are following client A. 
+ cx_c.executor().run_until_parked(); + for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { + assert_eq!( + followers_by_leader(project_id, cx), + &[(peer_id_a, vec![peer_id_b, peer_id_c])], + "followers seen by {name}" + ); + } + + // Client D follows client B, then switches to following client C. + workspace_d.update(cx_d, |workspace, cx| workspace.follow(peer_id_b, cx)); + cx_a.executor().run_until_parked(); + workspace_d.update(cx_d, |workspace, cx| workspace.follow(peer_id_c, cx)); + + // All clients see that D is following C + cx_a.executor().run_until_parked(); + for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] { + assert_eq!( + followers_by_leader(project_id, cx), + &[ + (peer_id_a, vec![peer_id_b, peer_id_c]), + (peer_id_c, vec![peer_id_d]) + ], + "followers seen by {name}" + ); + } + + // Client C closes the project. + let weak_workspace_c = workspace_c.downgrade(); + workspace_c.update(cx_c, |workspace, cx| { + workspace.close_window(&Default::default(), cx); + }); + executor.run_until_parked(); + // are you sure you want to leave the call? + cx_c.simulate_prompt_answer(0); + cx_c.cx.update(|_| { + drop(workspace_c); + }); + executor.run_until_parked(); + cx_c.cx.update(|_| {}); + + weak_workspace_c.assert_released(); + + // Clients A and B see that client B is following A, and client C is not present in the followers. + executor.run_until_parked(); + for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("D", &cx_d)] { + assert_eq!( + followers_by_leader(project_id, cx), + &[(peer_id_a, vec![peer_id_b]),], + "followers seen by {name}" + ); + } + + // When client A activates a different editor, client B does so as well. + workspace_a.update(cx_a, |workspace, cx| { + workspace.activate_item(&editor_a1, cx) + }); + executor.run_until_parked(); + workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + editor_b1.item_id() + ); + }); + + // When client A opens a multibuffer, client B does so as well. + let multibuffer_a = cx_a.new_model(|cx| { + let buffer_a1 = project_a.update(cx, |project, cx| { + project + .get_open_buffer(&(worktree_id, "1.txt").into(), cx) + .unwrap() + }); + let buffer_a2 = project_a.update(cx, |project, cx| { + project + .get_open_buffer(&(worktree_id, "2.txt").into(), cx) + .unwrap() + }); + let mut result = MultiBuffer::new(0, Capability::ReadWrite); + result.push_excerpts( + buffer_a1, + [ExcerptRange { + context: 0..3, + primary: None, + }], + cx, + ); + result.push_excerpts( + buffer_a2, + [ExcerptRange { + context: 4..7, + primary: None, + }], + cx, + ); + result + }); + let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| { + let editor = + cx.new_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx)); + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx); + editor + }); + executor.run_until_parked(); + let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| { + workspace + .active_item(cx) + .unwrap() + .downcast::() + .unwrap() + }); + assert_eq!( + multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)), + multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)), + ); + + // When client A navigates back and forth, client B does so as well. 
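+    // Navigation history is replicated through following: each go_back/go_forward on the
+    // leader switches the follower's active item to the matching editor.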
+ workspace_a + .update(cx_a, |workspace, cx| { + workspace.go_back(workspace.active_pane().downgrade(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + editor_b1.item_id() + ); + }); + + workspace_a + .update(cx_a, |workspace, cx| { + workspace.go_back(workspace.active_pane().downgrade(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + editor_b2.item_id() + ); + }); + + workspace_a + .update(cx_a, |workspace, cx| { + workspace.go_forward(workspace.active_pane().downgrade(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + editor_b1.item_id() + ); + }); + + // Changes to client A's editor are reflected on client B. + editor_a1.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2])); + }); + executor.advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); + executor.run_until_parked(); + cx_b.background_executor.run_until_parked(); + + editor_b1.update(cx_b, |editor, cx| { + assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]); + }); + + editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx)); + executor.run_until_parked(); + editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO")); + + editor_a1.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([3..3])); + editor.set_scroll_position(point(0., 100.), cx); + }); + executor.advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); + executor.run_until_parked(); + editor_b1.update(cx_b, |editor, cx| { + assert_eq!(editor.selections.ranges(cx), &[3..3]); + }); + + // After unfollowing, client B stops receiving updates from client A. + workspace_b.update(cx_b, |workspace, cx| { + workspace.unfollow(&workspace.active_pane().clone(), cx) + }); + workspace_a.update(cx_a, |workspace, cx| { + workspace.activate_item(&editor_a2, cx) + }); + executor.run_until_parked(); + assert_eq!( + workspace_b.update(cx_b, |workspace, cx| workspace + .active_item(cx) + .unwrap() + .item_id()), + editor_b1.item_id() + ); + + // Client A starts following client B. + workspace_a.update(cx_a, |workspace, cx| workspace.follow(peer_id_b, cx)); + executor.run_until_parked(); + assert_eq!( + workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)), + Some(peer_id_b) + ); + assert_eq!( + workspace_a.update(cx_a, |workspace, cx| workspace + .active_item(cx) + .unwrap() + .item_id()), + editor_a1.item_id() + ); + + // Client B activates an external window, which causes a new screen-sharing item to be added to the pane. + let display = MacOSDisplay::new(); + active_call_b + .update(cx_b, |call, cx| call.set_location(None, cx)) + .await + .unwrap(); + active_call_b + .update(cx_b, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_display_sources(vec![display.clone()]); + room.share_screen(cx) + }) + }) + .await + .unwrap(); + executor.run_until_parked(); + let shared_screen = workspace_a.update(cx_a, |workspace, cx| { + workspace + .active_item(cx) + .expect("no active item") + .downcast::() + .expect("active item isn't a shared screen") + }); + + // Client B activates Zed again, which causes the previous editor to become focused again. 
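+ // Returning B's location to the shared project is how the test simulates re-activating Zed.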
+ active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + executor.run_until_parked(); + workspace_a.update(cx_a, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + editor_a1.item_id() + ) + }); + + // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer. + workspace_b.update(cx_b, |workspace, cx| { + workspace.activate_item(&multibuffer_editor_b, cx) + }); + executor.run_until_parked(); + workspace_a.update(cx_a, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + multibuffer_editor_a.item_id() + ) + }); + + // Client B activates a panel, and the previously-opened screen-sharing item gets activated. + let panel = cx_b.new_view(|cx| TestPanel::new(DockPosition::Left, cx)); + workspace_b.update(cx_b, |workspace, cx| { + workspace.add_panel(panel, cx); + workspace.toggle_panel_focus::(cx); + }); + executor.run_until_parked(); + assert_eq!( + workspace_a.update(cx_a, |workspace, cx| workspace + .active_item(cx) + .unwrap() + .item_id()), + shared_screen.item_id() + ); + + // Toggling the focus back to the pane causes client A to return to the multibuffer. + workspace_b.update(cx_b, |workspace, cx| { + workspace.toggle_panel_focus::(cx); + }); + executor.run_until_parked(); + workspace_a.update(cx_a, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().item_id(), + multibuffer_editor_a.item_id() + ) + }); + + // Client B activates an item that doesn't implement following, + // so the previously-opened screen-sharing item gets activated. + let unfollowable_item = cx_b.new_view(|cx| TestItem::new(cx)); + workspace_b.update(cx_b, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(unfollowable_item), true, true, None, cx) + }) + }); + executor.run_until_parked(); + assert_eq!( + workspace_a.update(cx_a, |workspace, cx| workspace + .active_item(cx) + .unwrap() + .item_id()), + shared_screen.item_id() + ); + + // Following interrupts when client B disconnects. 
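+ // Advancing the clock past RECONNECT_TIMEOUT gives the server time to notice the dropped connection; the assertion below expects pane A to have no leader afterwards.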
+ client_b.disconnect(&cx_b.to_async()); + executor.advance_clock(RECONNECT_TIMEOUT); + assert_eq!( + workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)), + None + ); +} + +#[gpui::test] +async fn test_following_tab_order( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "1.txt": "one", + "2.txt": "two", + "3.txt": "three", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone()); + + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone()); + + let client_b_id = project_a.update(cx_a, |project, _| { + project.collaborators().values().next().unwrap().peer_id + }); + + //Open 1, 3 in that order on client A + workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap(); + workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "3.txt"), None, true, cx) + }) + .await + .unwrap(); + + let pane_paths = |pane: &View, cx: &mut VisualTestContext| { + pane.update(cx, |pane, cx| { + pane.items() + .map(|item| { + item.project_path(cx) + .unwrap() + .path + .to_str() + .unwrap() + .to_owned() + }) + .collect::>() + }) + }; + + //Verify that the tabs opened in the order we expect + assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]); + + //Follow client B as client A + workspace_a.update(cx_a, |workspace, cx| workspace.follow(client_b_id, cx)); + executor.run_until_parked(); + + //Open just 2 on client B + workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "2.txt"), None, true, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + + // Verify that newly opened followed file is at the end + assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]); + + //Open just 1 on client B + workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap(); + assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]); + executor.run_until_parked(); + + // Verify that following into 1 did not reorder + assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]); +} + +#[gpui::test(iterations = 10)] +async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let executor = cx_a.executor(); 
+ let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + // Client A shares a project. + client_a + .fs() + .insert_tree( + "/a", + json!({ + "1.txt": "one", + "2.txt": "two", + "3.txt": "three", + "4.txt": "four", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Client B joins the project. + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + // Client A opens a file. + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // Client B opens a different file. + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "2.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // Clients A and B follow each other in split panes + workspace_a.update(cx_a, |workspace, cx| { + workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx); + }); + workspace_a.update(cx_a, |workspace, cx| { + workspace.follow(client_b.peer_id().unwrap(), cx) + }); + executor.run_until_parked(); + workspace_b.update(cx_b, |workspace, cx| { + workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx); + }); + workspace_b.update(cx_b, |workspace, cx| { + workspace.follow(client_a.peer_id().unwrap(), cx) + }); + executor.run_until_parked(); + + // Clients A and B return focus to the original files they had open + workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx)); + workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx)); + executor.run_until_parked(); + + // Both clients see the other client's focused file in their right pane. + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: true, + leader: None, + items: vec![(true, "1.txt".into())] + }, + PaneSummary { + active: false, + leader: client_b.peer_id(), + items: vec![(false, "1.txt".into()), (true, "2.txt".into())] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: true, + leader: None, + items: vec![(true, "2.txt".into())] + }, + PaneSummary { + active: false, + leader: client_a.peer_id(), + items: vec![(false, "2.txt".into()), (true, "1.txt".into())] + }, + ] + ); + + // Clients A and B each open a new file. 
+ workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "3.txt"), None, true, cx) + }) + .await + .unwrap(); + + workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "4.txt"), None, true, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + + // Both client's see the other client open the new file, but keep their + // focus on their own active pane. + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: true, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: false, + leader: client_b.peer_id(), + items: vec![ + (false, "1.txt".into()), + (false, "2.txt".into()), + (true, "4.txt".into()) + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: true, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: false, + leader: client_a.peer_id(), + items: vec![ + (false, "2.txt".into()), + (false, "1.txt".into()), + (true, "3.txt".into()) + ] + }, + ] + ); + + // Client A focuses their right pane, in which they're following client B. + workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx)); + executor.run_until_parked(); + + // Client B sees that client A is now looking at the same file as them. + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: client_b.peer_id(), + items: vec![ + (false, "1.txt".into()), + (false, "2.txt".into()), + (true, "4.txt".into()) + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: true, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: false, + leader: client_a.peer_id(), + items: vec![ + (false, "2.txt".into()), + (false, "1.txt".into()), + (false, "3.txt".into()), + (true, "4.txt".into()) + ] + }, + ] + ); + + // Client B focuses their right pane, in which they're following client A, + // who is following them. + workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx)); + executor.run_until_parked(); + + // Client A sees that client B is now looking at the same file as them. + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: true, + leader: client_a.peer_id(), + items: vec![ + (false, "2.txt".into()), + (false, "1.txt".into()), + (false, "3.txt".into()), + (true, "4.txt".into()) + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: client_b.peer_id(), + items: vec![ + (false, "1.txt".into()), + (false, "2.txt".into()), + (true, "4.txt".into()) + ] + }, + ] + ); + + // Client B focuses a file that they previously followed A to, breaking + // the follow. + workspace_b.update(cx_b, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.activate_prev_item(true, cx); + }); + }); + executor.run_until_parked(); + + // Both clients see that client B is looking at that previous file. 
+ assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: true, + leader: None, + items: vec![ + (false, "2.txt".into()), + (false, "1.txt".into()), + (true, "3.txt".into()), + (false, "4.txt".into()) + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: client_b.peer_id(), + items: vec![ + (false, "1.txt".into()), + (false, "2.txt".into()), + (false, "4.txt".into()), + (true, "3.txt".into()), + ] + }, + ] + ); + + // Client B closes tabs, some of which were originally opened by client A, + // and some of which were originally opened by client B. + workspace_b.update(cx_b, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.close_inactive_items(&Default::default(), cx) + .unwrap() + .detach(); + }); + }); + + executor.run_until_parked(); + + // Both clients see that Client B is looking at the previous tab. + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: true, + leader: None, + items: vec![(true, "3.txt".into()),] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: client_b.peer_id(), + items: vec![ + (false, "1.txt".into()), + (false, "2.txt".into()), + (false, "4.txt".into()), + (true, "3.txt".into()), + ] + }, + ] + ); + + // Client B follows client A again. + workspace_b.update(cx_b, |workspace, cx| { + workspace.follow(client_a.peer_id().unwrap(), cx) + }); + executor.run_until_parked(); + // Client A cycles through some tabs. + workspace_a.update(cx_a, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.activate_prev_item(true, cx); + }); + }); + executor.run_until_parked(); + + // Client B follows client A into those tabs. 
+ assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: None, + items: vec![ + (false, "1.txt".into()), + (false, "2.txt".into()), + (true, "4.txt".into()), + (false, "3.txt".into()), + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: true, + leader: client_a.peer_id(), + items: vec![(false, "3.txt".into()), (true, "4.txt".into())] + }, + ] + ); + + workspace_a.update(cx_a, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.activate_prev_item(true, cx); + }); + }); + executor.run_until_parked(); + + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: None, + items: vec![ + (false, "1.txt".into()), + (true, "2.txt".into()), + (false, "4.txt".into()), + (false, "3.txt".into()), + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: true, + leader: client_a.peer_id(), + items: vec![ + (false, "3.txt".into()), + (false, "4.txt".into()), + (true, "2.txt".into()) + ] + }, + ] + ); + + workspace_a.update(cx_a, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.activate_prev_item(true, cx); + }); + }); + executor.run_until_parked(); + + assert_eq!( + pane_summaries(&workspace_a, cx_a), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "1.txt".into()), (true, "3.txt".into())] + }, + PaneSummary { + active: true, + leader: None, + items: vec![ + (true, "1.txt".into()), + (false, "2.txt".into()), + (false, "4.txt".into()), + (false, "3.txt".into()), + ] + }, + ] + ); + assert_eq!( + pane_summaries(&workspace_b, cx_b), + &[ + PaneSummary { + active: false, + leader: None, + items: vec![(false, "2.txt".into()), (true, "4.txt".into())] + }, + PaneSummary { + active: true, + leader: client_a.peer_id(), + items: vec![ + (false, "3.txt".into()), + (false, "4.txt".into()), + (false, "2.txt".into()), + (true, "1.txt".into()), + ] + }, + ] + ); +} + +#[gpui::test(iterations = 10)] +async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + // 2 clients connect to a server. + let executor = cx_a.executor(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + // Client A shares a project. 
+ client_a + .fs() + .insert_tree( + "/a", + json!({ + "1.txt": "one", + "2.txt": "two", + "3.txt": "three", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let _editor_a1 = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // Client B starts following client A. + let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone()); + let leader_id = project_b.update(cx_b, |project, _| { + project.collaborators().values().next().unwrap().peer_id + }); + workspace_b.update(cx_b, |workspace, cx| workspace.follow(leader_id, cx)); + executor.run_until_parked(); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + Some(leader_id) + ); + let editor_b2 = workspace_b.update(cx_b, |workspace, cx| { + workspace + .active_item(cx) + .unwrap() + .downcast::() + .unwrap() + }); + + // When client B moves, it automatically stops following client A. + editor_b2.update(cx_b, |editor, cx| { + editor.move_right(&editor::actions::MoveRight, cx) + }); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + None + ); + + workspace_b.update(cx_b, |workspace, cx| workspace.follow(leader_id, cx)); + executor.run_until_parked(); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + Some(leader_id) + ); + + // When client B edits, it automatically stops following client A. + editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx)); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + None + ); + + workspace_b.update(cx_b, |workspace, cx| workspace.follow(leader_id, cx)); + executor.run_until_parked(); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + Some(leader_id) + ); + + // When client B scrolls, it automatically stops following client A. + editor_b2.update(cx_b, |editor, cx| { + editor.set_scroll_position(point(0., 3.), cx) + }); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + None + ); + + workspace_b.update(cx_b, |workspace, cx| workspace.follow(leader_id, cx)); + executor.run_until_parked(); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + Some(leader_id) + ); + + // When client B activates a different pane, it continues following client A in the original pane. 
+ workspace_b.update(cx_b, |workspace, cx| { + workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx) + }); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + Some(leader_id) + ); + + workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx)); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + Some(leader_id) + ); + + // When client B activates a different item in the original pane, it automatically stops following client A. + workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "2.txt"), None, true, cx) + }) + .await + .unwrap(); + assert_eq!( + workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)), + None + ); +} + +#[gpui::test(iterations = 10)] +async fn test_peers_simultaneously_following_each_other( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let executor = cx_a.executor(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + cx_a.update(editor::init); + cx_b.update(editor::init); + + client_a.fs().insert_tree("/a", json!({})).await; + let (project_a, _) = client_a.build_local_project("/a", cx_a).await; + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + executor.run_until_parked(); + let client_a_id = project_b.update(cx_b, |project, _| { + project.collaborators().values().next().unwrap().peer_id + }); + let client_b_id = project_a.update(cx_a, |project, _| { + project.collaborators().values().next().unwrap().peer_id + }); + + workspace_a.update(cx_a, |workspace, cx| workspace.follow(client_b_id, cx)); + workspace_b.update(cx_b, |workspace, cx| workspace.follow(client_a_id, cx)); + executor.run_until_parked(); + + workspace_a.update(cx_a, |workspace, _| { + assert_eq!( + workspace.leader_for_pane(workspace.active_pane()), + Some(client_b_id) + ); + }); + workspace_b.update(cx_b, |workspace, _| { + assert_eq!( + workspace.leader_for_pane(workspace.active_pane()), + Some(client_a_id) + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + // a and b join a channel/call + // a shares project 1 + // b shares project 2 + // + // b follows a: causes project 2 to be joined, and b to follow a. 
+ // b opens a different file in project 2, a follows b + // b opens a different file in project 1, a cannot follow b + // b shares the project, a joins the project and follows b + let executor = cx_a.executor(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "w.rs": "", + "x.rs": "", + }), + ) + .await; + + client_b + .fs() + .insert_tree( + "/b", + json!({ + "y.rs": "", + "z.rs": "", + }), + ) + .await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await; + let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await; + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id_a, "w.rs"), None, true, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!(visible_push_notifications(cx_b).len(), 1); + + workspace_b.update(cx_b, |workspace, cx| { + workspace.follow(client_a.peer_id().unwrap(), cx) + }); + + executor.run_until_parked(); + let window_b_project_a = *cx_b + .windows() + .iter() + .max_by_key(|window| window.window_id()) + .unwrap(); + + let mut cx_b2 = VisualTestContext::from_window(window_b_project_a, cx_b); + + let workspace_b_project_a = window_b_project_a + .downcast::() + .unwrap() + .root(cx_b) + .unwrap(); + + // assert that b is following a in project a in w.rs + workspace_b_project_a.update(&mut cx_b2, |workspace, cx| { + assert!(workspace.is_being_followed(client_a.peer_id().unwrap())); + assert_eq!( + client_a.peer_id(), + workspace.leader_for_pane(workspace.active_pane()) + ); + let item = workspace.active_item(cx).unwrap(); + assert_eq!( + item.tab_description(0, cx).unwrap(), + SharedString::from("w.rs") + ); + }); + + // TODO: in app code, this would be done by the collab_ui. 
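+ // Here the test sets B's location by hand to the copy of project A that B just joined.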
+ active_call_b + .update(&mut cx_b2, |call, cx| { + let project = workspace_b_project_a.read(cx).project().clone(); + call.set_location(Some(&project), cx) + }) + .await + .unwrap(); + + // assert that there are no share notifications open + assert_eq!(visible_push_notifications(cx_b).len(), 0); + + // b moves to x.rs in a's project, and a follows + workspace_b_project_a + .update(&mut cx_b2, |workspace, cx| { + workspace.open_path((worktree_id_a, "x.rs"), None, true, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + workspace_b_project_a.update(&mut cx_b2, |workspace, cx| { + let item = workspace.active_item(cx).unwrap(); + assert_eq!( + item.tab_description(0, cx).unwrap(), + SharedString::from("x.rs") + ); + }); + + workspace_a.update(cx_a, |workspace, cx| { + workspace.follow(client_b.peer_id().unwrap(), cx) + }); + + executor.run_until_parked(); + workspace_a.update(cx_a, |workspace, cx| { + assert!(workspace.is_being_followed(client_b.peer_id().unwrap())); + assert_eq!( + client_b.peer_id(), + workspace.leader_for_pane(workspace.active_pane()) + ); + let item = workspace.active_pane().read(cx).active_item().unwrap(); + assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs"); + }); + + // b moves to y.rs in b's project, a is still following but can't yet see + workspace_b + .update(cx_b, |workspace, cx| { + workspace.open_path((worktree_id_b, "y.rs"), None, true, cx) + }) + .await + .unwrap(); + + // TODO: in app code, this would be done by the collab_ui. + active_call_b + .update(cx_b, |call, cx| { + let project = workspace_b.read(cx).project().clone(); + call.set_location(Some(&project), cx) + }) + .await + .unwrap(); + + let project_b_id = active_call_b + .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx)) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!(visible_push_notifications(cx_a).len(), 1); + cx_a.update(|cx| { + workspace::join_in_room_project( + project_b_id, + client_b.user_id().unwrap(), + client_a.app_state.clone(), + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + assert_eq!(visible_push_notifications(cx_a).len(), 0); + let window_a_project_b = *cx_a + .windows() + .iter() + .max_by_key(|window| window.window_id()) + .unwrap(); + let cx_a2 = &mut VisualTestContext::from_window(window_a_project_b, cx_a); + let workspace_a_project_b = window_a_project_b + .downcast::() + .unwrap() + .root(cx_a) + .unwrap(); + + workspace_a_project_b.update(cx_a2, |workspace, cx| { + assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id)); + assert!(workspace.is_being_followed(client_b.peer_id().unwrap())); + assert_eq!( + client_b.peer_id(), + workspace.leader_for_pane(workspace.active_pane()) + ); + let item = workspace.active_item(cx).unwrap(); + assert_eq!( + item.tab_description(0, cx).unwrap(), + SharedString::from("y.rs") + ); + }); +} + +#[gpui::test] +async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let (_server, client_a, client_b, channel_id) = TestServer::start2(cx_a, cx_b).await; + + let (workspace_a, cx_a) = client_a.build_test_workspace(cx_a).await; + client_a + .host_workspace(&workspace_a, channel_id, cx_a) + .await; + let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await; + + cx_a.simulate_keystrokes("cmd-p 2 enter"); + cx_a.run_until_parked(); + + let editor_a = workspace_a.update(cx_a, |workspace, cx| { + workspace.active_item_as::(cx).unwrap() + }); + let editor_b = workspace_b.update(cx_b, |workspace, cx| 
{ + workspace.active_item_as::(cx).unwrap() + }); + + // b should follow a to position 1 + editor_a.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([1..1])) + }); + cx_a.executor() + .advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); + cx_a.run_until_parked(); + editor_b.update(cx_b, |editor, cx| { + assert_eq!(editor.selections.ranges(cx), vec![1..1]) + }); + + // a unshares the project + cx_a.update(|cx| { + let project = workspace_a.read(cx).project().clone(); + ActiveCall::global(cx).update(cx, |call, cx| { + call.unshare_project(project, cx).unwrap(); + }) + }); + cx_a.run_until_parked(); + + // b should not follow a to position 2 + editor_a.update(cx_a, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([2..2])) + }); + cx_a.executor() + .advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); + cx_a.run_until_parked(); + editor_b.update(cx_b, |editor, cx| { + assert_eq!(editor.selections.ranges(cx), vec![1..1]) + }); + cx_b.update(|cx| { + let room = ActiveCall::global(cx).read(cx).room().unwrap().read(cx); + let participant = room.remote_participants().get(&client_a.id()).unwrap(); + assert_eq!(participant.location, ParticipantLocation::UnsharedProject) + }) +} + +#[gpui::test] +async fn test_following_into_excluded_file( + mut cx_a: &mut TestAppContext, + mut cx_b: &mut TestAppContext, +) { + let executor = cx_a.executor(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + for cx in [&mut cx_a, &mut cx_b] { + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |settings| { + settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]); + }); + }); + }); + } + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let peer_id_a = client_a.peer_id().unwrap(); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + ".git": { + "COMMIT_EDITMSG": "write your commit message here", + }, + "1.txt": "one\none\none", + "2.txt": "two\ntwo\ntwo", + "3.txt": "three\nthree\nthree", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + // Client A opens editors for a regular file and an excluded file. 
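+ // The .git directory matches the file_scan_exclusions glob configured above, so COMMIT_EDITMSG is excluded from the worktree scan but can still be opened directly by path.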
+ let editor_for_regular = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let editor_for_excluded_a = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // Client A updates their selections in those editors + editor_for_regular.update(cx_a, |editor, cx| { + editor.handle_input("a", cx); + editor.handle_input("b", cx); + editor.handle_input("c", cx); + editor.select_left(&Default::default(), cx); + assert_eq!(editor.selections.ranges(cx), vec![3..2]); + }); + editor_for_excluded_a.update(cx_a, |editor, cx| { + editor.select_all(&Default::default(), cx); + editor.handle_input("new commit message", cx); + editor.select_left(&Default::default(), cx); + assert_eq!(editor.selections.ranges(cx), vec![18..17]); + }); + + // When client B starts following client A, currently visible file is replicated + workspace_b.update(cx_b, |workspace, cx| workspace.follow(peer_id_a, cx)); + executor.advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); + executor.run_until_parked(); + + let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| { + workspace + .active_item(cx) + .unwrap() + .downcast::() + .unwrap() + }); + assert_eq!( + cx_b.read(|cx| editor_for_excluded_b.project_path(cx)), + Some((worktree_id, ".git/COMMIT_EDITMSG").into()) + ); + assert_eq!( + editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)), + vec![18..17] + ); + + editor_for_excluded_a.update(cx_a, |editor, cx| { + editor.select_right(&Default::default(), cx); + }); + executor.advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); + executor.run_until_parked(); + + // Changes from B to the excluded file are replicated in A's editor + editor_for_excluded_b.update(cx_b, |editor, cx| { + editor.handle_input("\nCo-Authored-By: B ", cx); + }); + executor.run_until_parked(); + editor_for_excluded_a.update(cx_a, |editor, cx| { + assert_eq!( + editor.text(cx), + "new commit message\nCo-Authored-By: B " + ); + }); +} + +fn visible_push_notifications( + cx: &mut TestAppContext, +) -> Vec> { + let mut ret = Vec::new(); + for window in cx.windows() { + window + .update(cx, |window, _| { + if let Ok(handle) = window.downcast::() { + ret.push(handle) + } + }) + .unwrap(); + } + ret +} + +#[derive(Debug, PartialEq, Eq)] +struct PaneSummary { + active: bool, + leader: Option, + items: Vec<(bool, String)>, +} + +fn followers_by_leader(project_id: u64, cx: &TestAppContext) -> Vec<(PeerId, Vec)> { + cx.read(|cx| { + let active_call = ActiveCall::global(cx).read(cx); + let peer_id = active_call.client().peer_id(); + let room = active_call.room().unwrap().read(cx); + let mut result = room + .remote_participants() + .values() + .map(|participant| participant.peer_id) + .chain(peer_id) + .filter_map(|peer_id| { + let followers = room.followers_for(peer_id, project_id); + if followers.is_empty() { + None + } else { + Some((peer_id, followers.to_vec())) + } + }) + .collect::>(); + result.sort_by_key(|e| e.0); + result + }) +} + +fn pane_summaries(workspace: &View, cx: &mut VisualTestContext) -> Vec { + workspace.update(cx, |workspace, cx| { + let active_pane = workspace.active_pane(); + workspace + .panes() + .iter() + .map(|pane| { + let leader = workspace.leader_for_pane(pane); + let active = pane == active_pane; + let pane = pane.read(cx); + let active_ix = 
pane.active_item_index(); + PaneSummary { + active, + leader, + items: pane + .items() + .enumerate() + .map(|(ix, item)| { + ( + ix == active_ix, + item.tab_description(0, cx) + .map_or(String::new(), |s| s.to_string()), + ) + }) + .collect(), + } + }) + .collect() + }) +} + +#[gpui::test(iterations = 10)] +async fn test_following_to_channel_notes_without_a_shared_project( + deterministic: BackgroundExecutor, + mut cx_a: &mut TestAppContext, + mut cx_b: &mut TestAppContext, + mut cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(deterministic.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + cx_a.update(editor::init); + cx_b.update(editor::init); + cx_c.update(editor::init); + cx_a.update(collab_ui::channel_view::init); + cx_b.update(collab_ui::channel_view::init); + cx_c.update(collab_ui::channel_view::init); + + let channel_1_id = server + .make_channel( + "channel-1", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + let channel_2_id = server + .make_channel( + "channel-2", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + // Clients A, B, and C join a channel. + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + for (call, cx) in [ + (&active_call_a, &mut cx_a), + (&active_call_b, &mut cx_b), + (&active_call_c, &mut cx_c), + ] { + call.update(*cx, |call, cx| call.join_channel(channel_1_id, cx)) + .await + .unwrap(); + } + deterministic.run_until_parked(); + + // Clients A, B, and C all open their own unshared projects. + client_a + .fs() + .insert_tree("/a", json!({ "1.txt": "" })) + .await; + client_b.fs().insert_tree("/b", json!({})).await; + client_c.fs().insert_tree("/c", json!({})).await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let (project_b, _) = client_b.build_local_project("/b", cx_b).await; + let (project_c, _) = client_b.build_local_project("/c", cx_c).await; + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let (_workspace_c, _cx_c) = client_c.build_workspace(&project_c, cx_c); + + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + + // Client A opens the notes for channel 1. + let channel_notes_1_a = cx_a + .update(|cx| ChannelView::open(channel_1_id, None, workspace_a.clone(), cx)) + .await + .unwrap(); + channel_notes_1_a.update(cx_a, |notes, cx| { + assert_eq!(notes.channel(cx).unwrap().name, "channel-1"); + notes.editor.update(cx, |editor, cx| { + editor.insert("Hello from A.", cx); + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![3..4]); + }); + }); + }); + + // Client B follows client A. + workspace_b + .update(cx_b, |workspace, cx| { + workspace + .start_following(client_a.peer_id().unwrap(), cx) + .unwrap() + }) + .await + .unwrap(); + + // Client B is taken to the notes for channel 1, with the same + // text selected as client A. 
+ deterministic.run_until_parked(); + let channel_notes_1_b = workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.leader_for_pane(workspace.active_pane()), + Some(client_a.peer_id().unwrap()) + ); + workspace + .active_item(cx) + .expect("no active item") + .downcast::() + .expect("active item is not a channel view") + }); + channel_notes_1_b.update(cx_b, |notes, cx| { + assert_eq!(notes.channel(cx).unwrap().name, "channel-1"); + let editor = notes.editor.read(cx); + assert_eq!(editor.text(cx), "Hello from A."); + assert_eq!(editor.selections.ranges::(cx), &[3..4]); + }); + + // Client A opens the notes for channel 2. + let channel_notes_2_a = cx_a + .update(|cx| ChannelView::open(channel_2_id, None, workspace_a.clone(), cx)) + .await + .unwrap(); + channel_notes_2_a.update(cx_a, |notes, cx| { + assert_eq!(notes.channel(cx).unwrap().name, "channel-2"); + }); + + // Client B is taken to the notes for channel 2. + deterministic.run_until_parked(); + let channel_notes_2_b = workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.leader_for_pane(workspace.active_pane()), + Some(client_a.peer_id().unwrap()) + ); + workspace + .active_item(cx) + .expect("no active item") + .downcast::() + .expect("active item is not a channel view") + }); + channel_notes_2_b.update(cx_b, |notes, cx| { + assert_eq!(notes.channel(cx).unwrap().name, "channel-2"); + }); + + // Client A opens a local buffer in their unshared project. + let _unshared_editor_a1 = workspace_a + .update(cx_a, |workspace, cx| { + workspace.open_path((worktree_id, "1.txt"), None, true, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + // This does not send any leader update message to client B. + // If it did, an error would occur on client B, since this buffer + // is not shared with them. 
+ deterministic.run_until_parked(); + workspace_b.update(cx_b, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).expect("no active item").item_id(), + channel_notes_2_b.entity_id() + ); + }); +} + +pub(crate) async fn join_channel( + channel_id: ChannelId, + client: &TestClient, + cx: &mut TestAppContext, +) -> anyhow::Result<()> { + cx.update(|cx| workspace::join_channel(channel_id, client.app_state.clone(), None, cx)) + .await +} + +async fn share_workspace( + workspace: &View, + cx: &mut VisualTestContext, +) -> anyhow::Result { + let project = workspace.update(cx, |workspace, _| workspace.project().clone()); + cx.read(ActiveCall::global) + .update(cx, |call, cx| call.share_project(project, cx)) + .await +} + +#[gpui::test] +async fn test_following_to_channel_notes_other_workspace( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let (_server, client_a, client_b, channel) = TestServer::start2(cx_a, cx_b).await; + + let mut cx_a2 = cx_a.clone(); + let (workspace_a, cx_a) = client_a.build_test_workspace(cx_a).await; + join_channel(channel, &client_a, cx_a).await.unwrap(); + share_workspace(&workspace_a, cx_a).await.unwrap(); + + // a opens 1.txt + cx_a.simulate_keystrokes("cmd-p 1 enter"); + cx_a.run_until_parked(); + workspace_a.update(cx_a, |workspace, cx| { + let editor = workspace.active_item(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); + + // b joins channel and is following a + join_channel(channel, &client_b, cx_b).await.unwrap(); + cx_b.run_until_parked(); + let (workspace_b, cx_b) = client_b.active_workspace(cx_b); + workspace_b.update(cx_b, |workspace, cx| { + let editor = workspace.active_item(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); + + // a opens a second workspace and the channel notes + let (workspace_a2, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await; + cx_a2.update(|cx| cx.activate_window()); + cx_a2 + .update(|cx| ChannelView::open(channel, None, workspace_a2, cx)) + .await + .unwrap(); + cx_a2.run_until_parked(); + + // b should follow a to the channel notes + workspace_b.update(cx_b, |workspace, cx| { + let editor = workspace.active_item_as::(cx).unwrap(); + assert_eq!(editor.read(cx).channel(cx).unwrap().id, channel); + }); + + // a returns to the shared project + cx_a.update(|cx| cx.activate_window()); + cx_a.run_until_parked(); + + workspace_a.update(cx_a, |workspace, cx| { + let editor = workspace.active_item(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); + + // b should follow a back + workspace_b.update(cx_b, |workspace, cx| { + let editor = workspace.active_item_as::(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); +} + +#[gpui::test] +async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let (_server, client_a, client_b, channel) = TestServer::start2(cx_a, cx_b).await; + + let mut cx_a2 = cx_a.clone(); + let (workspace_a, cx_a) = client_a.build_test_workspace(cx_a).await; + join_channel(channel, &client_a, cx_a).await.unwrap(); + share_workspace(&workspace_a, cx_a).await.unwrap(); + + // a opens 1.txt + cx_a.simulate_keystrokes("cmd-p 1 enter"); + cx_a.run_until_parked(); + workspace_a.update(cx_a, |workspace, cx| { + let editor = workspace.active_item(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); + + // b joins channel and is following a + join_channel(channel, &client_b, cx_b).await.unwrap(); + 
cx_b.run_until_parked(); + let (workspace_b, cx_b) = client_b.active_workspace(cx_b); + workspace_b.update(cx_b, |workspace, cx| { + let editor = workspace.active_item(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); + + // stop following + cx_b.simulate_keystrokes("down"); + + // a opens a different file while not followed + cx_a.simulate_keystrokes("cmd-p 2 enter"); + + workspace_b.update(cx_b, |workspace, cx| { + let editor = workspace.active_item_as::(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "1.txt"); + }); + + // a opens a file in a new window + let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await; + cx_a2.update(|cx| cx.activate_window()); + cx_a2.simulate_keystrokes("cmd-p 3 enter"); + cx_a2.run_until_parked(); + + // b starts following a again + cx_b.simulate_keystrokes("cmd-ctrl-alt-f"); + cx_a.run_until_parked(); + + // a returns to the shared project + cx_a.update(|cx| cx.activate_window()); + cx_a.run_until_parked(); + + workspace_a.update(cx_a, |workspace, cx| { + let editor = workspace.active_item(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "2.js"); + }); + + // b should follow a back + workspace_b.update(cx_b, |workspace, cx| { + let editor = workspace.active_item_as::(cx).unwrap(); + assert_eq!(editor.tab_description(0, cx).unwrap(), "2.js"); + }); +} diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs new file mode 100644 index 0000000..003c0c6 --- /dev/null +++ b/crates/collab/src/tests/integration_tests.rs @@ -0,0 +1,6438 @@ +use crate::{ + rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}, + tests::{ + channel_id, following_tests::join_channel, room_participants, rust_lang, RoomParticipants, + TestClient, TestServer, + }, +}; +use anyhow::{anyhow, Result}; +use call::{room, ActiveCall, ParticipantLocation, Room}; +use client::{User, RECEIVE_TIMEOUT}; +use collections::{HashMap, HashSet}; +use fs::{FakeFs, Fs as _, RemoveOptions}; +use futures::{channel::mpsc, StreamExt as _}; +use git::repository::GitFileStatus; +use gpui::{ + px, size, AppContext, BackgroundExecutor, Model, Modifiers, MouseButton, MouseDownEvent, + TestAppContext, UpdateGlobal, +}; +use language::{ + language_settings::{AllLanguageSettings, Formatter, PrettierSettings}, + tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig, + LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, +}; +use live_kit_client::MacOSDisplay; +use lsp::LanguageServerId; +use parking_lot::Mutex; +use project::{ + search::SearchQuery, DiagnosticSummary, FormatTrigger, HoverBlockKind, Project, ProjectPath, + SearchResult, +}; +use rand::prelude::*; +use serde_json::json; +use settings::SettingsStore; +use std::{ + cell::{Cell, RefCell}, + env, future, mem, + path::{Path, PathBuf}, + rc::Rc, + sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, + }, + time::Duration, +}; +use unindent::Unindent as _; +use workspace::Pane; + +#[ctor::ctor] +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} + +#[gpui::test(iterations = 10)] +async fn test_basic_calls( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_b2: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = 
server.create_client(cx_c, "user_c").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + + // Call user B from client A. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: vec!["user_b".to_string()] + } + ); + + // User B receives the call. + + let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + let call_b = incoming_call_b.next().await.unwrap().unwrap(); + assert_eq!(call_b.calling_user.github_login, "user_a"); + + // User B connects via another client and also receives a ring on the newly-connected client. + let _client_b2 = server.create_client(cx_b2, "user_b").await; + let active_call_b2 = cx_b2.read(ActiveCall::global); + + let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming()); + executor.run_until_parked(); + let call_b2 = incoming_call_b2.next().await.unwrap().unwrap(); + assert_eq!(call_b2.calling_user.github_login, "user_a"); + + // User B joins the room using the first client. + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + assert!(incoming_call_b.next().await.unwrap().is_none()); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: Default::default() + } + ); + + // Call user C from client B. + + let mut incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming()); + active_call_b + .update(cx_b, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec!["user_c".to_string()] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec!["user_c".to_string()] + } + ); + + // User C receives the call, but declines it. + let call_c = incoming_call_c.next().await.unwrap().unwrap(); + assert_eq!(call_c.calling_user.github_login, "user_b"); + active_call_c.update(cx_c, |call, cx| call.decline_incoming(cx).unwrap()); + assert!(incoming_call_c.next().await.unwrap().is_none()); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: Default::default() + } + ); + + // Call user C again from user A. 
+ active_call_a + .update(cx_a, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec!["user_c".to_string()] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec!["user_c".to_string()] + } + ); + + // User C accepts the call. + let call_c = incoming_call_c.next().await.unwrap().unwrap(); + assert_eq!(call_c.calling_user.github_login, "user_a"); + active_call_c + .update(cx_c, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + assert!(incoming_call_c.next().await.unwrap().is_none()); + + let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone()); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string(), "user_c".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_c".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_b".to_string()], + pending: Default::default() + } + ); + + // User A shares their screen + let display = MacOSDisplay::new(); + let events_b = active_call_events(cx_b); + let events_c = active_call_events(cx_c); + active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_display_sources(vec![display.clone()]); + room.share_screen(cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // User B observes the remote screen sharing track. + assert_eq!(events_b.borrow().len(), 1); + let event_b = events_b.borrow().first().unwrap().clone(); + if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_b { + assert_eq!(participant_id, client_a.peer_id().unwrap()); + + room_b.read_with(cx_b, |room, _| { + assert_eq!( + room.remote_participants()[&client_a.user_id().unwrap()] + .video_tracks + .len(), + 1 + ); + }); + } else { + panic!("unexpected event") + } + + // User C observes the remote screen sharing track. + assert_eq!(events_c.borrow().len(), 1); + let event_c = events_c.borrow().first().unwrap().clone(); + if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_c { + assert_eq!(participant_id, client_a.peer_id().unwrap()); + + room_c.read_with(cx_c, |room, _| { + assert_eq!( + room.remote_participants()[&client_a.user_id().unwrap()] + .video_tracks + .len(), + 1 + ); + }); + } else { + panic!("unexpected event") + } + + // User A leaves the room. + active_call_a + .update(cx_a, |call, cx| { + let hang_up = call.hang_up(cx); + assert!(call.room().is_none()); + hang_up + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_c".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: Default::default() + } + ); + + // User B gets disconnected from the LiveKit server, which causes them + // to automatically leave the room. 
User C leaves the room as well because + // nobody else is in there. + server + .test_live_kit_server + .disconnect_client(client_b.user_id().unwrap().to_string()) + .await; + executor.run_until_parked(); + + active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none())); + + active_call_c.read_with(cx_c, |call, _| assert!(call.room().is_none())); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); +} + +#[gpui::test(iterations = 10)] +async fn test_calling_multiple_users_simultaneously( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, + cx_d: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + let client_d = server.create_client(cx_d, "user_d").await; + server + .make_contacts(&mut [ + (&client_a, cx_a), + (&client_b, cx_b), + (&client_c, cx_c), + (&client_d, cx_d), + ]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + let active_call_d = cx_d.read(ActiveCall::global); + + // Simultaneously call user B and user C from client A. + let b_invite = active_call_a.update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }); + let c_invite = active_call_a.update(cx_a, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }); + b_invite.await.unwrap(); + c_invite.await.unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: vec!["user_b".to_string(), "user_c".to_string()] + } + ); + + // Call client D from client A. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_d.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: vec![ + "user_b".to_string(), + "user_c".to_string(), + "user_d".to_string() + ] + } + ); + + // Accept the call on all clients simultaneously. 
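+ // All three accepts race against one another; each should succeed and every caller should land in the same four-person room.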
+ let accept_b = active_call_b.update(cx_b, |call, cx| call.accept_incoming(cx)); + let accept_c = active_call_c.update(cx_c, |call, cx| call.accept_incoming(cx)); + let accept_d = active_call_d.update(cx_d, |call, cx| call.accept_incoming(cx)); + accept_b.await.unwrap(); + accept_c.await.unwrap(); + accept_d.await.unwrap(); + + executor.run_until_parked(); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + + let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone()); + + let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone()); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec![ + "user_b".to_string(), + "user_c".to_string(), + "user_d".to_string(), + ], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec![ + "user_a".to_string(), + "user_c".to_string(), + "user_d".to_string(), + ], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec![ + "user_a".to_string(), + "user_b".to_string(), + "user_d".to_string(), + ], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_d, cx_d), + RoomParticipants { + remote: vec![ + "user_a".to_string(), + "user_b".to_string(), + "user_c".to_string(), + ], + pending: Default::default() + } + ); +} + +#[gpui::test(iterations = 10)] +async fn test_joining_channels_and_calling_multiple_users_simultaneously( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let channel_1 = server + .make_channel( + "channel1", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let channel_2 = server + .make_channel( + "channel2", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + + // Simultaneously join channel 1 and then channel 2 + active_call_a + .update(cx_a, |call, cx| call.join_channel(channel_1, cx)) + .detach(); + let join_channel_2 = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, cx)); + + join_channel_2.await.unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); + + assert_eq!(channel_id(&room_a, cx_a), Some(channel_2)); + + // Leave the room + active_call_a + .update(cx_a, |call, cx| { + let hang_up = call.hang_up(cx); + hang_up + }) + .await + .unwrap(); + + // Initiating invites and then joining a channel should fail gracefully + let b_invite = active_call_a.update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }); + let c_invite = active_call_a.update(cx_a, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }); + + let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx)); + + b_invite.await.unwrap(); + c_invite.await.unwrap(); + join_channel.await.unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + 
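// The earlier invites win: the room keeps its outgoing calls and, as asserted below, is not associated with the channel. +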
executor.run_until_parked(); + + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: vec!["user_b".to_string(), "user_c".to_string()] + } + ); + + assert_eq!(channel_id(&room_a, cx_a), None); + + // Leave the room + active_call_a + .update(cx_a, |call, cx| { + let hang_up = call.hang_up(cx); + hang_up + }) + .await + .unwrap(); + + // Simultaneously join channel 1 and call user B and user C from client A. + let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx)); + + let b_invite = active_call_a.update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }); + let c_invite = active_call_a.update(cx_a, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }); + + join_channel.await.unwrap(); + b_invite.await.unwrap(); + c_invite.await.unwrap(); + + active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); +} + +#[gpui::test(iterations = 10)] +async fn test_room_uniqueness( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_a2: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_b2: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let _client_a2 = server.create_client(cx_a2, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let _client_b2 = server.create_client(cx_b2, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_a2 = cx_a2.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_b2 = cx_b2.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + + // Call user B from client A. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + // Ensure a new room can't be created given user A just created one. + active_call_a2 + .update(cx_a2, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }) + .await + .unwrap_err(); + + active_call_a2.read_with(cx_a2, |call, _| assert!(call.room().is_none())); + + // User B receives the call from user A. + + let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + let call_b1 = incoming_call_b.next().await.unwrap().unwrap(); + assert_eq!(call_b1.calling_user.github_login, "user_a"); + + // Ensure calling users A and B from client C fails. + active_call_c + .update(cx_c, |call, cx| { + call.invite(client_a.user_id().unwrap(), None, cx) + }) + .await + .unwrap_err(); + active_call_c + .update(cx_c, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap_err(); + + // Ensure User B can't create a room while they still have an incoming call. + active_call_b2 + .update(cx_b2, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }) + .await + .unwrap_err(); + + active_call_b2.read_with(cx_b2, |call, _| assert!(call.room().is_none())); + + // User B joins the room and calling them after they've joined still fails. 
+ active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + active_call_c + .update(cx_c, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap_err(); + + // Ensure User B can't create a room while they belong to another room. + active_call_b2 + .update(cx_b2, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }) + .await + .unwrap_err(); + + active_call_b2.read_with(cx_b2, |call, _| assert!(call.room().is_none())); + + // Client C can successfully call client B after client B leaves the room. + active_call_b + .update(cx_b, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + active_call_c + .update(cx_c, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + let call_b2 = incoming_call_b.next().await.unwrap().unwrap(); + assert_eq!(call_b2.calling_user.github_login, "user_c"); +} + +#[gpui::test(iterations = 10)] +async fn test_client_disconnecting_from_room( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + // Call user B from client A. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + + // User B receives the call and joins the room. + + let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + incoming_call_b.next().await.unwrap().unwrap(); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: Default::default() + } + ); + + // User A automatically reconnects to the room upon disconnection. + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: Default::default() + } + ); + + // When user A disconnects, both client A and B clear their room on the active call. 
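+ // Forbidding connections makes the next disconnect permanent rather than a transient drop that would auto-reconnect.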
+ server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none())); + + active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none())); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); + + // Allow user A to reconnect to the server. + server.allow_connections(); + executor.advance_clock(RECEIVE_TIMEOUT); + + // Call user B again from client A. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + + // User B receives the call and joins the room. + + let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + incoming_call_b.next().await.unwrap().unwrap(); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: Default::default() + } + ); + + // User B gets disconnected from the LiveKit server, which causes it + // to automatically leave the room. + server + .test_live_kit_server + .disconnect_client(client_b.user_id().unwrap().to_string()) + .await; + executor.run_until_parked(); + active_call_a.update(cx_a, |call, _| assert!(call.room().is_none())); + active_call_b.update(cx_b, |call, _| assert!(call.room().is_none())); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: Default::default(), + pending: Default::default() + } + ); +} + +#[gpui::test(iterations = 10)] +async fn test_server_restarts( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, + cx_d: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + client_a + .fs() + .insert_tree("/a", json!({ "a.txt": "a-contents" })) + .await; + + // Invite client B to collaborate on a project + let (project_a, _) = client_a.build_local_project("/a", cx_a).await; + + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + let client_d = server.create_client(cx_d, "user_d").await; + server + .make_contacts(&mut [ + (&client_a, cx_a), + (&client_b, cx_b), + (&client_c, cx_c), + (&client_d, cx_d), + ]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + let active_call_d = cx_d.read(ActiveCall::global); + + // User A calls users B, C, and D. 
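+ // Each invite carries Some(project_a), so the project is shared as part of the call.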
+ active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), Some(project_a.clone()), cx) + }) + .await + .unwrap(); + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_c.user_id().unwrap(), Some(project_a.clone()), cx) + }) + .await + .unwrap(); + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_d.user_id().unwrap(), Some(project_a.clone()), cx) + }) + .await + .unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + + // User B receives the call and joins the room. + + let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + assert!(incoming_call_b.next().await.unwrap().is_some()); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + + // User C receives the call and joins the room. + + let mut incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming()); + assert!(incoming_call_c.next().await.unwrap().is_some()); + active_call_c + .update(cx_c, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone()); + + // User D receives the call but doesn't join the room yet. + + let mut incoming_call_d = active_call_d.read_with(cx_d, |call, _| call.incoming()); + assert!(incoming_call_d.next().await.unwrap().is_some()); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string(), "user_c".to_string()], + pending: vec!["user_d".to_string()] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_c".to_string()], + pending: vec!["user_d".to_string()] + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_b".to_string()], + pending: vec!["user_d".to_string()] + } + ); + + // The server is torn down. + server.reset().await; + + // Users A and B reconnect to the call. User C has troubles reconnecting, so it leaves the room. + client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending())); + executor.advance_clock(RECONNECT_TIMEOUT); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string(), "user_c".to_string()], + pending: vec!["user_d".to_string()] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_c".to_string()], + pending: vec!["user_d".to_string()] + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + + // User D is notified again of the incoming call and accepts it. 
+ assert!(incoming_call_d.next().await.unwrap().is_some()); + active_call_d + .update(cx_d, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone()); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec![ + "user_b".to_string(), + "user_c".to_string(), + "user_d".to_string(), + ], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec![ + "user_a".to_string(), + "user_c".to_string(), + "user_d".to_string(), + ], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_d, cx_d), + RoomParticipants { + remote: vec![ + "user_a".to_string(), + "user_b".to_string(), + "user_c".to_string(), + ], + pending: vec![] + } + ); + + // The server finishes restarting, cleaning up stale connections. + server.start().await.unwrap(); + executor.advance_clock(CLEANUP_TIMEOUT); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string(), "user_d".to_string()], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_d".to_string()], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_d, cx_d), + RoomParticipants { + remote: vec!["user_a".to_string(), "user_b".to_string()], + pending: vec![] + } + ); + + // User D hangs up. + active_call_d + .update(cx_d, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_c, cx_c), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_d, cx_d), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + + // User B calls user D again. + active_call_b + .update(cx_b, |call, cx| { + call.invite(client_d.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + // User D receives the call but doesn't join the room yet. + + let mut incoming_call_d = active_call_d.read_with(cx_d, |call, _| call.incoming()); + assert!(incoming_call_d.next().await.unwrap().is_some()); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec!["user_d".to_string()] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec!["user_d".to_string()] + } + ); + + // The server is torn down. + server.reset().await; + + // Users A and B have troubles reconnecting, so they leave the room. 
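+ // The failed reconnects are simulated by overriding connection establishment with a future that never resolves.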
+ client_a.override_establish_connection(|_, cx| cx.spawn(|_| future::pending())); + client_b.override_establish_connection(|_, cx| cx.spawn(|_| future::pending())); + client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending())); + executor.advance_clock(RECONNECT_TIMEOUT); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec![], + pending: vec![] + } + ); + + // User D is notified again of the incoming call but doesn't accept it. + assert!(incoming_call_d.next().await.unwrap().is_some()); + + // The server finishes restarting, cleaning up stale connections and canceling the + // call to user D because the room has become empty. + server.start().await.unwrap(); + executor.advance_clock(CLEANUP_TIMEOUT); + assert!(incoming_call_d.next().await.unwrap().is_none()); +} + +#[gpui::test(iterations = 10)] +async fn test_calls_on_multiple_connections( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b1: &mut TestAppContext, + cx_b2: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b1 = server.create_client(cx_b1, "user_b").await; + let client_b2 = server.create_client(cx_b2, "user_b").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b1, cx_b1)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b1 = cx_b1.read(ActiveCall::global); + let active_call_b2 = cx_b2.read(ActiveCall::global); + + let mut incoming_call_b1 = active_call_b1.read_with(cx_b1, |call, _| call.incoming()); + + let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming()); + assert!(incoming_call_b1.next().await.unwrap().is_none()); + assert!(incoming_call_b2.next().await.unwrap().is_none()); + + // Call user B from client A, ensuring both clients for user B ring. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b1.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_some()); + assert!(incoming_call_b2.next().await.unwrap().is_some()); + + // User B declines the call on one of the two connections, causing both connections + // to stop ringing. + active_call_b2.update(cx_b2, |call, cx| call.decline_incoming(cx).unwrap()); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_none()); + assert!(incoming_call_b2.next().await.unwrap().is_none()); + + // Call user B again from client A. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b1.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_some()); + assert!(incoming_call_b2.next().await.unwrap().is_some()); + + // User B accepts the call on one of the two connections, causing both connections + // to stop ringing. + active_call_b2 + .update(cx_b2, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_none()); + assert!(incoming_call_b2.next().await.unwrap().is_none()); + + // User B disconnects the client that is not on the call. Everything should be fine. 
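+ // The call was accepted on client B2, so dropping and restoring client B1's connection must not disturb the room.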
+ client_b1.disconnect(&cx_b1.to_async()); + executor.advance_clock(RECEIVE_TIMEOUT); + client_b1 + .authenticate_and_connect(false, &cx_b1.to_async()) + .await + .unwrap(); + + // User B hangs up, and user A calls them again. + active_call_b2 + .update(cx_b2, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b1.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_some()); + assert!(incoming_call_b2.next().await.unwrap().is_some()); + + // User A cancels the call, causing both connections to stop ringing. + active_call_a + .update(cx_a, |call, cx| { + call.cancel_invite(client_b1.user_id().unwrap(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_none()); + assert!(incoming_call_b2.next().await.unwrap().is_none()); + + // User A calls user B again. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b1.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_some()); + assert!(incoming_call_b2.next().await.unwrap().is_some()); + + // User A hangs up, causing both connections to stop ringing. + active_call_a + .update(cx_a, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_none()); + assert!(incoming_call_b2.next().await.unwrap().is_none()); + + // User A calls user B again. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b1.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_some()); + assert!(incoming_call_b2.next().await.unwrap().is_some()); + + // User A disconnects, causing both connections to stop ringing. + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + assert!(incoming_call_b1.next().await.unwrap().is_none()); + assert!(incoming_call_b2.next().await.unwrap().is_none()); + + // User A reconnects automatically, then calls user B again. + server.allow_connections(); + executor.advance_clock(RECEIVE_TIMEOUT); + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b1.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert!(incoming_call_b1.next().await.unwrap().is_some()); + assert!(incoming_call_b2.next().await.unwrap().is_some()); + + // User B disconnects all clients, causing user A to no longer see a pending call for them. 
+ server.forbid_connections(); + server.disconnect_client(client_b1.peer_id().unwrap()); + server.disconnect_client(client_b2.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none())); +} + +#[gpui::test(iterations = 10)] +async fn test_unshare_project( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "a.txt": "a-contents", + "b.txt": "b-contents", + }), + ) + .await; + + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + executor.run_until_parked(); + + assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared())); + + project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + // When client B leaves the room, the project becomes read-only. + active_call_b + .update(cx_b, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected())); + + // Client C opens the project. + let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + + // When client A unshares the project, client C's project becomes read-only. + project_a + .update(cx_a, |project, cx| project.unshare(cx)) + .unwrap(); + executor.run_until_parked(); + + assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared())); + + assert!(project_c.read_with(cx_c, |project, _| project.is_disconnected())); + + // Client C can open the project again after client A re-shares. + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await; + executor.run_until_parked(); + + assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared())); + project_c2 + .update(cx_c, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + // When client A (the host) leaves the room, the project gets unshared and guests are notified. 
+ active_call_a + .update(cx_a, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| assert!(!project.is_shared())); + + project_c2.read_with(cx_c, |project, _| { + assert!(project.is_disconnected()); + assert!(project.collaborators().is_empty()); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_project_reconnect( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + "/root-1", + json!({ + "dir1": { + "a.txt": "a", + "b.txt": "b", + "subdir1": { + "c.txt": "c", + "d.txt": "d", + "e.txt": "e", + } + }, + "dir2": { + "v.txt": "v", + }, + "dir3": { + "w.txt": "w", + "x.txt": "x", + "y.txt": "y", + }, + "dir4": { + "z.txt": "z", + }, + }), + ) + .await; + client_a + .fs() + .insert_tree( + "/root-2", + json!({ + "2.txt": "2", + }), + ) + .await; + client_a + .fs() + .insert_tree( + "/root-3", + json!({ + "3.txt": "3", + }), + ) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await; + let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await; + let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await; + let worktree_a1 = project_a1.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + let project1_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a1.clone(), cx)) + .await + .unwrap(); + let project2_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a2.clone(), cx)) + .await + .unwrap(); + let project3_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a3.clone(), cx)) + .await + .unwrap(); + + let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await; + let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await; + let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await; + executor.run_until_parked(); + + let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| { + assert!(worktree.as_local().unwrap().is_shared()); + worktree.id() + }); + let (worktree_a2, _) = project_a1 + .update(cx_a, |p, cx| { + p.find_or_create_local_worktree("/root-1/dir2", true, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + + let worktree2_id = worktree_a2.read_with(cx_a, |tree, _| { + assert!(tree.as_local().unwrap().is_shared()); + tree.id() + }); + executor.run_until_parked(); + + project_b1.read_with(cx_b, |project, cx| { + assert!(project.worktree_for_id(worktree2_id, cx).is_some()) + }); + + let buffer_a1 = project_a1 + .update(cx_a, |p, cx| p.open_buffer((worktree1_id, "a.txt"), cx)) + .await + .unwrap(); + let buffer_b1 = project_b1 + .update(cx_b, |p, cx| p.open_buffer((worktree1_id, "a.txt"), cx)) + .await + .unwrap(); + + // Drop client A's connection. 
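+ // Only RECEIVE_TIMEOUT elapses here, so the server keeps the share alive: the host still sees one collaborator and the guest stays connected.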
+ server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT); + + project_a1.read_with(cx_a, |project, _| { + assert!(project.is_shared()); + assert_eq!(project.collaborators().len(), 1); + }); + + project_b1.read_with(cx_b, |project, _| { + assert!(!project.is_disconnected()); + assert_eq!(project.collaborators().len(), 1); + }); + + worktree_a1.read_with(cx_a, |tree, _| { + assert!(tree.as_local().unwrap().is_shared()) + }); + + // While client A is disconnected, add and remove files from client A's project. + client_a + .fs() + .insert_tree( + "/root-1/dir1/subdir2", + json!({ + "f.txt": "f-contents", + "g.txt": "g-contents", + "h.txt": "h-contents", + "i.txt": "i-contents", + }), + ) + .await; + client_a + .fs() + .remove_dir( + "/root-1/dir1/subdir1".as_ref(), + RemoveOptions { + recursive: true, + ..Default::default() + }, + ) + .await + .unwrap(); + + // While client A is disconnected, add and remove worktrees from client A's project. + project_a1.update(cx_a, |project, cx| { + project.remove_worktree(worktree2_id, cx) + }); + let (worktree_a3, _) = project_a1 + .update(cx_a, |p, cx| { + p.find_or_create_local_worktree("/root-1/dir3", true, cx) + }) + .await + .unwrap(); + worktree_a3 + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + + let worktree3_id = worktree_a3.read_with(cx_a, |tree, _| { + assert!(!tree.as_local().unwrap().is_shared()); + tree.id() + }); + executor.run_until_parked(); + + // While client A is disconnected, close project 2 + cx_a.update(|_| drop(project_a2)); + + // While client A is disconnected, mutate a buffer on both the host and the guest. + buffer_a1.update(cx_a, |buf, cx| buf.edit([(0..0, "W")], None, cx)); + buffer_b1.update(cx_b, |buf, cx| buf.edit([(1..1, "Z")], None, cx)); + executor.run_until_parked(); + + // Client A reconnects. Their project is re-shared, and client B re-joins it. 
+ server.allow_connections(); + client_a + .authenticate_and_connect(false, &cx_a.to_async()) + .await + .unwrap(); + executor.run_until_parked(); + + project_a1.read_with(cx_a, |project, cx| { + assert!(project.is_shared()); + assert!(worktree_a1.read(cx).as_local().unwrap().is_shared()); + assert_eq!( + worktree_a1 + .read(cx) + .snapshot() + .paths() + .map(|p| p.to_str().unwrap()) + .collect::<Vec<_>>(), + vec![ + "a.txt", + "b.txt", + "subdir2", + "subdir2/f.txt", + "subdir2/g.txt", + "subdir2/h.txt", + "subdir2/i.txt" + ] + ); + assert!(worktree_a3.read(cx).as_local().unwrap().is_shared()); + assert_eq!( + worktree_a3 + .read(cx) + .snapshot() + .paths() + .map(|p| p.to_str().unwrap()) + .collect::<Vec<_>>(), + vec!["w.txt", "x.txt", "y.txt"] + ); + }); + + project_b1.read_with(cx_b, |project, cx| { + assert!(!project.is_disconnected()); + assert_eq!( + project + .worktree_for_id(worktree1_id, cx) + .unwrap() + .read(cx) + .snapshot() + .paths() + .map(|p| p.to_str().unwrap()) + .collect::<Vec<_>>(), + vec![ + "a.txt", + "b.txt", + "subdir2", + "subdir2/f.txt", + "subdir2/g.txt", + "subdir2/h.txt", + "subdir2/i.txt" + ] + ); + assert!(project.worktree_for_id(worktree2_id, cx).is_none()); + assert_eq!( + project + .worktree_for_id(worktree3_id, cx) + .unwrap() + .read(cx) + .snapshot() + .paths() + .map(|p| p.to_str().unwrap()) + .collect::<Vec<_>>(), + vec!["w.txt", "x.txt", "y.txt"] + ); + }); + + project_b2.read_with(cx_b, |project, _| assert!(project.is_disconnected())); + + project_b3.read_with(cx_b, |project, _| assert!(!project.is_disconnected())); + + buffer_a1.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "WaZ")); + + buffer_b1.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "WaZ")); + + // Drop client B's connection. + server.forbid_connections(); + server.disconnect_client(client_b.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT); + + // While client B is disconnected, add and remove files from client A's project + client_a + .fs() + .insert_file("/root-1/dir1/subdir2/j.txt", "j-contents".into()) + .await; + client_a + .fs() + .remove_file("/root-1/dir1/subdir2/i.txt".as_ref(), Default::default()) + .await + .unwrap(); + + // While client B is disconnected, add and remove worktrees from client A's project. + let (worktree_a4, _) = project_a1 + .update(cx_a, |p, cx| { + p.find_or_create_local_worktree("/root-1/dir4", true, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + + let worktree4_id = worktree_a4.read_with(cx_a, |tree, _| { + assert!(tree.as_local().unwrap().is_shared()); + tree.id() + }); + project_a1.update(cx_a, |project, cx| { + project.remove_worktree(worktree3_id, cx) + }); + executor.run_until_parked(); + + // While client B is disconnected, mutate a buffer on both the host and the guest. + buffer_a1.update(cx_a, |buf, cx| buf.edit([(1..1, "X")], None, cx)); + buffer_b1.update(cx_b, |buf, cx| buf.edit([(2..2, "Y")], None, cx)); + executor.run_until_parked(); + + // While disconnected, close project 3 + cx_a.update(|_| drop(project_a3)); + + // Client B reconnects. They re-join the room and the remaining shared project.
+ server.allow_connections(); + client_b + .authenticate_and_connect(false, &cx_b.to_async()) + .await + .unwrap(); + executor.run_until_parked(); + + project_b1.read_with(cx_b, |project, cx| { + assert!(!project.is_disconnected()); + assert_eq!( + project + .worktree_for_id(worktree1_id, cx) + .unwrap() + .read(cx) + .snapshot() + .paths() + .map(|p| p.to_str().unwrap()) + .collect::<Vec<_>>(), + vec![ + "a.txt", + "b.txt", + "subdir2", + "subdir2/f.txt", + "subdir2/g.txt", + "subdir2/h.txt", + "subdir2/j.txt" + ] + ); + assert!(project.worktree_for_id(worktree2_id, cx).is_none()); + assert_eq!( + project + .worktree_for_id(worktree4_id, cx) + .unwrap() + .read(cx) + .snapshot() + .paths() + .map(|p| p.to_str().unwrap()) + .collect::<Vec<_>>(), + vec!["z.txt"] + ); + }); + + project_b3.read_with(cx_b, |project, _| assert!(project.is_disconnected())); + + buffer_a1.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "WXaYZ")); + + buffer_b1.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "WXaYZ")); + } + + #[gpui::test(iterations = 10)] + async fn test_active_call_events( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + ) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + client_a.fs().insert_tree("/a", json!({})).await; + client_b.fs().insert_tree("/b", json!({})).await; + + let (project_a, _) = client_a.build_local_project("/a", cx_a).await; + let (project_b, _) = client_b.build_local_project("/b", cx_b).await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + let events_a = active_call_events(cx_a); + let events_b = active_call_events(cx_b); + + let project_a_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]); + assert_eq!( + mem::take(&mut *events_b.borrow_mut()), + vec![room::Event::RemoteProjectShared { + owner: Arc::new(User { + id: client_a.user_id().unwrap(), + github_login: "user_a".to_string(), + avatar_uri: "avatar_a".into(), + }), + project_id: project_a_id, + worktree_root_names: vec!["a".to_string()], + }] + ); + + let project_b_id = active_call_b + .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + mem::take(&mut *events_a.borrow_mut()), + vec![room::Event::RemoteProjectShared { + owner: Arc::new(User { + id: client_b.user_id().unwrap(), + github_login: "user_b".to_string(), + avatar_uri: "avatar_b".into(), + }), + project_id: project_b_id, + worktree_root_names: vec!["b".to_string()] + }] + ); + assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]); + + // Sharing a project twice is idempotent. + let project_b_id_2 = active_call_b + .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx)) + .await + .unwrap(); + assert_eq!(project_b_id_2, project_b_id); + executor.run_until_parked(); + assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]); + assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]); + + // Unsharing a project should dispatch the RemoteProjectUnshared event.
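+ // Hanging up is what unshares the project here: client A observes RoomLeft while client B observes RemoteProjectUnshared.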
+ active_call_a + .update(cx_a, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + assert_eq!( + mem::take(&mut *events_a.borrow_mut()), + vec![room::Event::RoomLeft { channel_id: None }] + ); + assert_eq!( + mem::take(&mut *events_b.borrow_mut()), + vec![room::Event::RemoteProjectUnshared { + project_id: project_a_id, + }] + ); + } + + fn active_call_events(cx: &mut TestAppContext) -> Rc<RefCell<Vec<room::Event>>> { + let events = Rc::new(RefCell::new(Vec::new())); + let active_call = cx.read(ActiveCall::global); + cx.update({ + let events = events.clone(); + |cx| { + cx.subscribe(&active_call, move |_, event, _| { + events.borrow_mut().push(event.clone()) + }) + .detach() + } + }); + events + } + + #[gpui::test] + async fn test_mute_deafen( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, + ) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + + // User A calls user B, B answers. + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + + room_a.read_with(cx_a, |room, _| assert!(!room.is_muted())); + room_b.read_with(cx_b, |room, _| assert!(!room.is_muted())); + + // Users A and B are both unmuted and hear each other. + assert_eq!( + participant_audio_state(&room_a, cx_a), + &[ParticipantAudioState { + user_id: client_b.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![true], + }] + ); + assert_eq!( + participant_audio_state(&room_b, cx_b), + &[ParticipantAudioState { + user_id: client_a.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![true], + }] + ); + + // User A mutes + room_a.update(cx_a, |room, cx| room.toggle_mute(cx)); + executor.run_until_parked(); + + // User A hears user B, but B doesn't hear A. + room_a.read_with(cx_a, |room, _| assert!(room.is_muted())); + room_b.read_with(cx_b, |room, _| assert!(!room.is_muted())); + assert_eq!( + participant_audio_state(&room_a, cx_a), + &[ParticipantAudioState { + user_id: client_b.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![true], + }] + ); + assert_eq!( + participant_audio_state(&room_b, cx_b), + &[ParticipantAudioState { + user_id: client_a.user_id().unwrap(), + is_muted: true, + audio_tracks_playing: vec![true], + }] + ); + + // User A deafens + room_a.update(cx_a, |room, cx| room.toggle_deafen(cx)); + executor.run_until_parked(); + + // User A does not hear user B.
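+ // Deafening keeps A muted and additionally stops playback of B's audio track on A's side; B still sees A as muted, with A's track still marked as playing.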
+ room_a.read_with(cx_a, |room, _| assert!(room.is_muted())); + room_b.read_with(cx_b, |room, _| assert!(!room.is_muted())); + assert_eq!( + participant_audio_state(&room_a, cx_a), + &[ParticipantAudioState { + user_id: client_b.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![false], + }] + ); + assert_eq!( + participant_audio_state(&room_b, cx_b), + &[ParticipantAudioState { + user_id: client_a.user_id().unwrap(), + is_muted: true, + audio_tracks_playing: vec![true], + }] + ); + + // User B calls user C, C joins. + active_call_b + .update(cx_b, |call, cx| { + call.invite(client_c.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + active_call_c + .update(cx_c, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + // User A does not hear users B or C. + assert_eq!( + participant_audio_state(&room_a, cx_a), + &[ + ParticipantAudioState { + user_id: client_b.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![false], + }, + ParticipantAudioState { + user_id: client_c.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![false], + } + ] + ); + assert_eq!( + participant_audio_state(&room_b, cx_b), + &[ + ParticipantAudioState { + user_id: client_a.user_id().unwrap(), + is_muted: true, + audio_tracks_playing: vec![true], + }, + ParticipantAudioState { + user_id: client_c.user_id().unwrap(), + is_muted: false, + audio_tracks_playing: vec![true], + } + ] + ); + + #[derive(PartialEq, Eq, Debug)] + struct ParticipantAudioState { + user_id: u64, + is_muted: bool, + audio_tracks_playing: Vec<bool>, + } + + fn participant_audio_state( + room: &Model<Room>, + cx: &TestAppContext, + ) -> Vec<ParticipantAudioState> { + room.read_with(cx, |room, _| { + room.remote_participants() + .iter() + .map(|(user_id, participant)| ParticipantAudioState { + user_id: *user_id, + is_muted: participant.muted, + audio_tracks_playing: participant + .audio_tracks + .values() + .map(|track| track.is_playing()) + .collect(), + }) + .collect::<Vec<_>>() + }) + } + } + + #[gpui::test(iterations = 10)] + async fn test_room_location( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + ) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + client_a.fs().insert_tree("/a", json!({})).await; + client_b.fs().insert_tree("/b", json!({})).await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + let a_notified = Rc::new(Cell::new(false)); + cx_a.update({ + let notified = a_notified.clone(); + |cx| { + cx.observe(&active_call_a, move |_, _| notified.set(true)) + .detach() + } + }); + + let b_notified = Rc::new(Cell::new(false)); + cx_b.update({ + let b_notified = b_notified.clone(); + |cx| { + cx.observe(&active_call_b, move |_, _| b_notified.set(true)) + .detach() + } + }); + + let (project_a, _) = client_a.build_local_project("/a", cx_a).await; + active_call_a + .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) + .await + .unwrap(); + let (project_b, _) = client_b.build_local_project("/b", cx_b).await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); +
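// Joining the room should have notified both observers; A sees B as External while B sees A in an as-yet-unshared project. +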
assert!(a_notified.take()); + assert_eq!( + participant_locations(&room_a, cx_a), + vec![("user_b".to_string(), ParticipantLocation::External)] + ); + assert!(b_notified.take()); + assert_eq!( + participant_locations(&room_b, cx_b), + vec![("user_a".to_string(), ParticipantLocation::UnsharedProject)] + ); + + let project_a_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert!(a_notified.take()); + assert_eq!( + participant_locations(&room_a, cx_a), + vec![("user_b".to_string(), ParticipantLocation::External)] + ); + assert!(b_notified.take()); + assert_eq!( + participant_locations(&room_b, cx_b), + vec![( + "user_a".to_string(), + ParticipantLocation::SharedProject { + project_id: project_a_id + } + )] + ); + + let project_b_id = active_call_b + .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert!(a_notified.take()); + assert_eq!( + participant_locations(&room_a, cx_a), + vec![("user_b".to_string(), ParticipantLocation::External)] + ); + assert!(b_notified.take()); + assert_eq!( + participant_locations(&room_b, cx_b), + vec![( + "user_a".to_string(), + ParticipantLocation::SharedProject { + project_id: project_a_id + } + )] + ); + + active_call_b + .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert!(a_notified.take()); + assert_eq!( + participant_locations(&room_a, cx_a), + vec![( + "user_b".to_string(), + ParticipantLocation::SharedProject { + project_id: project_b_id + } + )] + ); + assert!(b_notified.take()); + assert_eq!( + participant_locations(&room_b, cx_b), + vec![( + "user_a".to_string(), + ParticipantLocation::SharedProject { + project_id: project_a_id + } + )] + ); + + active_call_b + .update(cx_b, |call, cx| call.set_location(None, cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert!(a_notified.take()); + assert_eq!( + participant_locations(&room_a, cx_a), + vec![("user_b".to_string(), ParticipantLocation::External)] + ); + assert!(b_notified.take()); + assert_eq!( + participant_locations(&room_b, cx_b), + vec![( + "user_a".to_string(), + ParticipantLocation::SharedProject { + project_id: project_a_id + } + )] + ); + + fn participant_locations( + room: &Model<Room>, + cx: &TestAppContext, + ) -> Vec<(String, ParticipantLocation)> { + room.read_with(cx, |room, _| { + room.remote_participants() + .values() + .map(|participant| { + ( + participant.user.github_login.to_string(), + participant.location, + ) + }) + .collect() + }) + } + } + + #[gpui::test(iterations = 10)] + async fn test_propagate_saves_and_fs_changes( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, + ) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + let rust = Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + let javascript = Arc::new(Language::new( +
LanguageConfig { + name: "JavaScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["js".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + for client in [&client_a, &client_b, &client_c] { + client.language_registry().add(rust.clone()); + client.language_registry().add(javascript.clone()); + } + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "file1.rs": "", + "file2": "" + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + + let worktree_a = project_a.read_with(cx_a, |p, _| p.worktrees().next().unwrap()); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Join that worktree as clients B and C. + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + + let worktree_b = project_b.read_with(cx_b, |p, _| p.worktrees().next().unwrap()); + + let worktree_c = project_c.read_with(cx_c, |p, _| p.worktrees().next().unwrap()); + + // Open and edit a buffer as both guests B and C. + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx)) + .await + .unwrap(); + let buffer_c = project_c + .update(cx_c, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx)) + .await + .unwrap(); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + }); + + buffer_c.read_with(cx_c, |buffer, _| { + assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + }); + buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "i-am-b, ")], None, cx)); + buffer_c.update(cx_c, |buf, cx| buf.edit([(0..0, "i-am-c, ")], None, cx)); + + // Open and edit that buffer as the host. + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx)) + .await + .unwrap(); + + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buf, _| assert_eq!(buf.text(), "i-am-c, i-am-b, ")); + buffer_a.update(cx_a, |buf, cx| { + buf.edit([(buf.len()..buf.len(), "i-am-a")], None, cx) + }); + + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buf, _| { + assert_eq!(buf.text(), "i-am-c, i-am-b, i-am-a"); + }); + + buffer_b.read_with(cx_b, |buf, _| { + assert_eq!(buf.text(), "i-am-c, i-am-b, i-am-a"); + }); + + buffer_c.read_with(cx_c, |buf, _| { + assert_eq!(buf.text(), "i-am-c, i-am-b, i-am-a"); + }); + + // Edit the buffer as the host and concurrently save as guest B. + let save_b = project_b.update(cx_b, |project, cx| { + project.save_buffer(buffer_b.clone(), cx) + }); + buffer_a.update(cx_a, |buf, cx| buf.edit([(0..0, "hi-a, ")], None, cx)); + save_b.await.unwrap(); + assert_eq!( + client_a.fs().load("/a/file1.rs".as_ref()).await.unwrap(), + "hi-a, i-am-c, i-am-b, i-am-a" + ); + + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buf, _| assert!(!buf.is_dirty())); + + buffer_b.read_with(cx_b, |buf, _| assert!(!buf.is_dirty())); + + buffer_c.read_with(cx_c, |buf, _| assert!(!buf.is_dirty())); + + // Make changes on host's file system, see those changes on guest worktrees. 
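+ // Renaming file1.rs to file1.js should update the open buffers' files and re-detect their language as JavaScript, as asserted below.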
+ client_a + .fs() + .rename( + "/a/file1.rs".as_ref(), + "/a/file1.js".as_ref(), + Default::default(), + ) + .await + .unwrap(); + client_a + .fs() + .rename("/a/file2".as_ref(), "/a/file3".as_ref(), Default::default()) + .await + .unwrap(); + client_a.fs().insert_file("/a/file4", "4".into()).await; + executor.run_until_parked(); + + worktree_a.read_with(cx_a, |tree, _| { + assert_eq!( + tree.paths() + .map(|p| p.to_string_lossy()) + .collect::<Vec<_>>(), + ["file1.js", "file3", "file4"] + ) + }); + + worktree_b.read_with(cx_b, |tree, _| { + assert_eq!( + tree.paths() + .map(|p| p.to_string_lossy()) + .collect::<Vec<_>>(), + ["file1.js", "file3", "file4"] + ) + }); + + worktree_c.read_with(cx_c, |tree, _| { + assert_eq!( + tree.paths() + .map(|p| p.to_string_lossy()) + .collect::<Vec<_>>(), + ["file1.js", "file3", "file4"] + ) + }); + + // Ensure buffer files are updated as well. + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); + assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + }); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); + assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + }); + + buffer_c.read_with(cx_c, |buffer, _| { + assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); + assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + }); + + let new_buffer_a = project_a + .update(cx_a, |p, cx| p.create_buffer(cx)) + .await + .unwrap(); + + let new_buffer_id = new_buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id()); + let new_buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer_by_id(new_buffer_id, cx)) + .await + .unwrap(); + + new_buffer_b.read_with(cx_b, |buffer, _| { + assert!(buffer.file().is_none()); + }); + + new_buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(0..0, "ok")], None, cx); + }); + project_a + .update(cx_a, |project, cx| { + let path = ProjectPath { + path: Arc::from(Path::new("file3.rs")), + worktree_id: worktree_a.read(cx).id(), + }; + + project.save_buffer_as(new_buffer_a.clone(), path, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + new_buffer_b.read_with(cx_b, |buffer_b, _| { + assert_eq!( + buffer_b.file().unwrap().path().as_ref(), + Path::new("file3.rs") + ); + + new_buffer_a.read_with(cx_a, |buffer_a, _| { + assert_eq!(buffer_b.saved_mtime(), buffer_a.saved_mtime()); + assert_eq!(buffer_b.saved_version(), buffer_a.saved_version()); + }); + }); + } + + #[gpui::test(iterations = 10)] + async fn test_git_diff_base_change( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + ) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + ".git": {}, + "sub": { + ".git": {}, + "b.txt": " + one + two + three + ".unindent(), + }, + "a.txt": " + one + two + three + ".unindent(), + }), + ) + .await; + + let (project_local, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| { + call.share_project(project_local.clone(), cx) + }) + .await + .unwrap(); + + let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + + let diff_base = "
+ one + three + " + .unindent(); + + let new_diff_base = " + one + two + " + .unindent(); + + client_a.fs().set_index_for_repo( + Path::new("/dir/.git"), + &[(Path::new("a.txt"), diff_base.clone())], + ); + + // Create the buffer + let buffer_local_a = project_local + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + // Wait for it to catch up to the new diff + executor.run_until_parked(); + + // Smoke test diffing + + buffer_local_a.read_with(cx_a, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(diff_base.as_str()) + ); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(1..2, "", "two\n")], + ); + }); + + // Create remote buffer + let buffer_remote_a = project_remote + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + // Wait remote buffer to catch up to the new diff + executor.run_until_parked(); + + // Smoke test diffing + + buffer_remote_a.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(diff_base.as_str()) + ); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(1..2, "", "two\n")], + ); + }); + + client_a.fs().set_index_for_repo( + Path::new("/dir/.git"), + &[(Path::new("a.txt"), new_diff_base.clone())], + ); + + // Wait for buffer_local_a to receive it + executor.run_until_parked(); + + // Smoke test new diffing + + buffer_local_a.read_with(cx_a, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(new_diff_base.as_str()) + ); + + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(2..3, "", "three\n")], + ); + }); + + // Smoke test B + + buffer_remote_a.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(new_diff_base.as_str()) + ); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(2..3, "", "three\n")], + ); + }); + + //Nested git dir + + let diff_base = " + one + three + " + .unindent(); + + let new_diff_base = " + one + two + " + .unindent(); + + client_a.fs().set_index_for_repo( + Path::new("/dir/sub/.git"), + &[(Path::new("b.txt"), diff_base.clone())], + ); + + // Create the buffer + let buffer_local_b = project_local + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) + .await + .unwrap(); + + // Wait for it to catch up to the new diff + executor.run_until_parked(); + + // Smoke test diffing + + buffer_local_b.read_with(cx_a, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(diff_base.as_str()) + ); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(1..2, "", "two\n")], + ); + }); + + // Create remote buffer + let buffer_remote_b = project_remote + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) + .await + .unwrap(); + + // Wait remote buffer to catch up to the new diff + executor.run_until_parked(); + + // Smoke test diffing + + buffer_remote_b.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(diff_base.as_str()) + ); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(1..2, "", 
"two\n")], + ); + }); + + client_a.fs().set_index_for_repo( + Path::new("/dir/sub/.git"), + &[(Path::new("b.txt"), new_diff_base.clone())], + ); + + // Wait for buffer_local_b to receive it + executor.run_until_parked(); + + // Smoke test new diffing + + buffer_local_b.read_with(cx_a, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(new_diff_base.as_str()) + ); + println!("{:?}", buffer.as_rope().to_string()); + println!("{:?}", buffer.diff_base()); + println!( + "{:?}", + buffer + .snapshot() + .git_diff_hunks_in_row_range(0..4) + .collect::>() + ); + + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(2..3, "", "three\n")], + ); + }); + + // Smoke test B + + buffer_remote_b.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.diff_base().map(|rope| rope.to_string()).as_deref(), + Some(new_diff_base.as_str()) + ); + git::diff::assert_hunks( + buffer.snapshot().git_diff_hunks_in_row_range(0..4), + &buffer, + &diff_base, + &[(2..3, "", "three\n")], + ); + }); +} + +#[gpui::test] +async fn test_git_branch_name( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + ".git": {}, + }), + ) + .await; + + let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| { + call.share_project(project_local.clone(), cx) + }) + .await + .unwrap(); + + let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + client_a + .fs() + .set_branch_name(Path::new("/dir/.git"), Some("branch-1")); + + // Wait for it to catch up to the new branch + executor.run_until_parked(); + + #[track_caller] + fn assert_branch(branch_name: Option>, project: &Project, cx: &AppContext) { + let branch_name = branch_name.map(Into::into); + let worktrees = project.visible_worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + let worktree = worktrees[0].clone(); + let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap(); + assert_eq!(root_entry.branch(), branch_name.map(Into::into)); + } + + // Smoke test branch reading + + project_local.read_with(cx_a, |project, cx| { + assert_branch(Some("branch-1"), project, cx) + }); + + project_remote.read_with(cx_b, |project, cx| { + assert_branch(Some("branch-1"), project, cx) + }); + + client_a + .fs() + .set_branch_name(Path::new("/dir/.git"), Some("branch-2")); + + // Wait for buffer_local_a to receive it + executor.run_until_parked(); + + // Smoke test branch reading + + project_local.read_with(cx_a, |project, cx| { + assert_branch(Some("branch-2"), project, cx) + }); + + project_remote.read_with(cx_b, |project, cx| { + assert_branch(Some("branch-2"), project, cx) + }); + + let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + executor.run_until_parked(); + + project_remote_c.read_with(cx_c, |project, cx| { + assert_branch(Some("branch-2"), project, cx) + }); +} + +#[gpui::test] +async fn 
test_git_status_sync( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + ".git": {}, + "a.txt": "a", + "b.txt": "b", + }), + ) + .await; + + const A_TXT: &str = "a.txt"; + const B_TXT: &str = "b.txt"; + + client_a.fs().set_status_for_repo_via_git_operation( + Path::new("/dir/.git"), + &[ + (&Path::new(A_TXT), GitFileStatus::Added), + (&Path::new(B_TXT), GitFileStatus::Added), + ], + ); + + let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| { + call.share_project(project_local.clone(), cx) + }) + .await + .unwrap(); + + let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + + // Wait for it to catch up to the new status + executor.run_until_parked(); + + #[track_caller] + fn assert_status( + file: &impl AsRef, + status: Option, + project: &Project, + cx: &AppContext, + ) { + let file = file.as_ref(); + let worktrees = project.visible_worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + let worktree = worktrees[0].clone(); + let snapshot = worktree.read(cx).snapshot(); + assert_eq!(snapshot.status_for_file(file), status); + } + + // Smoke test status reading + + project_local.read_with(cx_a, |project, cx| { + assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx); + assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx); + }); + + project_remote.read_with(cx_b, |project, cx| { + assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx); + assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx); + }); + + client_a.fs().set_status_for_repo_via_working_copy_change( + Path::new("/dir/.git"), + &[ + (&Path::new(A_TXT), GitFileStatus::Modified), + (&Path::new(B_TXT), GitFileStatus::Modified), + ], + ); + + // Wait for buffer_local_a to receive it + executor.run_until_parked(); + + // Smoke test status reading + + project_local.read_with(cx_a, |project, cx| { + assert_status( + &Path::new(A_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + assert_status( + &Path::new(B_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + }); + + project_remote.read_with(cx_b, |project, cx| { + assert_status( + &Path::new(A_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + assert_status( + &Path::new(B_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + }); + + // And synchronization while joining + let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + executor.run_until_parked(); + + project_remote_c.read_with(cx_c, |project, cx| { + assert_status( + &Path::new(A_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + assert_status( + &Path::new(B_TXT), + Some(GitFileStatus::Modified), + project, + cx, + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_fs_operations( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = 
TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + "a.txt": "a-contents", + "b.txt": "b-contents", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + + let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap()); + + let entry = project_b + .update(cx_b, |project, cx| { + project.create_entry((worktree_id, "c.txt"), false, cx) + }) + .await + .unwrap() + .unwrap(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "c.txt"] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "c.txt"] + ); + }); + + project_b + .update(cx_b, |project, cx| { + project.rename_entry(entry.id, Path::new("d.txt"), cx) + }) + .await + .unwrap() + .unwrap(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "d.txt"] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "d.txt"] + ); + }); + + let dir_entry = project_b + .update(cx_b, |project, cx| { + project.create_entry((worktree_id, "DIR"), true, cx) + }) + .await + .unwrap() + .unwrap(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["DIR", "a.txt", "b.txt", "d.txt"] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["DIR", "a.txt", "b.txt", "d.txt"] + ); + }); + + project_b + .update(cx_b, |project, cx| { + project.create_entry((worktree_id, "DIR/e.txt"), false, cx) + }) + .await + .unwrap() + .unwrap(); + project_b + .update(cx_b, |project, cx| { + project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx) + }) + .await + .unwrap() + .unwrap(); + project_b + .update(cx_b, |project, cx| { + project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx) + }) + .await + .unwrap() + .unwrap(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + [ + "DIR", + "DIR/SUBDIR", + "DIR/SUBDIR/f.txt", + "DIR/e.txt", + "a.txt", + "b.txt", + "d.txt" + ] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + [ + "DIR", + "DIR/SUBDIR", + "DIR/SUBDIR/f.txt", + "DIR/e.txt", + "a.txt", + "b.txt", + "d.txt" + ] + ); + }); + + project_b + .update(cx_b, |project, cx| { + project.copy_entry(entry.id, Path::new("f.txt"), cx) + }) + .await + .unwrap() + .unwrap(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + 
worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + [ + "DIR", + "DIR/SUBDIR", + "DIR/SUBDIR/f.txt", + "DIR/e.txt", + "a.txt", + "b.txt", + "d.txt", + "f.txt" + ] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + [ + "DIR", + "DIR/SUBDIR", + "DIR/SUBDIR/f.txt", + "DIR/e.txt", + "a.txt", + "b.txt", + "d.txt", + "f.txt" + ] + ); + }); + + project_b + .update(cx_b, |project, cx| { + project.delete_entry(dir_entry.id, false, cx).unwrap() + }) + .await + .unwrap(); + executor.run_until_parked(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "d.txt", "f.txt"] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "d.txt", "f.txt"] + ); + }); + + project_b + .update(cx_b, |project, cx| { + project.delete_entry(entry.id, false, cx).unwrap() + }) + .await + .unwrap(); + + worktree_a.read_with(cx_a, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "f.txt"] + ); + }); + + worktree_b.read_with(cx_b, |worktree, _| { + assert_eq!( + worktree + .paths() + .map(|p| p.to_string_lossy()) + .collect::>(), + ["a.txt", "b.txt", "f.txt"] + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_local_settings( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + // As client A, open a project that contains some local settings files + client_a + .fs() + .insert_tree( + "/dir", + json!({ + ".zed": { + "settings.json": r#"{ "tab_size": 2 }"# + }, + "a": { + ".zed": { + "settings.json": r#"{ "tab_size": 8 }"# + }, + "a.txt": "a-contents", + }, + "b": { + "b.txt": "b-contents", + } + }), + ) + .await; + let (project_a, _) = client_a.build_local_project("/dir", cx_a).await; + executor.run_until_parked(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + executor.run_until_parked(); + + // As client B, join that project and observe the local settings. + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap()); + executor.run_until_parked(); + cx_b.read(|cx| { + let store = cx.global::(); + assert_eq!( + store + .local_settings(worktree_b.read(cx).id().to_usize()) + .collect::>(), + &[ + (Path::new("").into(), r#"{"tab_size":2}"#.to_string()), + (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ] + ) + }); + + // As client A, update a settings file. As Client B, see the changed settings. 
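+ // (Local settings appear to be replicated to guests as (path, JSON) pairs relative to
+ // the worktree root, with the JSON re-serialized along the way, which is why the
+ // assertions compare against minified strings such as `{"tab_size":2}`.)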
+ client_a + .fs() + .insert_file("/dir/.zed/settings.json", r#"{}"#.into()) + .await; + executor.run_until_parked(); + cx_b.read(|cx| { + let store = cx.global::(); + assert_eq!( + store + .local_settings(worktree_b.read(cx).id().to_usize()) + .collect::>(), + &[ + (Path::new("").into(), r#"{}"#.to_string()), + (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ] + ) + }); + + // As client A, create and remove some settings files. As client B, see the changed settings. + client_a + .fs() + .remove_file("/dir/.zed/settings.json".as_ref(), Default::default()) + .await + .unwrap(); + client_a + .fs() + .create_dir("/dir/b/.zed".as_ref()) + .await + .unwrap(); + client_a + .fs() + .insert_file("/dir/b/.zed/settings.json", r#"{"tab_size": 4}"#.into()) + .await; + executor.run_until_parked(); + cx_b.read(|cx| { + let store = cx.global::(); + assert_eq!( + store + .local_settings(worktree_b.read(cx).id().to_usize()) + .collect::>(), + &[ + (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + (Path::new("b").into(), r#"{"tab_size":4}"#.to_string()), + ] + ) + }); + + // As client B, disconnect. + server.forbid_connections(); + server.disconnect_client(client_b.peer_id().unwrap()); + + // As client A, change and remove settings files while client B is disconnected. + client_a + .fs() + .insert_file("/dir/a/.zed/settings.json", r#"{"hard_tabs":true}"#.into()) + .await; + client_a + .fs() + .remove_file("/dir/b/.zed/settings.json".as_ref(), Default::default()) + .await + .unwrap(); + executor.run_until_parked(); + + // As client B, reconnect and see the changed settings. + server.allow_connections(); + executor.advance_clock(RECEIVE_TIMEOUT); + cx_b.read(|cx| { + let store = cx.global::(); + assert_eq!( + store + .local_settings(worktree_b.read(cx).id().to_usize()) + .collect::>(), + &[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),] + ) + }); +} + +#[gpui::test(iterations = 10)] +async fn test_buffer_conflict_after_save( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + "a.txt": "a-contents", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a buffer as client B + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "world ")], None, cx)); + + buffer_b.read_with(cx_b, |buf, _| { + assert!(buf.is_dirty()); + assert!(!buf.has_conflict()); + }); + + project_b + .update(cx_b, |project, cx| { + project.save_buffer(buffer_b.clone(), cx) + }) + .await + .unwrap(); + + buffer_b.read_with(cx_b, |buffer_b, _| assert!(!buffer_b.is_dirty())); + + buffer_b.read_with(cx_b, |buf, _| { + assert!(!buf.has_conflict()); + }); + + buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "hello ")], None, cx)); + + buffer_b.read_with(cx_b, |buf, _| { + assert!(buf.is_dirty()); + assert!(!buf.has_conflict()); + 
}); +} + +#[gpui::test(iterations = 10)] +async fn test_buffer_reloading( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + "a.txt": "a\nb\nc", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a buffer as client B + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + buffer_b.read_with(cx_b, |buf, _| { + assert!(!buf.is_dirty()); + assert!(!buf.has_conflict()); + assert_eq!(buf.line_ending(), LineEnding::Unix); + }); + + let new_contents = Rope::from("d\ne\nf"); + client_a + .fs() + .save("/dir/a.txt".as_ref(), &new_contents, LineEnding::Windows) + .await + .unwrap(); + + executor.run_until_parked(); + + buffer_b.read_with(cx_b, |buf, _| { + assert_eq!(buf.text(), new_contents.to_string()); + assert!(!buf.is_dirty()); + assert!(!buf.has_conflict()); + assert_eq!(buf.line_ending(), LineEnding::Windows); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_editing_while_guest_opens_buffer( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/dir", json!({ "a.txt": "a-contents" })) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open a buffer as client A + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + // Start opening the same buffer as client B + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer); + + // Edit the buffer as client A while client B is still opening it. 
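+ // The random delays below vary how the edits interleave with client B's in-flight
+ // open request across the test's iterations, so different orderings get exercised.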
+ cx_b.executor().simulate_random_delay().await; + buffer_a.update(cx_a, |buf, cx| buf.edit([(0..0, "X")], None, cx)); + cx_b.executor().simulate_random_delay().await; + buffer_a.update(cx_a, |buf, cx| buf.edit([(1..1, "Y")], None, cx)); + + let text = buffer_a.read_with(cx_a, |buf, _| buf.text()); + let buffer_b = buffer_b.await.unwrap(); + executor.run_until_parked(); + + buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), text)); +} + +#[gpui::test(iterations = 10)] +async fn test_leaving_worktree_while_opening_buffer( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/dir", json!({ "a.txt": "a-contents" })) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // See that a guest has joined as client A. + executor.run_until_parked(); + + project_a.read_with(cx_a, |p, _| assert_eq!(p.collaborators().len(), 1)); + + // Begin opening a buffer as client B, but leave the project before the open completes. + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer); + cx_b.update(|_| drop(project_b)); + drop(buffer_b); + + // See that the guest has left. + executor.run_until_parked(); + + project_a.read_with(cx_a, |p, _| assert!(p.collaborators().is_empty())); +} + +#[gpui::test(iterations = 10)] +async fn test_canceling_buffer_opening( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/dir", + json!({ + "a.txt": "abc", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .await + .unwrap(); + + // Open a buffer as client B but cancel after a random amount of time. + let buffer_b = project_b.update(cx_b, |p, cx| { + p.open_buffer_by_id(buffer_a.read_with(cx_a, |a, _| a.remote_id()), cx) + }); + executor.simulate_random_delay().await; + drop(buffer_b); + + // Try opening the same buffer again as client B, and ensure we can + // still do it despite the cancellation above. 
+ let buffer_b = project_b + .update(cx_b, |p, cx| { + p.open_buffer_by_id(buffer_a.read_with(cx_a, |a, _| a.remote_id()), cx) + }) + .await + .unwrap(); + + buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), "abc")); +} + +#[gpui::test(iterations = 10)] +async fn test_leaving_project( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/a", + json!({ + "a.txt": "a-contents", + "b.txt": "b-contents", + }), + ) + .await; + let (project_a, _) = client_a.build_local_project("/a", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await; + let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + + // Client A sees that a guest has joined. + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + project_b1.read_with(cx_b, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + project_c.read_with(cx_c, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + // Client B opens a buffer. + let buffer_b1 = project_b1 + .update(cx_b, |project, cx| { + let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + project.open_buffer((worktree_id, "a.txt"), cx) + }) + .await + .unwrap(); + + buffer_b1.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "a-contents")); + + // Drop client B's project and ensure client A and client C observe client B leaving. + cx_b.update(|_| drop(project_b1)); + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + assert_eq!(project.collaborators().len(), 1); + }); + + project_c.read_with(cx_c, |project, _| { + assert_eq!(project.collaborators().len(), 1); + }); + + // Client B re-joins the project and can open buffers as before. + let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await; + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + project_b2.read_with(cx_b, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + project_c.read_with(cx_c, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + let buffer_b2 = project_b2 + .update(cx_b, |project, cx| { + let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + project.open_buffer((worktree_id, "a.txt"), cx) + }) + .await + .unwrap(); + + buffer_b2.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "a-contents")); + + project_a.read_with(cx_a, |project, _| { + assert_eq!(project.collaborators().len(), 2); + }); + + // Drop client B's connection and ensure client A and client C observe client B leaving. 
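+ // Unlike dropping the project above, a dropped connection leaves room for a reconnect,
+ // so the test advances the fake clock past RECONNECT_TIMEOUT before asserting that the
+ // other collaborators saw client B leave.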
+ client_b.disconnect(&cx_b.to_async()); + executor.advance_clock(RECONNECT_TIMEOUT); + + project_a.read_with(cx_a, |project, _| { + assert_eq!(project.collaborators().len(), 1); + }); + + project_b2.read_with(cx_b, |project, _| { + assert!(project.is_disconnected()); + }); + + project_c.read_with(cx_c, |project, _| { + assert_eq!(project.collaborators().len(), 1); + }); + + // Client B can't join the project, unless they re-join the room. + cx_b.spawn(|cx| { + Project::in_room( + project_id, + client_b.app_state.client.clone(), + client_b.user_store().clone(), + client_b.language_registry().clone(), + FakeFs::new(cx.background_executor().clone()), + cx, + ) + }) + .await + .unwrap_err(); + + // Simulate connection loss for client C and ensure client A observes client C leaving the project. + client_c.wait_for_current_user(cx_c).await; + server.forbid_connections(); + server.disconnect_client(client_c.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + assert_eq!(project.collaborators().len(), 0); + }); + + project_b2.read_with(cx_b, |project, _| { + assert!(project.is_disconnected()); + }); + + project_c.read_with(cx_c, |project, _| { + assert!(project.is_disconnected()); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_collaborating_with_diagnostics( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", Default::default()); + + // Share a project as client A + client_a + .fs() + .insert_tree( + "/a", + json!({ + "a.rs": "let one = two", + "other.rs": "", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + + // Cause the language server to start. + let _buffer = project_a + .update(cx_a, |project, cx| { + project.open_buffer( + ProjectPath { + worktree_id, + path: Path::new("other.rs").into(), + }, + cx, + ) + }) + .await + .unwrap(); + + // Simulate a language server reporting errors for a file. + let mut fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server + .receive_notification::() + .await; + fake_language_server.notify::( + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path("/a/a.rs").unwrap(), + version: None, + diagnostics: vec![lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::WARNING), + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)), + message: "message 0".to_string(), + ..Default::default() + }], + }, + ); + + // Client A shares the project and, simultaneously, the language server + // publishes a diagnostic. 
This is done to ensure that the server always + // observes the latest diagnostics for a worktree. + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + fake_language_server.notify::( + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path("/a/a.rs").unwrap(), + version: None, + diagnostics: vec![lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::ERROR), + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)), + message: "message 1".to_string(), + ..Default::default() + }], + }, + ); + + // Join the worktree as client B. + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Wait for server to see the diagnostics update. + executor.run_until_parked(); + + // Ensure client B observes the new diagnostics. + + project_b.read_with(cx_b, |project, cx| { + assert_eq!( + project.diagnostic_summaries(false, cx).collect::>(), + &[( + ProjectPath { + worktree_id, + path: Arc::from(Path::new("a.rs")), + }, + LanguageServerId(0), + DiagnosticSummary { + error_count: 1, + warning_count: 0, + }, + )] + ) + }); + + // Join project as client C and observe the diagnostics. + let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + executor.run_until_parked(); + let project_c_diagnostic_summaries = + Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| { + project.diagnostic_summaries(false, cx).collect::>() + }))); + project_c.update(cx_c, |_, cx| { + let summaries = project_c_diagnostic_summaries.clone(); + cx.subscribe(&project_c, { + move |p, _, event, cx| { + if let project::Event::DiskBasedDiagnosticsFinished { .. } = event { + *summaries.borrow_mut() = p.diagnostic_summaries(false, cx).collect(); + } + } + }) + .detach(); + }); + + executor.run_until_parked(); + assert_eq!( + project_c_diagnostic_summaries.borrow().as_slice(), + &[( + ProjectPath { + worktree_id, + path: Arc::from(Path::new("a.rs")), + }, + LanguageServerId(0), + DiagnosticSummary { + error_count: 1, + warning_count: 0, + }, + )] + ); + + // Simulate a language server reporting more errors for a file. + fake_language_server.notify::( + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path("/a/a.rs").unwrap(), + version: None, + diagnostics: vec![ + lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::ERROR), + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)), + message: "message 1".to_string(), + ..Default::default() + }, + lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::WARNING), + range: lsp::Range::new(lsp::Position::new(0, 10), lsp::Position::new(0, 13)), + message: "message 2".to_string(), + ..Default::default() + }, + ], + }, + ); + + // Clients B and C get the updated summaries + executor.run_until_parked(); + + project_b.read_with(cx_b, |project, cx| { + assert_eq!( + project.diagnostic_summaries(false, cx).collect::>(), + [( + ProjectPath { + worktree_id, + path: Arc::from(Path::new("a.rs")), + }, + LanguageServerId(0), + DiagnosticSummary { + error_count: 1, + warning_count: 1, + }, + )] + ); + }); + + project_c.read_with(cx_c, |project, cx| { + assert_eq!( + project.diagnostic_summaries(false, cx).collect::>(), + [( + ProjectPath { + worktree_id, + path: Arc::from(Path::new("a.rs")), + }, + LanguageServerId(0), + DiagnosticSummary { + error_count: 1, + warning_count: 1, + }, + )] + ); + }); + + // Open the file with the errors on client B. They should be present. 
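+ // (The expected group ids 2 and 3 in the assertions below presumably reflect that the
+ // two earlier publishes for this file already consumed diagnostic group ids 0 and 1.)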
+ let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer + .snapshot() + .diagnostics_in_range::<_, Point>(0..buffer.len(), false) + .collect::>(), + &[ + DiagnosticEntry { + range: Point::new(0, 4)..Point::new(0, 7), + diagnostic: Diagnostic { + group_id: 2, + message: "message 1".to_string(), + severity: lsp::DiagnosticSeverity::ERROR, + is_primary: true, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(0, 10)..Point::new(0, 13), + diagnostic: Diagnostic { + group_id: 3, + severity: lsp::DiagnosticSeverity::WARNING, + message: "message 2".to_string(), + is_primary: true, + ..Default::default() + } + } + ] + ); + }); + + // Simulate a language server reporting no errors for a file. + fake_language_server.notify::( + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path("/a/a.rs").unwrap(), + version: None, + diagnostics: vec![], + }, + ); + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, cx| { + assert_eq!( + project.diagnostic_summaries(false, cx).collect::>(), + [] + ) + }); + + project_b.read_with(cx_b, |project, cx| { + assert_eq!( + project.diagnostic_summaries(false, cx).collect::>(), + [] + ) + }); + + project_c.read_with(cx_c, |project, cx| { + assert_eq!( + project.diagnostic_summaries(false, cx).collect::>(), + [] + ) + }); +} + +#[gpui::test(iterations = 10)] +async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + disk_based_diagnostics_progress_token: Some("the-disk-based-token".into()), + disk_based_diagnostics_sources: vec!["the-disk-based-diagnostics-source".into()], + ..Default::default() + }, + ); + + let file_names = &["one.rs", "two.rs", "three.rs", "four.rs", "five.rs"]; + client_a + .fs() + .insert_tree( + "/test", + json!({ + "one.rs": "const ONE: usize = 1;", + "two.rs": "const TWO: usize = 2;", + "three.rs": "const THREE: usize = 3;", + "four.rs": "const FOUR: usize = 3;", + "five.rs": "const FIVE: usize = 3;", + }), + ) + .await; + + let (project_a, worktree_id) = client_a.build_local_project("/test", cx_a).await; + + // Share a project as client A + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // Join the project as client B and open all three files. + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| { + project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx)) + })) + .await + .unwrap(); + + // Simulate a language server reporting errors for a file. 
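+ // The fake server brackets its per-file diagnostics in a work-done progress using the
+ // disk-based token registered on the adapter above, so DiskBasedDiagnosticsFinished
+ // should only fire once the End notification arrives, by which point every guest
+ // buffer must already show its diagnostic.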
+ let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server + .request::(lsp::WorkDoneProgressCreateParams { + token: lsp::NumberOrString::String("the-disk-based-token".to_string()), + }) + .await + .unwrap(); + fake_language_server.notify::(lsp::ProgressParams { + token: lsp::NumberOrString::String("the-disk-based-token".to_string()), + value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin( + lsp::WorkDoneProgressBegin { + title: "Progress Began".into(), + ..Default::default() + }, + )), + }); + for file_name in file_names { + fake_language_server.notify::( + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path(Path::new("/test").join(file_name)).unwrap(), + version: None, + diagnostics: vec![lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::WARNING), + source: Some("the-disk-based-diagnostics-source".into()), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), + message: "message one".to_string(), + ..Default::default() + }], + }, + ); + } + fake_language_server.notify::(lsp::ProgressParams { + token: lsp::NumberOrString::String("the-disk-based-token".to_string()), + value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End( + lsp::WorkDoneProgressEnd { message: None }, + )), + }); + + // When the "disk base diagnostics finished" message is received, the buffers' + // diagnostics are expected to be present. + let disk_based_diagnostics_finished = Arc::new(AtomicBool::new(false)); + project_b.update(cx_b, { + let project_b = project_b.clone(); + let disk_based_diagnostics_finished = disk_based_diagnostics_finished.clone(); + move |_, cx| { + cx.subscribe(&project_b, move |_, _, event, cx| { + if let project::Event::DiskBasedDiagnosticsFinished { .. 
} = event { + disk_based_diagnostics_finished.store(true, SeqCst); + for buffer in &guest_buffers { + assert_eq!( + buffer + .read(cx) + .snapshot() + .diagnostics_in_range::<_, usize>(0..5, false) + .count(), + 1, + "expected a diagnostic for buffer {:?}", + buffer.read(cx).file().unwrap().path(), + ); + } + } + }) + .detach(); + } + }); + + executor.run_until_parked(); + assert!(disk_based_diagnostics_finished.load(SeqCst)); +} + +#[gpui::test(iterations = 10)] +async fn test_reloading_buffer_manually( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/a", json!({ "a.rs": "let one = 1;" })) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) + .await + .unwrap(); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + buffer_b.update(cx_b, |buffer, cx| { + buffer.edit([(4..7, "six")], None, cx); + buffer.edit([(10..11, "6")], None, cx); + assert_eq!(buffer.text(), "let six = 6;"); + assert!(buffer.is_dirty()); + assert!(!buffer.has_conflict()); + }); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "let six = 6;")); + + client_a + .fs() + .save( + "/a/a.rs".as_ref(), + &Rope::from("let seven = 7;"), + LineEnding::Unix, + ) + .await + .unwrap(); + executor.run_until_parked(); + + buffer_a.read_with(cx_a, |buffer, _| assert!(buffer.has_conflict())); + + buffer_b.read_with(cx_b, |buffer, _| assert!(buffer.has_conflict())); + + project_b + .update(cx_b, |project, cx| { + project.reload_buffers(HashSet::from_iter([buffer_b.clone()]), true, cx) + }) + .await + .unwrap(); + + buffer_a.read_with(cx_a, |buffer, _| { + assert_eq!(buffer.text(), "let seven = 7;"); + assert!(!buffer.is_dirty()); + assert!(!buffer.has_conflict()); + }); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!(buffer.text(), "let seven = 7;"); + assert!(!buffer.is_dirty()); + assert!(!buffer.has_conflict()); + }); + + buffer_a.update(cx_a, |buffer, cx| { + // Undoing on the host is a no-op when the reload was initiated by the guest. + buffer.undo(cx); + assert_eq!(buffer.text(), "let seven = 7;"); + assert!(!buffer.is_dirty()); + assert!(!buffer.has_conflict()); + }); + buffer_b.update(cx_b, |buffer, cx| { + // Undoing on the guest rolls back the buffer to before it was reloaded but the conflict gets cleared. 
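+ // (The buffer becomes dirty again because the undo diverges from the saved text, but
+ // has_conflict() seemingly stays false since the file on disk has not changed again
+ // since the reload.)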
+ buffer.undo(cx); + assert_eq!(buffer.text(), "let six = 6;"); + assert!(buffer.is_dirty()); + assert!(!buffer.has_conflict()); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_formatting_buffer( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + executor.allow_parking(); + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", FakeLspAdapter::default()); + + // Here we insert a fake tree with a directory that exists on disk. This is needed + // because later we'll invoke a command, which requires passing a working directory + // that points to a valid location on disk. + let directory = env::current_dir().unwrap(); + client_a + .fs() + .insert_tree(&directory, json!({ "a.rs": "let one = \"two\"" })) + .await; + let (project_a, worktree_id) = client_a.build_local_project(&directory, cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + Ok(Some(vec![ + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 4)), + new_text: "h".to_string(), + }, + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 7), lsp::Position::new(0, 7)), + new_text: "y".to_string(), + }, + ])) + }); + + project_b + .update(cx_b, |project, cx| { + project.format( + HashSet::from_iter([buffer_b.clone()]), + true, + FormatTrigger::Save, + cx, + ) + }) + .await + .unwrap(); + + // The edits from the LSP are applied, and a final newline is added. + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + "let honey = \"two\"\n" + ); + + // Ensure buffer can be formatted using an external command. Notice how the + // host's configuration is honored as opposed to using the guest's settings. 
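+ // The awk program `{sub(/two/,"{buffer_path}")}1` replaces "two" and prints every line;
+ // `{buffer_path}` appears to be expanded to the buffer's absolute path before the
+ // command is invoked, which is what the final assertion verifies.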
+ cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(Formatter::External { + command: "awk".into(), + arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(), + }); + }); + }); + }); + project_b + .update(cx_b, |project, cx| { + project.format( + HashSet::from_iter([buffer_b.clone()]), + true, + FormatTrigger::Save, + cx, + ) + }) + .await + .unwrap(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + format!("let honey = \"{}/a.rs\"\n", directory.to_str().unwrap()) + ); +} + +#[gpui::test(iterations = 10)] +async fn test_prettier_formatting_buffer( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + let test_plugin = "test_plugin"; + + client_a.language_registry().add(Arc::new(Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "TypeScript", + FakeLspAdapter { + prettier_plugins: vec![test_plugin], + ..Default::default() + }, + ); + + // Here we insert a fake tree with a directory that exists on disk. This is needed + // because later we'll invoke a command, which requires passing a working directory + // that points to a valid location on disk. 
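+ // (Prettier itself is faked in tests: each run appears to append
+ // project::TEST_PRETTIER_FORMAT_SUFFIX to the buffer, which is what the
+ // assertions below rely on.)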
+ let directory = env::current_dir().unwrap(); + let buffer_text = "let one = \"two\""; + client_a + .fs() + .insert_tree(&directory, json!({ "a.ts": buffer_text })) + .await; + let (project_a, worktree_id) = client_a.build_local_project(&directory, cx_a).await; + let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; + let open_buffer = project_a.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)); + let buffer_a = cx_a.executor().spawn(open_buffer).await.unwrap(); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(Formatter::Auto); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + cx_b.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(Formatter::LanguageServer); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + panic!( + "Unexpected: prettier should be preferred since it's enabled and language supports it" + ) + }); + + project_b + .update(cx_b, |project, cx| { + project.format( + HashSet::from_iter([buffer_b.clone()]), + true, + FormatTrigger::Save, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + buffer_text.to_string() + "\n" + prettier_format_suffix, + "Prettier formatting was not applied to client buffer after client's request" + ); + + project_a + .update(cx_a, |project, cx| { + project.format( + HashSet::from_iter([buffer_a.clone()]), + true, + FormatTrigger::Manual, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix, + "Prettier formatting was not applied to client buffer after host's request" + ); +} + +#[gpui::test(iterations = 10)] +async fn test_definition( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", Default::default()); + client_a.language_registry().add(rust_lang()); + + client_a + .fs() + .insert_tree( + "/root", + json!({ + "dir-1": { + "a.rs": "const ONE: usize = b::TWO + b::THREE;", + }, + "dir-2": { + "b.rs": "const TWO: c::T2 = 2;\nconst THREE: usize = 3;", + "c.rs": "type T2 = usize;", + } + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", 
cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open the file on client B. + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + + // Request the definition of a symbol as the guest. + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Scalar( + lsp::Location::new( + lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(), + lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), + ), + ))) + }); + + let definitions_1 = project_b + .update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx)) + .await + .unwrap(); + cx_b.read(|cx| { + assert_eq!(definitions_1.len(), 1); + assert_eq!(project_b.read(cx).worktrees().count(), 2); + let target_buffer = definitions_1[0].target.buffer.read(cx); + assert_eq!( + target_buffer.text(), + "const TWO: c::T2 = 2;\nconst THREE: usize = 3;" + ); + assert_eq!( + definitions_1[0].target.range.to_point(target_buffer), + Point::new(0, 6)..Point::new(0, 9) + ); + }); + + // Try getting more definitions for the same buffer, ensuring the buffer gets reused from + // the previous call to `definition`. + fake_language_server.handle_request::(|_, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Scalar( + lsp::Location::new( + lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(), + lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)), + ), + ))) + }); + + let definitions_2 = project_b + .update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx)) + .await + .unwrap(); + cx_b.read(|cx| { + assert_eq!(definitions_2.len(), 1); + assert_eq!(project_b.read(cx).worktrees().count(), 2); + let target_buffer = definitions_2[0].target.buffer.read(cx); + assert_eq!( + target_buffer.text(), + "const TWO: c::T2 = 2;\nconst THREE: usize = 3;" + ); + assert_eq!( + definitions_2[0].target.range.to_point(target_buffer), + Point::new(1, 6)..Point::new(1, 11) + ); + }); + assert_eq!( + definitions_1[0].target.buffer, + definitions_2[0].target.buffer + ); + + fake_language_server.handle_request::( + |req, _| async move { + assert_eq!( + req.text_document_position_params.position, + lsp::Position::new(0, 7) + ); + Ok(Some(lsp::GotoDefinitionResponse::Scalar( + lsp::Location::new( + lsp::Url::from_file_path("/root/dir-2/c.rs").unwrap(), + lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)), + ), + ))) + }, + ); + + let type_definitions = project_b + .update(cx_b, |p, cx| p.type_definition(&buffer_b, 7, cx)) + .await + .unwrap(); + cx_b.read(|cx| { + assert_eq!(type_definitions.len(), 1); + let target_buffer = type_definitions[0].target.buffer.read(cx); + assert_eq!(target_buffer.text(), "type T2 = usize;"); + assert_eq!( + type_definitions[0].target.range.to_point(target_buffer), + Point::new(0, 5)..Point::new(0, 7) + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_references( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let 
active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: "my-fake-lsp-adapter", + capabilities: lsp::ServerCapabilities { + references_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + client_a + .fs() + .insert_tree( + "/root", + json!({ + "dir-1": { + "one.rs": "const ONE: usize = 1;", + "two.rs": "const TWO: usize = one::ONE + one::ONE;", + }, + "dir-2": { + "three.rs": "const THREE: usize = two::TWO + one::ONE;", + } + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open the file on client B. + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + + // Request references to a symbol as the guest. + let fake_language_server = fake_language_servers.next().await.unwrap(); + let (lsp_response_tx, rx) = mpsc::unbounded::<Result<Option<Vec<lsp::Location>>>>(); + fake_language_server.handle_request::<lsp::request::References, _, _>({ + let rx = Arc::new(Mutex::new(Some(rx))); + move |params, _| { + assert_eq!( + params.text_document_position.text_document.uri.as_str(), + "file:///root/dir-1/one.rs" + ); + let rx = rx.clone(); + async move { + let mut response_rx = rx.lock().take().unwrap(); + let result = response_rx.next().await.unwrap(); + *rx.lock() = Some(response_rx); + result + } + } + }); + + let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx)); + + // User is informed that a request is pending. + executor.run_until_parked(); + project_b.read_with(cx_b, |project, _| { + let status = project.language_server_statuses().next().cloned().unwrap(); + assert_eq!(status.name, "my-fake-lsp-adapter"); + assert_eq!( + status.pending_work.values().next().unwrap().message, + Some("Finding references...".into()) + ); + }); + + // Cause the language server to respond. + lsp_response_tx + .unbounded_send(Ok(Some(vec![ + lsp::Location { + uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(), + range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)), + }, + lsp::Location { + uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(), + range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)), + }, + lsp::Location { + uri: lsp::Url::from_file_path("/root/dir-2/three.rs").unwrap(), + range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)), + }, + ]))) + .unwrap(); + + let references = references.await.unwrap(); + executor.run_until_parked(); + project_b.read_with(cx_b, |project, cx| { + // User is informed that a request is no longer pending.
+ let status = project.language_server_statuses().next().unwrap(); + assert!(status.pending_work.is_empty()); + + assert_eq!(references.len(), 3); + assert_eq!(project.worktrees().count(), 2); + + let two_buffer = references[0].buffer.read(cx); + let three_buffer = references[2].buffer.read(cx); + assert_eq!( + two_buffer.file().unwrap().path().as_ref(), + Path::new("two.rs") + ); + assert_eq!(references[1].buffer, references[0].buffer); + assert_eq!( + three_buffer.file().unwrap().full_path(cx), + Path::new("/root/dir-2/three.rs") + ); + + assert_eq!(references[0].range.to_offset(two_buffer), 24..27); + assert_eq!(references[1].range.to_offset(two_buffer), 35..38); + assert_eq!(references[2].range.to_offset(three_buffer), 37..40); + }); + + let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx)); + + // User is informed that a request is pending. + executor.run_until_parked(); + project_b.read_with(cx_b, |project, _| { + let status = project.language_server_statuses().next().cloned().unwrap(); + assert_eq!(status.name, "my-fake-lsp-adapter"); + assert_eq!( + status.pending_work.values().next().unwrap().message, + Some("Finding references...".into()) + ); + }); + + // Cause the LSP request to fail. + lsp_response_tx + .unbounded_send(Err(anyhow!("can't find references"))) + .unwrap(); + references.await.unwrap_err(); + + // User is informed that the request is no longer pending. + executor.run_until_parked(); + project_b.read_with(cx_b, |project, _| { + let status = project.language_server_statuses().next().unwrap(); + assert!(status.pending_work.is_empty()); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_project_search( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/root", + json!({ + "dir-1": { + "a": "hello world", + "b": "goodnight moon", + "c": "a world of goo", + "d": "world champion of clown world", + }, + "dir-2": { + "e": "disney world is fun", + } + }), + ) + .await; + let (project_a, _) = client_a.build_local_project("/root/dir-1", cx_a).await; + let (worktree_2, _) = project_a + .update(cx_a, |p, cx| { + p.find_or_create_local_worktree("/root/dir-2", true, cx) + }) + .await + .unwrap(); + worktree_2 + .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Perform a search as the guest. + let mut results = HashMap::default(); + let mut search_rx = project_b.update(cx_b, |project, cx| { + project.search( + SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(), + cx, + ) + }); + while let Some(result) = search_rx.next().await { + match result { + SearchResult::Buffer { buffer, ranges } => { + results.entry(buffer).or_insert(ranges); + } + SearchResult::LimitReached => { + panic!("Unexpectedly reached search limit in tests. 
If you do want to assert limit-reached, change this panic call.") } }; } + + let mut ranges_by_path = results + .into_iter() + .map(|(buffer, ranges)| { + buffer.read_with(cx_b, |buffer, cx| { + let path = buffer.file().unwrap().full_path(cx); + let offset_ranges = ranges + .into_iter() + .map(|range| range.to_offset(buffer)) + .collect::<Vec<_>>(); + (path, offset_ranges) + }) + }) + .collect::<Vec<_>>(); + ranges_by_path.sort_by_key(|(path, _)| path.clone()); + + assert_eq!( + ranges_by_path, + &[ + (PathBuf::from("dir-1/a"), vec![6..11]), + (PathBuf::from("dir-1/c"), vec![2..7]), + (PathBuf::from("dir-1/d"), vec![0..5, 24..29]), + (PathBuf::from("dir-2/e"), vec![7..12]), + ] + ); +} + +#[gpui::test(iterations = 10)] +async fn test_document_highlights( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/root-1", + json!({ + "main.rs": "fn double(number: i32) -> i32 { number + number }", + }), + ) + .await; + + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", Default::default()); + client_a.language_registry().add(rust_lang()); + + let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open the file on client B. + let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_b).await.unwrap(); + + // Request document highlights as the guest.
+ let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::( + |params, _| async move { + assert_eq!( + params + .text_document_position_params + .text_document + .uri + .as_str(), + "file:///root-1/main.rs" + ); + assert_eq!( + params.text_document_position_params.position, + lsp::Position::new(0, 34) + ); + Ok(Some(vec![ + lsp::DocumentHighlight { + kind: Some(lsp::DocumentHighlightKind::WRITE), + range: lsp::Range::new(lsp::Position::new(0, 10), lsp::Position::new(0, 16)), + }, + lsp::DocumentHighlight { + kind: Some(lsp::DocumentHighlightKind::READ), + range: lsp::Range::new(lsp::Position::new(0, 32), lsp::Position::new(0, 38)), + }, + lsp::DocumentHighlight { + kind: Some(lsp::DocumentHighlightKind::READ), + range: lsp::Range::new(lsp::Position::new(0, 41), lsp::Position::new(0, 47)), + }, + ])) + }, + ); + + let highlights = project_b + .update(cx_b, |p, cx| p.document_highlights(&buffer_b, 34, cx)) + .await + .unwrap(); + + buffer_b.read_with(cx_b, |buffer, _| { + let snapshot = buffer.snapshot(); + + let highlights = highlights + .into_iter() + .map(|highlight| (highlight.kind, highlight.range.to_offset(&snapshot))) + .collect::>(); + assert_eq!( + highlights, + &[ + (lsp::DocumentHighlightKind::WRITE, 10..16), + (lsp::DocumentHighlightKind::READ, 32..38), + (lsp::DocumentHighlightKind::READ, 41..47) + ] + ) + }); +} + +#[gpui::test(iterations = 10)] +async fn test_lsp_hover( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + "/root-1", + json!({ + "main.rs": "use std::collections::HashMap;", + }), + ) + .await; + + client_a.language_registry().add(rust_lang()); + let language_server_names = ["rust-analyzer", "CrabLang-ls"]; + let mut fake_language_servers = client_a + .language_registry() + .register_specific_fake_lsp_adapter( + "Rust", + true, + FakeLspAdapter { + name: "rust-analyzer", + capabilities: lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..lsp::ServerCapabilities::default() + }, + ..FakeLspAdapter::default() + }, + ); + let _other_server = client_a + .language_registry() + .register_specific_fake_lsp_adapter( + "Rust", + false, + FakeLspAdapter { + name: "CrabLang-ls", + capabilities: lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..lsp::ServerCapabilities::default() + }, + ..FakeLspAdapter::default() + }, + ); + + let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Open the file as the guest + let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); + let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); + + let mut servers_with_hover_requests = HashMap::default(); + for i in 0..language_server_names.len() { + let new_server = fake_language_servers.next().await.unwrap_or_else(|| { + panic!( + "Failed to get language server #{i} with name {}", 
+ &language_server_names[i] + ) + }); + let new_server_name = new_server.server.name(); + assert!( + !servers_with_hover_requests.contains_key(new_server_name), + "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`" + ); + let new_server_name = new_server_name.to_string(); + match new_server_name.as_str() { + "CrabLang-ls" => { + servers_with_hover_requests.insert( + new_server_name.clone(), + new_server.handle_request::( + move |params, _| { + assert_eq!( + params + .text_document_position_params + .text_document + .uri + .as_str(), + "file:///root-1/main.rs" + ); + let name = new_server_name.clone(); + async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Scalar( + lsp::MarkedString::String(format!("{name} hover")), + ), + range: None, + })) + } + }, + ), + ); + } + "rust-analyzer" => { + servers_with_hover_requests.insert( + new_server_name.clone(), + new_server.handle_request::( + |params, _| async move { + assert_eq!( + params + .text_document_position_params + .text_document + .uri + .as_str(), + "file:///root-1/main.rs" + ); + assert_eq!( + params.text_document_position_params.position, + lsp::Position::new(0, 22) + ); + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Array(vec![ + lsp::MarkedString::String("Test hover content.".to_string()), + lsp::MarkedString::LanguageString(lsp::LanguageString { + language: "Rust".to_string(), + value: "let foo = 42;".to_string(), + }), + ]), + range: Some(lsp::Range::new( + lsp::Position::new(0, 22), + lsp::Position::new(0, 29), + )), + })) + }, + ), + ); + } + unexpected => panic!("Unexpected server name: {unexpected}"), + } + } + + // Request hover information as the guest. + let mut hovers = project_b + .update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx)) + .await; + assert_eq!( + hovers.len(), + 2, + "Expected two hovers from both language servers, but got: {hovers:?}" + ); + + let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map( + |mut hover_request| async move { + hover_request + .next() + .await + .expect("All hover requests should have been triggered") + }, + )) + .await; + + hovers.sort_by_key(|hover| hover.contents.len()); + let first_hover = hovers.first().cloned().unwrap(); + assert_eq!( + first_hover.contents, + vec![project::HoverBlock { + text: "CrabLang-ls hover".to_string(), + kind: HoverBlockKind::Markdown, + },] + ); + let second_hover = hovers.last().cloned().unwrap(); + assert_eq!( + second_hover.contents, + vec![ + project::HoverBlock { + text: "Test hover content.".to_string(), + kind: HoverBlockKind::Markdown, + }, + project::HoverBlock { + text: "let foo = 42;".to_string(), + kind: HoverBlockKind::Code { + language: "Rust".to_string() + }, + } + ] + ); + buffer_b.read_with(cx_b, |buffer, _| { + let snapshot = buffer.snapshot(); + assert_eq!(second_hover.range.unwrap().to_offset(&snapshot), 22..29); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_project_symbols( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", 
Default::default()); + + client_a + .fs() + .insert_tree( + "/code", + json!({ + "crate-1": { + "one.rs": "const ONE: usize = 1;", + }, + "crate-2": { + "two.rs": "const TWO: usize = 2; const THREE: usize = 3;", + }, + "private": { + "passwords.txt": "the-password", + } + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/code/crate-1", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + // Cause the language server to start. + let open_buffer_task = + project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx)); + let _buffer = cx_b.executor().spawn(open_buffer_task).await.unwrap(); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![ + #[allow(deprecated)] + lsp::SymbolInformation { + name: "TWO".into(), + location: lsp::Location { + uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(), + range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), + }, + kind: lsp::SymbolKind::CONSTANT, + tags: None, + container_name: None, + deprecated: None, + }, + ]))) + }); + + // Request the definition of a symbol as the guest. + let symbols = project_b + .update(cx_b, |p, cx| p.symbols("two", cx)) + .await + .unwrap(); + assert_eq!(symbols.len(), 1); + assert_eq!(symbols[0].name, "TWO"); + + // Open one of the returned symbols. + let buffer_b_2 = project_b + .update(cx_b, |project, cx| { + project.open_buffer_for_symbol(&symbols[0], cx) + }) + .await + .unwrap(); + + buffer_b_2.read_with(cx_b, |buffer, cx| { + assert_eq!( + buffer.file().unwrap().full_path(cx), + Path::new("/code/crate-2/two.rs") + ); + }); + + // Attempt to craft a symbol and violate host's privacy by opening an arbitrary file. 
+ let mut fake_symbol = symbols[0].clone(); + fake_symbol.path.path = Path::new("/code/secrets").into(); + let error = project_b + .update(cx_b, |project, cx| { + project.open_buffer_for_symbol(&fake_symbol, cx) + }) + .await + .unwrap_err(); + assert!(error.to_string().contains("invalid symbol signature")); +} + +#[gpui::test(iterations = 10)] +async fn test_open_buffer_while_getting_definition_pointing_to_it( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + mut rng: StdRng, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a + .language_registry() + .register_fake_lsp_adapter("Rust", Default::default()); + + client_a + .fs() + .insert_tree( + "/root", + json!({ + "a.rs": "const ONE: usize = b::TWO;", + "b.rs": "const TWO: usize = 2", + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project("/root", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + + let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap(); + + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Scalar( + lsp::Location::new( + lsp::Url::from_file_path("/root/b.rs").unwrap(), + lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), + ), + ))) + }); + + let definitions; + let buffer_b2; + if rng.gen() { + definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); + buffer_b2 = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx)); + } else { + buffer_b2 = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx)); + definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); + } + + let buffer_b2 = buffer_b2.await.unwrap(); + let definitions = definitions.await.unwrap(); + assert_eq!(definitions.len(), 1); + assert_eq!(definitions[0].target.buffer, buffer_b2); +} + +#[gpui::test(iterations = 10)] +async fn test_contacts( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, + cx_d: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + let client_d = server.create_client(cx_d, "user_d").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); + let _active_call_d = cx_d.read(ActiveCall::global); + + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), 
"online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "free"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!(contacts(&client_d, cx_d), []); + + server.disconnect_client(client_c.peer_id().unwrap()); + server.forbid_connections(); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), "offline", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "free"), + ("user_c".to_string(), "offline", "free") + ] + ); + assert_eq!(contacts(&client_c, cx_c), []); + assert_eq!(contacts(&client_d, cx_d), []); + + server.allow_connections(); + client_c + .authenticate_and_connect(false, &cx_c.to_async()) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "free"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!(contacts(&client_d, cx_d), []); + + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "busy"), + ("user_b".to_string(), "online", "busy") + ] + ); + assert_eq!(contacts(&client_d, cx_d), []); + + // Client B and client D become contacts while client B is being called. 
+ server + .make_contacts(&mut [(&client_b, cx_b), (&client_d, cx_d)]) + .await; + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "free"), + ("user_d".to_string(), "online", "free"), + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "busy"), + ("user_b".to_string(), "online", "busy") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "busy")] + ); + + active_call_b.update(cx_b, |call, cx| call.decline_incoming(cx).unwrap()); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "free"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "free")] + ); + + active_call_c + .update(cx_c, |call, cx| { + call.invite(client_a.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), "online", "busy") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "busy"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "busy"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "free")] + ); + + active_call_a + .update(cx_a, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), "online", "busy") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "busy"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "busy"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "free")] + ); + + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "busy") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "busy"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "busy"), + ("user_b".to_string(), "online", "busy") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "busy")] + ); + + active_call_a + .update(cx_a, |call, cx| 
call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "free"), + ("user_c".to_string(), "online", "free"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "free"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "free")] + ); + + active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_a, cx_a), + [ + ("user_b".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "online", "busy"), + ("user_c".to_string(), "online", "free"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "online", "busy"), + ("user_b".to_string(), "online", "busy") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "busy")] + ); + + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + assert_eq!(contacts(&client_a, cx_a), []); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "offline", "free"), + ("user_c".to_string(), "online", "free"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [ + ("user_a".to_string(), "offline", "free"), + ("user_b".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_d, cx_d), + [("user_b".to_string(), "online", "free")] + ); + + // Test removing a contact + client_b + .user_store() + .update(cx_b, |store, cx| { + store.remove_contact(client_c.user_id().unwrap(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + assert_eq!( + contacts(&client_b, cx_b), + [ + ("user_a".to_string(), "offline", "free"), + ("user_d".to_string(), "online", "free") + ] + ); + assert_eq!( + contacts(&client_c, cx_c), + [("user_a".to_string(), "offline", "free"),] + ); + + fn contacts( + client: &TestClient, + cx: &TestAppContext, + ) -> Vec<(String, &'static str, &'static str)> { + client.user_store().read_with(cx, |store, _| { + store + .contacts() + .iter() + .map(|contact| { + ( + contact.user.github_login.clone(), + if contact.online { "online" } else { "offline" }, + if contact.busy { "busy" } else { "free" }, + ) + }) + .collect() + }) + } +} + +#[gpui::test(iterations = 10)] +async fn test_contact_requests( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_a2: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_b2: &mut TestAppContext, + cx_c: &mut TestAppContext, + cx_c2: &mut TestAppContext, +) { + // Connect to a server as 3 clients. 
+ let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_a2 = server.create_client(cx_a2, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_b2 = server.create_client(cx_b2, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + let client_c2 = server.create_client(cx_c2, "user_c").await; + + assert_eq!(client_a.user_id().unwrap(), client_a2.user_id().unwrap()); + assert_eq!(client_b.user_id().unwrap(), client_b2.user_id().unwrap()); + assert_eq!(client_c.user_id().unwrap(), client_c2.user_id().unwrap()); + + // User A and User C request that user B become their contact. + client_a + .user_store() + .update(cx_a, |store, cx| { + store.request_contact(client_b.user_id().unwrap(), cx) + }) + .await + .unwrap(); + client_c + .user_store() + .update(cx_c, |store, cx| { + store.request_contact(client_b.user_id().unwrap(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + + // All users see the pending request appear in all their clients. + assert_eq!( + client_a.summarize_contacts(cx_a).outgoing_requests, + &["user_b"] + ); + assert_eq!( + client_a2.summarize_contacts(cx_a2).outgoing_requests, + &["user_b"] + ); + assert_eq!( + client_b.summarize_contacts(cx_b).incoming_requests, + &["user_a", "user_c"] + ); + assert_eq!( + client_b2.summarize_contacts(cx_b2).incoming_requests, + &["user_a", "user_c"] + ); + assert_eq!( + client_c.summarize_contacts(cx_c).outgoing_requests, + &["user_b"] + ); + assert_eq!( + client_c2.summarize_contacts(cx_c2).outgoing_requests, + &["user_b"] + ); + + // Contact requests are present upon connecting (tested here via disconnect/reconnect) + disconnect_and_reconnect(&client_a, cx_a).await; + disconnect_and_reconnect(&client_b, cx_b).await; + disconnect_and_reconnect(&client_c, cx_c).await; + executor.run_until_parked(); + assert_eq!( + client_a.summarize_contacts(cx_a).outgoing_requests, + &["user_b"] + ); + assert_eq!( + client_b.summarize_contacts(cx_b).incoming_requests, + &["user_a", "user_c"] + ); + assert_eq!( + client_c.summarize_contacts(cx_c).outgoing_requests, + &["user_b"] + ); + + // User B accepts the request from user A. + client_b + .user_store() + .update(cx_b, |store, cx| { + store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // User B sees user A as their contact now in all client, and the incoming request from them is removed. + let contacts_b = client_b.summarize_contacts(cx_b); + assert_eq!(contacts_b.current, &["user_a"]); + assert_eq!(contacts_b.incoming_requests, &["user_c"]); + let contacts_b2 = client_b2.summarize_contacts(cx_b2); + assert_eq!(contacts_b2.current, &["user_a"]); + assert_eq!(contacts_b2.incoming_requests, &["user_c"]); + + // User A sees user B as their contact now in all clients, and the outgoing request to them is removed. 
+ let contacts_a = client_a.summarize_contacts(cx_a); + assert_eq!(contacts_a.current, &["user_b"]); + assert!(contacts_a.outgoing_requests.is_empty()); + let contacts_a2 = client_a2.summarize_contacts(cx_a2); + assert_eq!(contacts_a2.current, &["user_b"]); + assert!(contacts_a2.outgoing_requests.is_empty()); + + // Contacts are present upon connecting (tested here via disconnect/reconnect) + disconnect_and_reconnect(&client_a, cx_a).await; + disconnect_and_reconnect(&client_b, cx_b).await; + disconnect_and_reconnect(&client_c, cx_c).await; + executor.run_until_parked(); + assert_eq!(client_a.summarize_contacts(cx_a).current, &["user_b"]); + assert_eq!(client_b.summarize_contacts(cx_b).current, &["user_a"]); + assert_eq!( + client_b.summarize_contacts(cx_b).incoming_requests, + &["user_c"] + ); + assert!(client_c.summarize_contacts(cx_c).current.is_empty()); + assert_eq!( + client_c.summarize_contacts(cx_c).outgoing_requests, + &["user_b"] + ); + + // User B rejects the request from user C. + client_b + .user_store() + .update(cx_b, |store, cx| { + store.respond_to_contact_request(client_c.user_id().unwrap(), false, cx) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + // User B doesn't see user C as their contact, and the incoming request from them is removed. + let contacts_b = client_b.summarize_contacts(cx_b); + assert_eq!(contacts_b.current, &["user_a"]); + assert!(contacts_b.incoming_requests.is_empty()); + let contacts_b2 = client_b2.summarize_contacts(cx_b2); + assert_eq!(contacts_b2.current, &["user_a"]); + assert!(contacts_b2.incoming_requests.is_empty()); + + // User C doesn't see user B as their contact, and the outgoing request to them is removed. + let contacts_c = client_c.summarize_contacts(cx_c); + assert!(contacts_c.current.is_empty()); + assert!(contacts_c.outgoing_requests.is_empty()); + let contacts_c2 = client_c2.summarize_contacts(cx_c2); + assert!(contacts_c2.current.is_empty()); + assert!(contacts_c2.outgoing_requests.is_empty()); + + // Incoming/outgoing requests are not present upon connecting (tested here via disconnect/reconnect) + disconnect_and_reconnect(&client_a, cx_a).await; + disconnect_and_reconnect(&client_b, cx_b).await; + disconnect_and_reconnect(&client_c, cx_c).await; + executor.run_until_parked(); + assert_eq!(client_a.summarize_contacts(cx_a).current, &["user_b"]); + assert_eq!(client_b.summarize_contacts(cx_b).current, &["user_a"]); + assert!(client_b + .summarize_contacts(cx_b) + .incoming_requests + .is_empty()); + assert!(client_c.summarize_contacts(cx_c).current.is_empty()); + assert!(client_c + .summarize_contacts(cx_c) + .outgoing_requests + .is_empty()); + + async fn disconnect_and_reconnect(client: &TestClient, cx: &mut TestAppContext) { + client.disconnect(&cx.to_async()); + client.clear_contacts(cx).await; + client + .authenticate_and_connect(false, &cx.to_async()) + .await + .unwrap(); + } +} + +#[gpui::test(iterations = 10)] +async fn test_join_call_after_screen_was_shared( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + // Call users B and C from client A. 
+ active_call_a + .update(cx_a, |call, cx| { + call.invite(client_b.user_id().unwrap(), None, cx) + }) + .await + .unwrap(); + + let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone()); + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: Default::default(), + pending: vec!["user_b".to_string()] + } + ); + + // User B receives the call. + + let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming()); + let call_b = incoming_call_b.next().await.unwrap().unwrap(); + assert_eq!(call_b.calling_user.github_login, "user_a"); + + // User A shares their screen + let display = MacOSDisplay::new(); + active_call_a + .update(cx_a, |call, cx| { + call.room().unwrap().update(cx, |room, cx| { + room.set_display_sources(vec![display.clone()]); + room.share_screen(cx) + }) + }) + .await + .unwrap(); + + client_b.user_store().update(cx_b, |user_store, _| { + user_store.clear_cache(); + }); + + // User B joins the room + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + + let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone()); + assert!(incoming_call_b.next().await.unwrap().is_none()); + + executor.run_until_parked(); + assert_eq!( + room_participants(&room_a, cx_a), + RoomParticipants { + remote: vec!["user_b".to_string()], + pending: vec![], + } + ); + assert_eq!( + room_participants(&room_b, cx_b), + RoomParticipants { + remote: vec!["user_a".to_string()], + pending: vec![], + } + ); + + // Ensure User B sees User A's screenshare. + + room_b.read_with(cx_b, |room, _| { + assert_eq!( + room.remote_participants() + .get(&client_a.user_id().unwrap()) + .unwrap() + .video_tracks + .len(), + 1 + ); + }); +} + +#[gpui::test] +async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) { + let mut server = TestServer::start(cx.executor().clone()).await; + let client_a = server.create_client(cx, "user_a").await; + let (_workspace_a, cx) = client_a.build_test_workspace(cx).await; + + cx.simulate_resize(size(px(300.), px(300.))); + + cx.simulate_keystrokes("cmd-n cmd-n cmd-n"); + cx.update(|cx| cx.refresh()); + + let tab_bounds = cx.debug_bounds("TAB-2").unwrap(); + let new_tab_button_bounds = cx.debug_bounds("ICON-Plus").unwrap(); + + assert!( + tab_bounds.intersects(&new_tab_button_bounds), + "Tab should overlap with the new tab button, if this is failing check if there's been a redesign!" + ); + + cx.simulate_event(MouseDownEvent { + button: MouseButton::Right, + position: new_tab_button_bounds.center(), + modifiers: Modifiers::default(), + click_count: 1, + first_mouse: false, + }); + + // regression test that the right click menu for tabs does not open. 
+ assert!(cx.debug_bounds("MENU_ITEM-Close").is_none()); + + let tab_bounds = cx.debug_bounds("TAB-1").unwrap(); + cx.simulate_event(MouseDownEvent { + button: MouseButton::Right, + position: tab_bounds.center(), + modifiers: Modifiers::default(), + click_count: 1, + first_mouse: false, + }); + assert!(cx.debug_bounds("MENU_ITEM-Close").is_some()); +} + +#[gpui::test] +async fn test_pane_split_left(cx: &mut TestAppContext) { + let (_, client) = TestServer::start1(cx).await; + let (workspace, cx) = client.build_test_workspace(cx).await; + + cx.simulate_keystrokes("cmd-n"); + workspace.update(cx, |workspace, cx| { + assert!(workspace.items(cx).collect::>().len() == 1); + }); + cx.simulate_keystrokes("cmd-k left"); + workspace.update(cx, |workspace, cx| { + assert!(workspace.items(cx).collect::>().len() == 2); + }); + cx.simulate_keystrokes("cmd-k"); + // sleep for longer than the timeout in keyboard shortcut handling + // to verify that it doesn't fire in this case. + cx.executor().advance_clock(Duration::from_secs(2)); + cx.simulate_keystrokes("left"); + workspace.update(cx, |workspace, cx| { + assert!(workspace.items(cx).collect::>().len() == 2); + }); +} + +#[gpui::test] +async fn test_join_after_restart(cx1: &mut TestAppContext, cx2: &mut TestAppContext) { + let (mut server, client) = TestServer::start1(cx1).await; + let channel1 = server.make_public_channel("channel1", &client, cx1).await; + let channel2 = server.make_public_channel("channel2", &client, cx1).await; + + join_channel(channel1, &client, cx1).await.unwrap(); + drop(client); + + let client2 = server.create_client(cx2, "user_a").await; + join_channel(channel2, &client2, cx2).await.unwrap(); +} + +#[gpui::test] +async fn test_preview_tabs(cx: &mut TestAppContext) { + let (_server, client) = TestServer::start1(cx).await; + let (workspace, cx) = client.build_test_workspace(cx).await; + let project = workspace.update(cx, |workspace, _| workspace.project().clone()); + + let worktree_id = project.update(cx, |project, cx| { + project.worktrees().next().unwrap().read(cx).id() + }); + + let path_1 = ProjectPath { + worktree_id, + path: Path::new("1.txt").into(), + }; + let path_2 = ProjectPath { + worktree_id, + path: Path::new("2.js").into(), + }; + let path_3 = ProjectPath { + worktree_id, + path: Path::new("3.rs").into(), + }; + + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + let get_path = |pane: &Pane, idx: usize, cx: &AppContext| { + pane.item_for_index(idx).unwrap().project_path(cx).unwrap() + }; + + // Opening item 3 as a "permanent" tab + workspace + .update(cx, |workspace, cx| { + workspace.open_path(path_3.clone(), None, false, cx) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_3.clone()); + assert_eq!(pane.preview_item_id(), None); + + assert!(!pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + // Open item 1 as preview + workspace + .update(cx, |workspace, cx| { + workspace.open_path_preview(path_1.clone(), None, true, true, cx) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 2); + assert_eq!(get_path(pane, 0, cx), path_3.clone()); + assert_eq!(get_path(pane, 1, cx), path_1.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(1).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + // Open item 2 as preview + workspace + .update(cx, 
|workspace, cx| { + workspace.open_path_preview(path_2.clone(), None, true, true, cx) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 2); + assert_eq!(get_path(pane, 0, cx), path_3.clone()); + assert_eq!(get_path(pane, 1, cx), path_2.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(1).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + // Going back should show item 1 as preview + workspace + .update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx)) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 2); + assert_eq!(get_path(pane, 0, cx), path_3.clone()); + assert_eq!(get_path(pane, 1, cx), path_1.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(1).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(pane.can_navigate_forward()); + }); + + // Closing item 1 + pane.update(cx, |pane, cx| { + pane.close_item_by_id( + pane.active_item().unwrap().item_id(), + workspace::SaveIntent::Skip, + cx, + ) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_3.clone()); + assert_eq!(pane.preview_item_id(), None); + + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + // Going back should show item 1 as preview + workspace + .update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx)) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 2); + assert_eq!(get_path(pane, 0, cx), path_3.clone()); + assert_eq!(get_path(pane, 1, cx), path_1.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(1).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(pane.can_navigate_forward()); + }); + + // Close permanent tab + pane.update(cx, |pane, cx| { + let id = pane.items().nth(0).unwrap().item_id(); + pane.close_item_by_id(id, workspace::SaveIntent::Skip, cx) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_1.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(0).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(pane.can_navigate_forward()); + }); + + // Split pane to the right + pane.update(cx, |pane, cx| { + pane.split(workspace::SplitDirection::Right, cx); + }); + + let right_pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_1.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(0).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(pane.can_navigate_forward()); + }); + + right_pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_1.clone()); + assert_eq!(pane.preview_item_id(), None); + + assert!(!pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + // Open item 2 as preview in right pane + workspace + .update(cx, |workspace, cx| { + workspace.open_path_preview(path_2.clone(), None, true, true, cx) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_1.clone()); + assert_eq!( + pane.preview_item_id(), + 
Some(pane.items().nth(0).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(pane.can_navigate_forward()); + }); + + right_pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 2); + assert_eq!(get_path(pane, 0, cx), path_1.clone()); + assert_eq!(get_path(pane, 1, cx), path_2.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(1).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + // Focus left pane + workspace.update(cx, |workspace, cx| { + workspace.activate_pane_in_direction(workspace::SplitDirection::Left, cx) + }); + + // Open item 2 as preview in left pane + workspace + .update(cx, |workspace, cx| { + workspace.open_path_preview(path_2.clone(), None, true, true, cx) + }) + .await + .unwrap(); + + pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 1); + assert_eq!(get_path(pane, 0, cx), path_2.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(0).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); + + right_pane.update(cx, |pane, cx| { + assert_eq!(pane.items_len(), 2); + assert_eq!(get_path(pane, 0, cx), path_1.clone()); + assert_eq!(get_path(pane, 1, cx), path_2.clone()); + assert_eq!( + pane.preview_item_id(), + Some(pane.items().nth(1).unwrap().item_id()) + ); + + assert!(pane.can_navigate_backward()); + assert!(!pane.can_navigate_forward()); + }); +} diff --git a/crates/collab/src/tests/notification_tests.rs b/crates/collab/src/tests/notification_tests.rs new file mode 100644 index 0000000..ddbc1d1 --- /dev/null +++ b/crates/collab/src/tests/notification_tests.rs @@ -0,0 +1,160 @@ +use std::sync::Arc; + +use gpui::{BackgroundExecutor, TestAppContext}; +use notifications::NotificationEvent; +use parking_lot::Mutex; +use rpc::{proto, Notification}; + +use crate::tests::TestServer; + +#[gpui::test] +async fn test_notifications( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let notification_events_a = Arc::new(Mutex::new(Vec::new())); + let notification_events_b = Arc::new(Mutex::new(Vec::new())); + client_a.notification_store().update(cx_a, |_, cx| { + let events = notification_events_a.clone(); + cx.subscribe(&cx.handle(), move |_, _, event, _| { + events.lock().push(event.clone()); + }) + .detach() + }); + client_b.notification_store().update(cx_b, |_, cx| { + let events = notification_events_b.clone(); + cx.subscribe(&cx.handle(), move |_, _, event, _| { + events.lock().push(event.clone()); + }) + .detach() + }); + + // Client A sends a contact request to client B. + client_a + .user_store() + .update(cx_a, |store, cx| store.request_contact(client_b.id(), cx)) + .await + .unwrap(); + + // Client B receives a contact request notification and responds to the + // request, accepting it. 
+ executor.run_until_parked(); + client_b.notification_store().update(cx_b, |store, cx| { + assert_eq!(store.notification_count(), 1); + assert_eq!(store.unread_notification_count(), 1); + + let entry = store.notification_at(0).unwrap(); + assert_eq!( + entry.notification, + Notification::ContactRequest { + sender_id: client_a.id() + } + ); + assert!(!entry.is_read); + assert_eq!( + ¬ification_events_b.lock()[0..], + &[ + NotificationEvent::NewNotification { + entry: entry.clone(), + }, + NotificationEvent::NotificationsUpdated { + old_range: 0..0, + new_count: 1 + } + ] + ); + + store.respond_to_notification(entry.notification.clone(), true, cx); + }); + + // Client B sees the notification is now read, and that they responded. + executor.run_until_parked(); + client_b.notification_store().read_with(cx_b, |store, _| { + assert_eq!(store.notification_count(), 1); + assert_eq!(store.unread_notification_count(), 0); + + let entry = store.notification_at(0).unwrap(); + assert!(entry.is_read); + assert_eq!(entry.response, Some(true)); + assert_eq!( + ¬ification_events_b.lock()[2..], + &[ + NotificationEvent::NotificationRead { + entry: entry.clone(), + }, + NotificationEvent::NotificationsUpdated { + old_range: 0..1, + new_count: 1 + } + ] + ); + }); + + // Client A receives a notification that client B accepted their request. + client_a.notification_store().read_with(cx_a, |store, _| { + assert_eq!(store.notification_count(), 1); + assert_eq!(store.unread_notification_count(), 1); + + let entry = store.notification_at(0).unwrap(); + assert_eq!( + entry.notification, + Notification::ContactRequestAccepted { + responder_id: client_b.id() + } + ); + assert!(!entry.is_read); + }); + + // Client A creates a channel and invites client B to be a member. + let channel_id = client_a + .channel_store() + .update(cx_a, |store, cx| { + store.create_channel("the-channel", None, cx) + }) + .await + .unwrap(); + client_a + .channel_store() + .update(cx_a, |store, cx| { + store.invite_member(channel_id, client_b.id(), proto::ChannelRole::Member, cx) + }) + .await + .unwrap(); + + // Client B receives a channel invitation notification and responds to the + // invitation, accepting it. + executor.run_until_parked(); + client_b.notification_store().update(cx_b, |store, cx| { + assert_eq!(store.notification_count(), 2); + assert_eq!(store.unread_notification_count(), 1); + + let entry = store.notification_at(0).unwrap(); + assert_eq!( + entry.notification, + Notification::ChannelInvitation { + channel_id: channel_id.0, + channel_name: "the-channel".to_string(), + inviter_id: client_a.id() + } + ); + assert!(!entry.is_read); + + store.respond_to_notification(entry.notification.clone(), true, cx); + }); + + // Client B sees the notification is now read, and that they responded. 
+ executor.run_until_parked(); + client_b.notification_store().read_with(cx_b, |store, _| { + assert_eq!(store.notification_count(), 2); + assert_eq!(store.unread_notification_count(), 0); + + let entry = store.notification_at(0).unwrap(); + assert!(entry.is_read); + assert_eq!(entry.response, Some(true)); + }); +} diff --git a/crates/collab/src/tests/random_channel_buffer_tests.rs b/crates/collab/src/tests/random_channel_buffer_tests.rs new file mode 100644 index 0000000..0eacc56 --- /dev/null +++ b/crates/collab/src/tests/random_channel_buffer_tests.rs @@ -0,0 +1,288 @@ +use crate::db::ChannelRole; + +use super::{run_randomized_test, RandomizedTest, TestClient, TestError, TestServer, UserTestPlan}; +use anyhow::Result; +use async_trait::async_trait; +use gpui::{BackgroundExecutor, SharedString, TestAppContext}; +use rand::prelude::*; +use serde_derive::{Deserialize, Serialize}; +use std::{ + ops::{Deref, DerefMut, Range}, + rc::Rc, + sync::Arc, +}; +use text::Bias; + +#[gpui::test( + iterations = 100, + on_failure = "crate::tests::save_randomized_test_plan" +)] +async fn test_random_channel_buffers( + cx: &mut TestAppContext, + executor: BackgroundExecutor, + rng: StdRng, +) { + run_randomized_test::<RandomChannelBufferTest>(cx, executor, rng).await; +} + +struct RandomChannelBufferTest; + +#[derive(Clone, Serialize, Deserialize)] +enum ChannelBufferOperation { + JoinChannelNotes { + channel_name: SharedString, + }, + LeaveChannelNotes { + channel_name: SharedString, + }, + EditChannelNotes { + channel_name: SharedString, + edits: Vec<(Range<usize>, Arc<str>)>, + }, + Noop, +} + +const CHANNEL_COUNT: usize = 3; + +#[async_trait(?Send)] +impl RandomizedTest for RandomChannelBufferTest { + type Operation = ChannelBufferOperation; + + async fn initialize(server: &mut TestServer, users: &[UserTestPlan]) { + let db = &server.app_state.db; + for ix in 0..CHANNEL_COUNT { + let id = db + .create_root_channel(&format!("channel-{ix}"), users[0].user_id) + .await + .unwrap(); + for user in &users[1..] { + db.invite_channel_member(id, user.user_id, users[0].user_id, ChannelRole::Member) + .await + .unwrap(); + db.respond_to_channel_invite(id, user.user_id, true) + .await + .unwrap(); + } + } + } + + fn generate_operation( + client: &TestClient, + rng: &mut StdRng, + _: &mut UserTestPlan, + cx: &TestAppContext, + ) -> ChannelBufferOperation { + let channel_store = client.channel_store().clone(); + let mut channel_buffers = client.channel_buffers(); + + // When signed out, we can't do anything unless a channel buffer is + // already open.
+ if channel_buffers.deref_mut().is_empty() + && channel_store.read_with(cx, |store, _| store.channel_count() == 0) + { + return ChannelBufferOperation::Noop; + } + + loop { + match rng.gen_range(0..100_u32) { + 0..=29 => { + let channel_name = client.channel_store().read_with(cx, |store, cx| { + store.ordered_channels().find_map(|(_, channel)| { + if store.has_open_channel_buffer(channel.id, cx) { + None + } else { + Some(channel.name.clone()) + } + }) + }); + if let Some(channel_name) = channel_name { + break ChannelBufferOperation::JoinChannelNotes { channel_name }; + } + } + + 30..=40 => { + if let Some(buffer) = channel_buffers.deref().iter().choose(rng) { + let channel_name = + buffer.read_with(cx, |b, cx| b.channel(cx).unwrap().name.clone()); + break ChannelBufferOperation::LeaveChannelNotes { channel_name }; + } + } + + _ => { + if let Some(buffer) = channel_buffers.deref().iter().choose(rng) { + break buffer.read_with(cx, |b, cx| { + let channel_name = b.channel(cx).unwrap().name.clone(); + let edits = b + .buffer() + .read_with(cx, |buffer, _| buffer.get_random_edits(rng, 3)); + ChannelBufferOperation::EditChannelNotes { + channel_name, + edits, + } + }); + } + } + } + } + } + + async fn apply_operation( + client: &TestClient, + operation: ChannelBufferOperation, + cx: &mut TestAppContext, + ) -> Result<(), TestError> { + match operation { + ChannelBufferOperation::JoinChannelNotes { channel_name } => { + let buffer = client.channel_store().update(cx, |store, cx| { + let channel_id = store + .ordered_channels() + .find(|(_, c)| c.name == channel_name) + .unwrap() + .1 + .id; + if store.has_open_channel_buffer(channel_id, cx) { + Err(TestError::Inapplicable) + } else { + Ok(store.open_channel_buffer(channel_id, cx)) + } + })?; + + log::info!( + "{}: opening notes for channel {channel_name}", + client.username + ); + client.channel_buffers().deref_mut().insert(buffer.await?); + } + + ChannelBufferOperation::LeaveChannelNotes { channel_name } => { + let buffer = cx.update(|cx| { + let mut left_buffer = Err(TestError::Inapplicable); + client.channel_buffers().deref_mut().retain(|buffer| { + if buffer.read(cx).channel(cx).unwrap().name == channel_name { + left_buffer = Ok(buffer.clone()); + false + } else { + true + } + }); + left_buffer + })?; + + log::info!( + "{}: closing notes for channel {channel_name}", + client.username + ); + cx.update(|_| drop(buffer)); + } + + ChannelBufferOperation::EditChannelNotes { + channel_name, + edits, + } => { + let channel_buffer = cx + .read(|cx| { + client + .channel_buffers() + .deref() + .iter() + .find(|buffer| { + buffer.read(cx).channel(cx).unwrap().name == channel_name + }) + .cloned() + }) + .ok_or_else(|| TestError::Inapplicable)?; + + log::info!( + "{}: editing notes for channel {channel_name} with {:?}", + client.username, + edits + ); + + channel_buffer.update(cx, |buffer, cx| { + let buffer = buffer.buffer(); + buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(); + buffer.edit( + edits.into_iter().map(|(range, text)| { + let start = snapshot.clip_offset(range.start, Bias::Left); + let end = snapshot.clip_offset(range.end, Bias::Right); + (start..end, text) + }), + None, + cx, + ); + }); + }); + } + + ChannelBufferOperation::Noop => Err(TestError::Inapplicable)?, + } + Ok(()) + } + + async fn on_quiesce(server: &mut TestServer, clients: &mut [(Rc, TestAppContext)]) { + let channels = server.app_state.db.all_channels().await.unwrap(); + + for (client, client_cx) in clients.iter_mut() { + client_cx.update(|cx| { + client 
+ .channel_buffers() + .deref_mut() + .retain(|b| b.read(cx).is_connected()); + }); + } + + for (channel_id, channel_name) in channels { + let mut prev_text: Option<(u64, String)> = None; + + let mut collaborator_user_ids = server + .app_state + .db + .get_channel_buffer_collaborators(channel_id) + .await + .unwrap() + .into_iter() + .map(|id| id.to_proto()) + .collect::>(); + collaborator_user_ids.sort(); + + for (client, client_cx) in clients.iter() { + let user_id = client.user_id().unwrap(); + client_cx.read(|cx| { + if let Some(channel_buffer) = client + .channel_buffers() + .deref() + .iter() + .find(|b| b.read(cx).channel_id.0 == channel_id.to_proto()) + { + let channel_buffer = channel_buffer.read(cx); + + // Assert that channel buffer's text matches other clients' copies. + let text = channel_buffer.buffer().read(cx).text(); + if let Some((prev_user_id, prev_text)) = &prev_text { + assert_eq!( + &text, + prev_text, + "client {user_id} has different text than client {prev_user_id} for channel {channel_name}", + ); + } else { + prev_text = Some((user_id, text.clone())); + } + + // Assert that all clients and the server agree about who is present in the + // channel buffer. + let collaborators = channel_buffer.collaborators(); + let mut user_ids = + collaborators.values().map(|c| c.user_id).collect::>(); + user_ids.sort(); + assert_eq!( + user_ids, + collaborator_user_ids, + "client {user_id} has different user ids for channel {channel_name} than the server", + ); + } + }); + } + } + } +} diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs new file mode 100644 index 0000000..03f3f92 --- /dev/null +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -0,0 +1,1595 @@ +use super::{RandomizedTest, TestClient, TestError, TestServer, UserTestPlan}; +use crate::{db::UserId, tests::run_randomized_test}; +use anyhow::{anyhow, Result}; +use async_trait::async_trait; +use call::ActiveCall; +use collections::{BTreeMap, HashMap}; +use editor::Bias; +use fs::{FakeFs, Fs as _}; +use futures::StreamExt; +use git::repository::GitFileStatus; +use gpui::{BackgroundExecutor, Model, TestAppContext}; +use language::{ + range_to_lsp, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, PointUtf16, +}; +use lsp::FakeLanguageServer; +use pretty_assertions::assert_eq; +use project::{search::SearchQuery, Project, ProjectPath, SearchResult}; +use rand::{ + distributions::{Alphanumeric, DistString}, + prelude::*, +}; +use serde::{Deserialize, Serialize}; +use std::{ + ops::{Deref, Range}, + path::{Path, PathBuf}, + rc::Rc, + sync::Arc, +}; +use util::ResultExt; + +#[gpui::test( + iterations = 100, + on_failure = "crate::tests::save_randomized_test_plan" +)] +async fn test_random_project_collaboration( + cx: &mut TestAppContext, + executor: BackgroundExecutor, + rng: StdRng, +) { + run_randomized_test::(cx, executor, rng).await; +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum ClientOperation { + AcceptIncomingCall, + RejectIncomingCall, + LeaveCall, + InviteContactToCall { + user_id: UserId, + }, + OpenLocalProject { + first_root_name: String, + }, + OpenRemoteProject { + host_id: UserId, + first_root_name: String, + }, + AddWorktreeToProject { + project_root_name: String, + new_root_path: PathBuf, + }, + CloseRemoteProject { + project_root_name: String, + }, + OpenBuffer { + project_root_name: String, + is_local: bool, + full_path: PathBuf, + }, + SearchProject { + project_root_name: String, 
+ is_local: bool, + query: String, + detach: bool, + }, + EditBuffer { + project_root_name: String, + is_local: bool, + full_path: PathBuf, + edits: Vec<(Range, Arc)>, + }, + CloseBuffer { + project_root_name: String, + is_local: bool, + full_path: PathBuf, + }, + SaveBuffer { + project_root_name: String, + is_local: bool, + full_path: PathBuf, + detach: bool, + }, + RequestLspDataInBuffer { + project_root_name: String, + is_local: bool, + full_path: PathBuf, + offset: usize, + kind: LspRequestKind, + detach: bool, + }, + CreateWorktreeEntry { + project_root_name: String, + is_local: bool, + full_path: PathBuf, + is_dir: bool, + }, + WriteFsEntry { + path: PathBuf, + is_dir: bool, + content: String, + }, + GitOperation { + operation: GitOperation, + }, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum GitOperation { + WriteGitIndex { + repo_path: PathBuf, + contents: Vec<(PathBuf, String)>, + }, + WriteGitBranch { + repo_path: PathBuf, + new_branch: Option, + }, + WriteGitStatuses { + repo_path: PathBuf, + statuses: Vec<(PathBuf, GitFileStatus)>, + git_operation: bool, + }, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum LspRequestKind { + Rename, + Completion, + CodeAction, + Definition, + Highlights, +} + +struct ProjectCollaborationTest; + +#[async_trait(?Send)] +impl RandomizedTest for ProjectCollaborationTest { + type Operation = ClientOperation; + + async fn initialize(server: &mut TestServer, users: &[UserTestPlan]) { + let db = &server.app_state.db; + for (ix, user_a) in users.iter().enumerate() { + for user_b in &users[ix + 1..] { + db.send_contact_request(user_a.user_id, user_b.user_id) + .await + .unwrap(); + db.respond_to_contact_request(user_b.user_id, user_a.user_id, true) + .await + .unwrap(); + } + } + } + + fn generate_operation( + client: &TestClient, + rng: &mut StdRng, + plan: &mut UserTestPlan, + cx: &TestAppContext, + ) -> ClientOperation { + let call = cx.read(ActiveCall::global); + loop { + match rng.gen_range(0..100_u32) { + // Mutate the call + 0..=29 => { + // Respond to an incoming call + if call.read_with(cx, |call, _| call.incoming().borrow().is_some()) { + break if rng.gen_bool(0.7) { + ClientOperation::AcceptIncomingCall + } else { + ClientOperation::RejectIncomingCall + }; + } + + match rng.gen_range(0..100_u32) { + // Invite a contact to the current call + 0..=70 => { + let available_contacts = + client.user_store().read_with(cx, |user_store, _| { + user_store + .contacts() + .iter() + .filter(|contact| contact.online && !contact.busy) + .cloned() + .collect::>() + }); + if !available_contacts.is_empty() { + let contact = available_contacts.choose(rng).unwrap(); + break ClientOperation::InviteContactToCall { + user_id: UserId(contact.user.id as i32), + }; + } + } + + // Leave the current call + 71.. 
=> { + if plan.allow_client_disconnection + && call.read_with(cx, |call, _| call.room().is_some()) + { + break ClientOperation::LeaveCall; + } + } + } + } + + // Mutate projects + 30..=59 => match rng.gen_range(0..100_u32) { + // Open a new project + 0..=70 => { + // Open a remote project + if let Some(room) = call.read_with(cx, |call, _| call.room().cloned()) { + let existing_dev_server_project_ids = cx.read(|cx| { + client + .dev_server_projects() + .iter() + .map(|p| p.read(cx).remote_id().unwrap()) + .collect::>() + }); + let new_dev_server_projects = room.read_with(cx, |room, _| { + room.remote_participants() + .values() + .flat_map(|participant| { + participant.projects.iter().filter_map(|project| { + if existing_dev_server_project_ids.contains(&project.id) + { + None + } else { + Some(( + UserId::from_proto(participant.user.id), + project.worktree_root_names[0].clone(), + )) + } + }) + }) + .collect::>() + }); + if !new_dev_server_projects.is_empty() { + let (host_id, first_root_name) = + new_dev_server_projects.choose(rng).unwrap().clone(); + break ClientOperation::OpenRemoteProject { + host_id, + first_root_name, + }; + } + } + // Open a local project + else { + let first_root_name = plan.next_root_dir_name(); + break ClientOperation::OpenLocalProject { first_root_name }; + } + } + + // Close a remote project + 71..=80 => { + if !client.dev_server_projects().is_empty() { + let project = client.dev_server_projects().choose(rng).unwrap().clone(); + let first_root_name = root_name_for_project(&project, cx); + break ClientOperation::CloseRemoteProject { + project_root_name: first_root_name, + }; + } + } + + // Mutate project worktrees + 81.. => match rng.gen_range(0..100_u32) { + // Add a worktree to a local project + 0..=50 => { + let Some(project) = client.local_projects().choose(rng).cloned() else { + continue; + }; + let project_root_name = root_name_for_project(&project, cx); + let mut paths = client.fs().paths(false); + paths.remove(0); + let new_root_path = if paths.is_empty() || rng.gen() { + Path::new("/").join(&plan.next_root_dir_name()) + } else { + paths.choose(rng).unwrap().clone() + }; + break ClientOperation::AddWorktreeToProject { + project_root_name, + new_root_path, + }; + } + + // Add an entry to a worktree + _ => { + let Some(project) = choose_random_project(client, rng) else { + continue; + }; + let project_root_name = root_name_for_project(&project, cx); + let is_local = project.read_with(cx, |project, _| project.is_local()); + let worktree = project.read_with(cx, |project, cx| { + project + .worktrees() + .filter(|worktree| { + let worktree = worktree.read(cx); + worktree.is_visible() + && worktree.entries(false).any(|e| e.is_file()) + && worktree.root_entry().map_or(false, |e| e.is_dir()) + }) + .choose(rng) + }); + let Some(worktree) = worktree else { continue }; + let is_dir = rng.gen::(); + let mut full_path = + worktree.read_with(cx, |w, _| PathBuf::from(w.root_name())); + full_path.push(gen_file_name(rng)); + if !is_dir { + full_path.set_extension("rs"); + } + break ClientOperation::CreateWorktreeEntry { + project_root_name, + is_local, + full_path, + is_dir, + }; + } + }, + }, + + // Query and mutate buffers + 60..=90 => { + let Some(project) = choose_random_project(client, rng) else { + continue; + }; + let project_root_name = root_name_for_project(&project, cx); + let is_local = project.read_with(cx, |project, _| project.is_local()); + + match rng.gen_range(0..100_u32) { + // Manipulate an existing buffer + 0..=70 => { + let Some(buffer) = client + 
.buffers_for_project(&project) + .iter() + .choose(rng) + .cloned() + else { + continue; + }; + + let full_path = buffer + .read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx)); + + match rng.gen_range(0..100_u32) { + // Close the buffer + 0..=15 => { + break ClientOperation::CloseBuffer { + project_root_name, + is_local, + full_path, + }; + } + // Save the buffer + 16..=29 if buffer.read_with(cx, |b, _| b.is_dirty()) => { + let detach = rng.gen_bool(0.3); + break ClientOperation::SaveBuffer { + project_root_name, + is_local, + full_path, + detach, + }; + } + // Edit the buffer + 30..=69 => { + let edits = buffer + .read_with(cx, |buffer, _| buffer.get_random_edits(rng, 3)); + break ClientOperation::EditBuffer { + project_root_name, + is_local, + full_path, + edits, + }; + } + // Make an LSP request + _ => { + let offset = buffer.read_with(cx, |buffer, _| { + buffer.clip_offset( + rng.gen_range(0..=buffer.len()), + language::Bias::Left, + ) + }); + let detach = rng.gen(); + break ClientOperation::RequestLspDataInBuffer { + project_root_name, + full_path, + offset, + is_local, + kind: match rng.gen_range(0..5_u32) { + 0 => LspRequestKind::Rename, + 1 => LspRequestKind::Highlights, + 2 => LspRequestKind::Definition, + 3 => LspRequestKind::CodeAction, + 4.. => LspRequestKind::Completion, + }, + detach, + }; + } + } + } + + 71..=80 => { + let query = rng.gen_range('a'..='z').to_string(); + let detach = rng.gen_bool(0.3); + break ClientOperation::SearchProject { + project_root_name, + is_local, + query, + detach, + }; + } + + // Open a buffer + 81.. => { + let worktree = project.read_with(cx, |project, cx| { + project + .worktrees() + .filter(|worktree| { + let worktree = worktree.read(cx); + worktree.is_visible() + && worktree.entries(false).any(|e| e.is_file()) + }) + .choose(rng) + }); + let Some(worktree) = worktree else { continue }; + let full_path = worktree.read_with(cx, |worktree, _| { + let entry = worktree + .entries(false) + .filter(|e| e.is_file()) + .choose(rng) + .unwrap(); + if entry.path.as_ref() == Path::new("") { + Path::new(worktree.root_name()).into() + } else { + Path::new(worktree.root_name()).join(&entry.path) + } + }); + break ClientOperation::OpenBuffer { + project_root_name, + is_local, + full_path, + }; + } + } + } + + // Update a git related action + 91..=95 => { + break ClientOperation::GitOperation { + operation: generate_git_operation(rng, client), + }; + } + + // Create or update a file or directory + 96.. 
=> { + let is_dir = rng.gen::(); + let content; + let mut path; + let dir_paths = client.fs().directories(false); + + if is_dir { + content = String::new(); + path = dir_paths.choose(rng).unwrap().clone(); + path.push(gen_file_name(rng)); + } else { + content = Alphanumeric.sample_string(rng, 16); + + // Create a new file or overwrite an existing file + let file_paths = client.fs().files(); + if file_paths.is_empty() || rng.gen_bool(0.5) { + path = dir_paths.choose(rng).unwrap().clone(); + path.push(gen_file_name(rng)); + path.set_extension("rs"); + } else { + path = file_paths.choose(rng).unwrap().clone() + }; + } + break ClientOperation::WriteFsEntry { + path, + is_dir, + content, + }; + } + } + } + } + + async fn apply_operation( + client: &TestClient, + operation: ClientOperation, + cx: &mut TestAppContext, + ) -> Result<(), TestError> { + match operation { + ClientOperation::AcceptIncomingCall => { + let active_call = cx.read(ActiveCall::global); + if active_call.read_with(cx, |call, _| call.incoming().borrow().is_none()) { + Err(TestError::Inapplicable)?; + } + + log::info!("{}: accepting incoming call", client.username); + active_call + .update(cx, |call, cx| call.accept_incoming(cx)) + .await?; + } + + ClientOperation::RejectIncomingCall => { + let active_call = cx.read(ActiveCall::global); + if active_call.read_with(cx, |call, _| call.incoming().borrow().is_none()) { + Err(TestError::Inapplicable)?; + } + + log::info!("{}: declining incoming call", client.username); + active_call.update(cx, |call, cx| call.decline_incoming(cx))?; + } + + ClientOperation::LeaveCall => { + let active_call = cx.read(ActiveCall::global); + if active_call.read_with(cx, |call, _| call.room().is_none()) { + Err(TestError::Inapplicable)?; + } + + log::info!("{}: hanging up", client.username); + active_call.update(cx, |call, cx| call.hang_up(cx)).await?; + } + + ClientOperation::InviteContactToCall { user_id } => { + let active_call = cx.read(ActiveCall::global); + + log::info!("{}: inviting {}", client.username, user_id,); + active_call + .update(cx, |call, cx| call.invite(user_id.to_proto(), None, cx)) + .await + .log_err(); + } + + ClientOperation::OpenLocalProject { first_root_name } => { + log::info!( + "{}: opening local project at {:?}", + client.username, + first_root_name + ); + + let root_path = Path::new("/").join(&first_root_name); + client.fs().create_dir(&root_path).await.unwrap(); + client + .fs() + .create_file(&root_path.join("main.rs"), Default::default()) + .await + .unwrap(); + let project = client.build_local_project(root_path, cx).await.0; + ensure_project_shared(&project, client, cx).await; + client.local_projects_mut().push(project.clone()); + } + + ClientOperation::AddWorktreeToProject { + project_root_name, + new_root_path, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: finding/creating local worktree at {:?} to project with root path {}", + client.username, + new_root_path, + project_root_name + ); + + ensure_project_shared(&project, client, cx).await; + if !client.fs().paths(false).contains(&new_root_path) { + client.fs().create_dir(&new_root_path).await.unwrap(); + } + project + .update(cx, |project, cx| { + project.find_or_create_local_worktree(&new_root_path, true, cx) + }) + .await + .unwrap(); + } + + ClientOperation::CloseRemoteProject { project_root_name } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + 
"{}: closing remote project with root path {}", + client.username, + project_root_name, + ); + + let ix = client + .dev_server_projects() + .iter() + .position(|p| p == &project) + .unwrap(); + cx.update(|_| { + client.dev_server_projects_mut().remove(ix); + client.buffers().retain(|p, _| *p != project); + drop(project); + }); + } + + ClientOperation::OpenRemoteProject { + host_id, + first_root_name, + } => { + let active_call = cx.read(ActiveCall::global); + let project = active_call + .update(cx, |call, cx| { + let room = call.room().cloned()?; + let participant = room + .read(cx) + .remote_participants() + .get(&host_id.to_proto())?; + let project_id = participant + .projects + .iter() + .find(|project| project.worktree_root_names[0] == first_root_name)? + .id; + Some(room.update(cx, |room, cx| { + room.join_project( + project_id, + client.language_registry().clone(), + FakeFs::new(cx.background_executor().clone()), + cx, + ) + })) + }) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: joining remote project of user {}, root name {}", + client.username, + host_id, + first_root_name, + ); + + let project = project.await?; + client.dev_server_projects_mut().push(project.clone()); + } + + ClientOperation::CreateWorktreeEntry { + project_root_name, + is_local, + full_path, + is_dir, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + let project_path = project_path_for_full_path(&project, &full_path, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: creating {} at path {:?} in {} project {}", + client.username, + if is_dir { "dir" } else { "file" }, + full_path, + if is_local { "local" } else { "remote" }, + project_root_name, + ); + + ensure_project_shared(&project, client, cx).await; + project + .update(cx, |p, cx| p.create_entry(project_path, is_dir, cx)) + .await?; + } + + ClientOperation::OpenBuffer { + project_root_name, + is_local, + full_path, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + let project_path = project_path_for_full_path(&project, &full_path, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: opening buffer {:?} in {} project {}", + client.username, + full_path, + if is_local { "local" } else { "remote" }, + project_root_name, + ); + + ensure_project_shared(&project, client, cx).await; + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await?; + client.buffers_for_project(&project).insert(buffer); + } + + ClientOperation::EditBuffer { + project_root_name, + is_local, + full_path, + edits, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + let buffer = buffer_for_full_path(client, &project, &full_path, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: editing buffer {:?} in {} project {} with {:?}", + client.username, + full_path, + if is_local { "local" } else { "remote" }, + project_root_name, + edits + ); + + ensure_project_shared(&project, client, cx).await; + buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(); + buffer.edit( + edits.into_iter().map(|(range, text)| { + let start = snapshot.clip_offset(range.start, Bias::Left); + let end = snapshot.clip_offset(range.end, Bias::Right); + (start..end, text) + }), + None, + cx, + ); + }); + } + + ClientOperation::CloseBuffer { + project_root_name, + is_local, + full_path, + } => { + let project = 
project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + let buffer = buffer_for_full_path(client, &project, &full_path, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: closing buffer {:?} in {} project {}", + client.username, + full_path, + if is_local { "local" } else { "remote" }, + project_root_name + ); + + ensure_project_shared(&project, client, cx).await; + cx.update(|_| { + client.buffers_for_project(&project).remove(&buffer); + drop(buffer); + }); + } + + ClientOperation::SaveBuffer { + project_root_name, + is_local, + full_path, + detach, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + let buffer = buffer_for_full_path(client, &project, &full_path, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: saving buffer {:?} in {} project {}, {}", + client.username, + full_path, + if is_local { "local" } else { "remote" }, + project_root_name, + if detach { "detaching" } else { "awaiting" } + ); + + ensure_project_shared(&project, client, cx).await; + let requested_version = buffer.read_with(cx, |buffer, _| buffer.version()); + let save = + project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); + let save = cx.spawn(|cx| async move { + save.await + .map_err(|err| anyhow!("save request failed: {:?}", err))?; + assert!(buffer + .read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() }) + .expect("App should not be dropped") + .observed_all(&requested_version)); + anyhow::Ok(()) + }); + if detach { + cx.update(|cx| save.detach_and_log_err(cx)); + } else { + save.await?; + } + } + + ClientOperation::RequestLspDataInBuffer { + project_root_name, + is_local, + full_path, + offset, + kind, + detach, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + let buffer = buffer_for_full_path(client, &project, &full_path, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: request LSP {:?} for buffer {:?} in {} project {}, {}", + client.username, + kind, + full_path, + if is_local { "local" } else { "remote" }, + project_root_name, + if detach { "detaching" } else { "awaiting" } + ); + + use futures::{FutureExt as _, TryFutureExt as _}; + let offset = buffer.read_with(cx, |b, _| b.clip_offset(offset, Bias::Left)); + + let process_lsp_request = project.update(cx, |project, cx| match kind { + LspRequestKind::Rename => project + .prepare_rename(buffer, offset, cx) + .map_ok(|_| ()) + .boxed(), + LspRequestKind::Completion => project + .completions(&buffer, offset, cx) + .map_ok(|_| ()) + .boxed(), + LspRequestKind::CodeAction => project + .code_actions(&buffer, offset..offset, cx) + .map(|_| Ok(())) + .boxed(), + LspRequestKind::Definition => project + .definition(&buffer, offset, cx) + .map_ok(|_| ()) + .boxed(), + LspRequestKind::Highlights => project + .document_highlights(&buffer, offset, cx) + .map_ok(|_| ()) + .boxed(), + }); + let request = cx.foreground_executor().spawn(process_lsp_request); + if detach { + request.detach(); + } else { + request.await?; + } + } + + ClientOperation::SearchProject { + project_root_name, + is_local, + query, + detach, + } => { + let project = project_for_root_name(client, &project_root_name, cx) + .ok_or(TestError::Inapplicable)?; + + log::info!( + "{}: search {} project {} for {:?}, {}", + client.username, + if is_local { "local" } else { "remote" }, + project_root_name, + query, + if detach { "detaching" } else { "awaiting" } + ); + + 
let mut search = project.update(cx, |project, cx| { + project.search( + SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()) + .unwrap(), + cx, + ) + }); + drop(project); + let search = cx.executor().spawn(async move { + let mut results = HashMap::default(); + while let Some(result) = search.next().await { + if let SearchResult::Buffer { buffer, ranges } = result { + results.entry(buffer).or_insert(ranges); + } + } + results + }); + search.await; + } + + ClientOperation::WriteFsEntry { + path, + is_dir, + content, + } => { + if !client + .fs() + .directories(false) + .contains(&path.parent().unwrap().to_owned()) + { + return Err(TestError::Inapplicable); + } + + if is_dir { + log::info!("{}: creating dir at {:?}", client.username, path); + client.fs().create_dir(&path).await.unwrap(); + } else { + let exists = client.fs().metadata(&path).await?.is_some(); + let verb = if exists { "updating" } else { "creating" }; + log::info!("{}: {} file at {:?}", verb, client.username, path); + + client + .fs() + .save(&path, &content.as_str().into(), text::LineEnding::Unix) + .await + .unwrap(); + } + } + + ClientOperation::GitOperation { operation } => match operation { + GitOperation::WriteGitIndex { + repo_path, + contents, + } => { + if !client.fs().directories(false).contains(&repo_path) { + return Err(TestError::Inapplicable); + } + + for (path, _) in contents.iter() { + if !client.fs().files().contains(&repo_path.join(path)) { + return Err(TestError::Inapplicable); + } + } + + log::info!( + "{}: writing git index for repo {:?}: {:?}", + client.username, + repo_path, + contents + ); + + let dot_git_dir = repo_path.join(".git"); + let contents = contents + .iter() + .map(|(path, contents)| (path.as_path(), contents.clone())) + .collect::>(); + if client.fs().metadata(&dot_git_dir).await?.is_none() { + client.fs().create_dir(&dot_git_dir).await?; + } + client.fs().set_index_for_repo(&dot_git_dir, &contents); + } + GitOperation::WriteGitBranch { + repo_path, + new_branch, + } => { + if !client.fs().directories(false).contains(&repo_path) { + return Err(TestError::Inapplicable); + } + + log::info!( + "{}: writing git branch for repo {:?}: {:?}", + client.username, + repo_path, + new_branch + ); + + let dot_git_dir = repo_path.join(".git"); + if client.fs().metadata(&dot_git_dir).await?.is_none() { + client.fs().create_dir(&dot_git_dir).await?; + } + client + .fs() + .set_branch_name(&dot_git_dir, new_branch.clone()); + } + GitOperation::WriteGitStatuses { + repo_path, + statuses, + git_operation, + } => { + if !client.fs().directories(false).contains(&repo_path) { + return Err(TestError::Inapplicable); + } + for (path, _) in statuses.iter() { + if !client.fs().files().contains(&repo_path.join(path)) { + return Err(TestError::Inapplicable); + } + } + + log::info!( + "{}: writing git statuses for repo {:?}: {:?}", + client.username, + repo_path, + statuses + ); + + let dot_git_dir = repo_path.join(".git"); + + let statuses = statuses + .iter() + .map(|(path, val)| (path.as_path(), *val)) + .collect::>(); + + if client.fs().metadata(&dot_git_dir).await?.is_none() { + client.fs().create_dir(&dot_git_dir).await?; + } + + if git_operation { + client.fs().set_status_for_repo_via_git_operation( + &dot_git_dir, + statuses.as_slice(), + ); + } else { + client.fs().set_status_for_repo_via_working_copy_change( + &dot_git_dir, + statuses.as_slice(), + ); + } + } + }, + } + Ok(()) + } + + async fn on_client_added(client: &Rc, _: &mut TestAppContext) { + 
client.language_registry().add(Arc::new(Language::new(
+            LanguageConfig {
+                name: "Rust".into(),
+                matcher: LanguageMatcher {
+                    path_suffixes: vec!["rs".to_string()],
+                    ..Default::default()
+                },
+                ..Default::default()
+            },
+            None,
+        )));
+        client.language_registry().register_fake_lsp_adapter(
+            "Rust",
+            FakeLspAdapter {
+                name: "the-fake-language-server",
+                capabilities: lsp::LanguageServer::full_capabilities(),
+                initializer: Some(Box::new({
+                    let fs = client.app_state.fs.clone();
+                    move |fake_server: &mut FakeLanguageServer| {
+                        fake_server.handle_request::<lsp::request::Completion, _, _>(
+                            |_, _| async move {
+                                Ok(Some(lsp::CompletionResponse::Array(vec![
+                                    lsp::CompletionItem {
+                                        text_edit: Some(lsp::CompletionTextEdit::Edit(
+                                            lsp::TextEdit {
+                                                range: lsp::Range::new(
+                                                    lsp::Position::new(0, 0),
+                                                    lsp::Position::new(0, 0),
+                                                ),
+                                                new_text: "the-new-text".to_string(),
+                                            },
+                                        )),
+                                        ..Default::default()
+                                    },
+                                ])))
+                            },
+                        );
+
+                        fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
+                            |_, _| async move {
+                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
+                                    lsp::CodeAction {
+                                        title: "the-code-action".to_string(),
+                                        ..Default::default()
+                                    },
+                                )]))
+                            },
+                        );
+
+                        fake_server.handle_request::<lsp::request::PrepareRenameRequest, _, _>(
+                            |params, _| async move {
+                                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
+                                    params.position,
+                                    params.position,
+                                ))))
+                            },
+                        );
+
+                        fake_server.handle_request::<lsp::request::GotoDefinition, _, _>({
+                            let fs = fs.clone();
+                            move |_, cx| {
+                                let background = cx.background_executor();
+                                let mut rng = background.rng();
+                                let count = rng.gen_range::<usize, _>(1..3);
+                                let files = fs.as_fake().files();
+                                let files = (0..count)
+                                    .map(|_| files.choose(&mut rng).unwrap().clone())
+                                    .collect::<Vec<_>>();
+                                async move {
+                                    log::info!("LSP: Returning definitions in files {:?}", &files);
+                                    Ok(Some(lsp::GotoDefinitionResponse::Array(
+                                        files
+                                            .into_iter()
+                                            .map(|file| lsp::Location {
+                                                uri: lsp::Url::from_file_path(file).unwrap(),
+                                                range: Default::default(),
+                                            })
+                                            .collect(),
+                                    )))
+                                }
+                            }
+                        });
+
+                        fake_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>(
+                            move |_, cx| {
+                                let mut highlights = Vec::new();
+                                let background = cx.background_executor();
+                                let mut rng = background.rng();
+
+                                let highlight_count = rng.gen_range(1..=5);
+                                for _ in 0..highlight_count {
+                                    let start_row = rng.gen_range(0..100);
+                                    let start_column = rng.gen_range(0..100);
+                                    let end_row = rng.gen_range(0..100);
+                                    let end_column = rng.gen_range(0..100);
+                                    let start = PointUtf16::new(start_row, start_column);
+                                    let end = PointUtf16::new(end_row, end_column);
+                                    let range = if start > end { end..start } else { start..end };
+                                    highlights.push(lsp::DocumentHighlight {
+                                        range: range_to_lsp(range.clone()),
+                                        kind: Some(lsp::DocumentHighlightKind::READ),
+                                    });
+                                }
+                                highlights.sort_unstable_by_key(|highlight| {
+                                    (highlight.range.start, highlight.range.end)
+                                });
+                                async move { Ok(Some(highlights)) }
+                            },
+                        );
+                    }
+                })),
+                ..Default::default()
+            },
+        );
+    }
+
+    async fn on_quiesce(_: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
+        for (client, client_cx) in clients.iter() {
+            for guest_project in client.dev_server_projects().iter() {
+                guest_project.read_with(client_cx, |guest_project, cx| {
+                    let host_project = clients.iter().find_map(|(client, cx)| {
+                        let project = client
+                            .local_projects()
+                            .iter()
+                            .find(|host_project| {
+                                host_project.read_with(cx, |host_project, _| {
+                                    host_project.remote_id() == guest_project.remote_id()
+                                })
+                            })?
+ .clone(); + Some((project, cx)) + }); + + if !guest_project.is_disconnected() { + if let Some((host_project, host_cx)) = host_project { + let host_worktree_snapshots = + host_project.read_with(host_cx, |host_project, cx| { + host_project + .worktrees() + .map(|worktree| { + let worktree = worktree.read(cx); + (worktree.id(), worktree.snapshot()) + }) + .collect::>() + }); + let guest_worktree_snapshots = guest_project + .worktrees() + .map(|worktree| { + let worktree = worktree.read(cx); + (worktree.id(), worktree.snapshot()) + }) + .collect::>(); + + assert_eq!( + guest_worktree_snapshots.values().map(|w| w.abs_path()).collect::>(), + host_worktree_snapshots.values().map(|w| w.abs_path()).collect::>(), + "{} has different worktrees than the host for project {:?}", + client.username, guest_project.remote_id(), + ); + + for (id, host_snapshot) in &host_worktree_snapshots { + let guest_snapshot = &guest_worktree_snapshots[id]; + assert_eq!( + guest_snapshot.root_name(), + host_snapshot.root_name(), + "{} has different root name than the host for worktree {}, project {:?}", + client.username, + id, + guest_project.remote_id(), + ); + assert_eq!( + guest_snapshot.abs_path(), + host_snapshot.abs_path(), + "{} has different abs path than the host for worktree {}, project: {:?}", + client.username, + id, + guest_project.remote_id(), + ); + assert_eq!( + guest_snapshot.entries(false).collect::>(), + host_snapshot.entries(false).collect::>(), + "{} has different snapshot than the host for worktree {:?} ({:?}) and project {:?}", + client.username, + host_snapshot.abs_path(), + id, + guest_project.remote_id(), + ); + assert_eq!(guest_snapshot.repositories().collect::>(), host_snapshot.repositories().collect::>(), + "{} has different repositories than the host for worktree {:?} and project {:?}", + client.username, + host_snapshot.abs_path(), + guest_project.remote_id(), + ); + assert_eq!(guest_snapshot.scan_id(), host_snapshot.scan_id(), + "{} has different scan id than the host for worktree {:?} and project {:?}", + client.username, + host_snapshot.abs_path(), + guest_project.remote_id(), + ); + } + } + } + + for buffer in guest_project.opened_buffers() { + let buffer = buffer.read(cx); + assert_eq!( + buffer.deferred_ops_len(), + 0, + "{} has deferred operations for buffer {:?} in project {:?}", + client.username, + buffer.file().unwrap().full_path(cx), + guest_project.remote_id(), + ); + } + }); + } + + let buffers = client.buffers().clone(); + for (guest_project, guest_buffers) in &buffers { + let project_id = if guest_project.read_with(client_cx, |project, _| { + project.is_local() || project.is_disconnected() + }) { + continue; + } else { + guest_project + .read_with(client_cx, |project, _| project.remote_id()) + .unwrap() + }; + let guest_user_id = client.user_id().unwrap(); + + let host_project = clients.iter().find_map(|(client, cx)| { + let project = client + .local_projects() + .iter() + .find(|host_project| { + host_project.read_with(cx, |host_project, _| { + host_project.remote_id() == Some(project_id) + }) + })? 
+ .clone(); + Some((client.user_id().unwrap(), project, cx)) + }); + + let (host_user_id, host_project, host_cx) = + if let Some((host_user_id, host_project, host_cx)) = host_project { + (host_user_id, host_project, host_cx) + } else { + continue; + }; + + for guest_buffer in guest_buffers { + let buffer_id = + guest_buffer.read_with(client_cx, |buffer, _| buffer.remote_id()); + let host_buffer = host_project.read_with(host_cx, |project, _| { + project.buffer_for_id(buffer_id).unwrap_or_else(|| { + panic!( + "host does not have buffer for guest:{}, peer:{:?}, id:{}", + client.username, + client.peer_id(), + buffer_id + ) + }) + }); + let path = host_buffer + .read_with(host_cx, |buffer, cx| buffer.file().unwrap().full_path(cx)); + + assert_eq!( + guest_buffer.read_with(client_cx, |buffer, _| buffer.deferred_ops_len()), + 0, + "{}, buffer {}, path {:?} has deferred operations", + client.username, + buffer_id, + path, + ); + assert_eq!( + guest_buffer.read_with(client_cx, |buffer, _| buffer.text()), + host_buffer.read_with(host_cx, |buffer, _| buffer.text()), + "{}, buffer {}, path {:?}, differs from the host's buffer", + client.username, + buffer_id, + path + ); + + let host_file = host_buffer.read_with(host_cx, |b, _| b.file().cloned()); + let guest_file = guest_buffer.read_with(client_cx, |b, _| b.file().cloned()); + match (host_file, guest_file) { + (Some(host_file), Some(guest_file)) => { + assert_eq!(guest_file.path(), host_file.path()); + assert_eq!(guest_file.is_deleted(), host_file.is_deleted()); + assert_eq!( + guest_file.mtime(), + host_file.mtime(), + "guest {} mtime does not match host {} for path {:?} in project {}", + guest_user_id, + host_user_id, + guest_file.path(), + project_id, + ); + } + (None, None) => {} + (None, _) => panic!("host's file is None, guest's isn't"), + (_, None) => panic!("guest's file is None, hosts's isn't"), + } + + let host_diff_base = host_buffer + .read_with(host_cx, |b, _| b.diff_base().map(ToString::to_string)); + let guest_diff_base = guest_buffer + .read_with(client_cx, |b, _| b.diff_base().map(ToString::to_string)); + assert_eq!( + guest_diff_base, host_diff_base, + "guest {} diff base does not match host's for path {path:?} in project {project_id}", + client.username + ); + + let host_saved_version = + host_buffer.read_with(host_cx, |b, _| b.saved_version().clone()); + let guest_saved_version = + guest_buffer.read_with(client_cx, |b, _| b.saved_version().clone()); + assert_eq!( + guest_saved_version, host_saved_version, + "guest {} saved version does not match host's for path {path:?} in project {project_id}", + client.username + ); + + let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty()); + let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty()); + assert_eq!( + guest_is_dirty, host_is_dirty, + "guest {} dirty state does not match host's for path {path:?} in project {project_id}", + client.username + ); + + let host_saved_mtime = host_buffer.read_with(host_cx, |b, _| b.saved_mtime()); + let guest_saved_mtime = + guest_buffer.read_with(client_cx, |b, _| b.saved_mtime()); + assert_eq!( + guest_saved_mtime, host_saved_mtime, + "guest {} saved mtime does not match host's for path {path:?} in project {project_id}", + client.username + ); + + let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty()); + let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty()); + assert_eq!(guest_is_dirty, host_is_dirty, + "guest {} dirty status does not match host's for path {path:?} in project 
{project_id}", + client.username + ); + + let host_has_conflict = host_buffer.read_with(host_cx, |b, _| b.has_conflict()); + let guest_has_conflict = + guest_buffer.read_with(client_cx, |b, _| b.has_conflict()); + assert_eq!(guest_has_conflict, host_has_conflict, + "guest {} conflict status does not match host's for path {path:?} in project {project_id}", + client.username + ); + } + } + } + } +} + +fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation { + fn generate_file_paths( + repo_path: &Path, + rng: &mut StdRng, + client: &TestClient, + ) -> Vec { + let mut paths = client + .fs() + .files() + .into_iter() + .filter(|path| path.starts_with(repo_path)) + .collect::>(); + + let count = rng.gen_range(0..=paths.len()); + paths.shuffle(rng); + paths.truncate(count); + + paths + .iter() + .map(|path| path.strip_prefix(repo_path).unwrap().to_path_buf()) + .collect::>() + } + + let repo_path = client.fs().directories(false).choose(rng).unwrap().clone(); + + match rng.gen_range(0..100_u32) { + 0..=25 => { + let file_paths = generate_file_paths(&repo_path, rng, client); + + let contents = file_paths + .into_iter() + .map(|path| (path, Alphanumeric.sample_string(rng, 16))) + .collect(); + + GitOperation::WriteGitIndex { + repo_path, + contents, + } + } + 26..=63 => { + let new_branch = (rng.gen_range(0..10) > 3).then(|| Alphanumeric.sample_string(rng, 8)); + + GitOperation::WriteGitBranch { + repo_path, + new_branch, + } + } + 64..=100 => { + let file_paths = generate_file_paths(&repo_path, rng, client); + + let statuses = file_paths + .into_iter() + .map(|paths| { + ( + paths, + match rng.gen_range(0..3_u32) { + 0 => GitFileStatus::Added, + 1 => GitFileStatus::Modified, + 2 => GitFileStatus::Conflict, + _ => unreachable!(), + }, + ) + }) + .collect::>(); + + let git_operation = rng.gen::(); + + GitOperation::WriteGitStatuses { + repo_path, + statuses, + git_operation, + } + } + _ => unreachable!(), + } +} + +fn buffer_for_full_path( + client: &TestClient, + project: &Model, + full_path: &PathBuf, + cx: &TestAppContext, +) -> Option> { + client + .buffers_for_project(project) + .iter() + .find(|buffer| { + buffer.read_with(cx, |buffer, cx| { + buffer.file().unwrap().full_path(cx) == *full_path + }) + }) + .cloned() +} + +fn project_for_root_name( + client: &TestClient, + root_name: &str, + cx: &TestAppContext, +) -> Option> { + if let Some(ix) = project_ix_for_root_name(client.local_projects().deref(), root_name, cx) { + return Some(client.local_projects()[ix].clone()); + } + if let Some(ix) = project_ix_for_root_name(client.dev_server_projects().deref(), root_name, cx) + { + return Some(client.dev_server_projects()[ix].clone()); + } + None +} + +fn project_ix_for_root_name( + projects: &[Model], + root_name: &str, + cx: &TestAppContext, +) -> Option { + projects.iter().position(|project| { + project.read_with(cx, |project, cx| { + let worktree = project.visible_worktrees(cx).next().unwrap(); + worktree.read(cx).root_name() == root_name + }) + }) +} + +fn root_name_for_project(project: &Model, cx: &TestAppContext) -> String { + project.read_with(cx, |project, cx| { + project + .visible_worktrees(cx) + .next() + .unwrap() + .read(cx) + .root_name() + .to_string() + }) +} + +fn project_path_for_full_path( + project: &Model, + full_path: &Path, + cx: &TestAppContext, +) -> Option { + let mut components = full_path.components(); + let root_name = components.next().unwrap().as_os_str().to_str().unwrap(); + let path = components.as_path().into(); + let worktree_id = 
project.read_with(cx, |project, cx| { + project.worktrees().find_map(|worktree| { + let worktree = worktree.read(cx); + if worktree.root_name() == root_name { + Some(worktree.id()) + } else { + None + } + }) + })?; + Some(ProjectPath { worktree_id, path }) +} + +async fn ensure_project_shared( + project: &Model, + client: &TestClient, + cx: &mut TestAppContext, +) { + let first_root_name = root_name_for_project(project, cx); + let active_call = cx.read(ActiveCall::global); + if active_call.read_with(cx, |call, _| call.room().is_some()) + && project.read_with(cx, |project, _| project.is_local() && !project.is_shared()) + { + match active_call + .update(cx, |call, cx| call.share_project(project.clone(), cx)) + .await + { + Ok(project_id) => { + log::info!( + "{}: shared project {} with id {}", + client.username, + first_root_name, + project_id + ); + } + Err(error) => { + log::error!( + "{}: error sharing project {}: {:?}", + client.username, + first_root_name, + error + ); + } + } + } +} + +fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option> { + client + .local_projects() + .deref() + .iter() + .chain(client.dev_server_projects().iter()) + .choose(rng) + .cloned() +} + +fn gen_file_name(rng: &mut StdRng) -> String { + let mut name = String::new(); + for _ in 0..10 { + let letter = rng.gen_range('a'..='z'); + name.push(letter); + } + name +} diff --git a/crates/collab/src/tests/randomized_test_helpers.rs b/crates/collab/src/tests/randomized_test_helpers.rs new file mode 100644 index 0000000..80ff9fe --- /dev/null +++ b/crates/collab/src/tests/randomized_test_helpers.rs @@ -0,0 +1,695 @@ +use crate::{ + db::{self, NewUserParams, UserId}, + rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}, + tests::{TestClient, TestServer}, +}; +use async_trait::async_trait; +use futures::StreamExt; +use gpui::{BackgroundExecutor, Task, TestAppContext}; +use parking_lot::Mutex; +use rand::prelude::*; +use rpc::RECEIVE_TIMEOUT; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use settings::SettingsStore; +use std::sync::OnceLock; +use std::{ + env, + path::PathBuf, + rc::Rc, + sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, + }, +}; + +fn plan_load_path() -> &'static Option { + static PLAN_LOAD_PATH: OnceLock> = OnceLock::new(); + PLAN_LOAD_PATH.get_or_init(|| path_env_var("LOAD_PLAN")) +} + +fn plan_save_path() -> &'static Option { + static PLAN_SAVE_PATH: OnceLock> = OnceLock::new(); + PLAN_SAVE_PATH.get_or_init(|| path_env_var("SAVE_PLAN")) +} + +fn max_peers() -> usize { + static MAX_PEERS: OnceLock = OnceLock::new(); + *MAX_PEERS.get_or_init(|| { + env::var("MAX_PEERS") + .map(|i| i.parse().expect("invalid `MAX_PEERS` variable")) + .unwrap_or(3) + }) +} + +fn max_operations() -> usize { + static MAX_OPERATIONS: OnceLock = OnceLock::new(); + *MAX_OPERATIONS.get_or_init(|| { + env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10) + }) +} + +static LOADED_PLAN_JSON: Mutex>> = Mutex::new(None); +static LAST_PLAN: Mutex Vec>>> = Mutex::new(None); + +struct TestPlan { + rng: StdRng, + replay: bool, + stored_operations: Vec<(StoredOperation, Arc)>, + max_operations: usize, + operation_ix: usize, + users: Vec, + next_batch_id: usize, + allow_server_restarts: bool, + allow_client_reconnection: bool, + allow_client_disconnection: bool, +} + +pub struct UserTestPlan { + pub user_id: UserId, + pub username: String, + pub allow_client_reconnection: bool, + pub allow_client_disconnection: bool, + next_root_id: usize, + operation_ix: 
usize, + online: bool, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(untagged)] +enum StoredOperation { + Server(ServerOperation), + Client { + user_id: UserId, + batch_id: usize, + operation: T, + }, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum ServerOperation { + AddConnection { + user_id: UserId, + }, + RemoveConnection { + user_id: UserId, + }, + BounceConnection { + user_id: UserId, + }, + RestartServer, + MutateClients { + batch_id: usize, + #[serde(skip_serializing)] + #[serde(skip_deserializing)] + user_ids: Vec, + quiesce: bool, + }, +} + +pub enum TestError { + Inapplicable, + Other(anyhow::Error), +} + +#[async_trait(?Send)] +pub trait RandomizedTest: 'static + Sized { + type Operation: Send + Clone + Serialize + DeserializeOwned; + + fn generate_operation( + client: &TestClient, + rng: &mut StdRng, + plan: &mut UserTestPlan, + cx: &TestAppContext, + ) -> Self::Operation; + + async fn apply_operation( + client: &TestClient, + operation: Self::Operation, + cx: &mut TestAppContext, + ) -> Result<(), TestError>; + + async fn initialize(server: &mut TestServer, users: &[UserTestPlan]); + + async fn on_client_added(_client: &Rc, _cx: &mut TestAppContext) {} + + async fn on_quiesce(server: &mut TestServer, client: &mut [(Rc, TestAppContext)]); +} + +pub async fn run_randomized_test( + cx: &mut TestAppContext, + executor: BackgroundExecutor, + rng: StdRng, +) { + let mut server = TestServer::start(executor.clone()).await; + let plan = TestPlan::::new(&mut server, rng).await; + + LAST_PLAN.lock().replace({ + let plan = plan.clone(); + Box::new(move || plan.lock().serialize()) + }); + + let mut clients = Vec::new(); + let mut client_tasks = Vec::new(); + let mut operation_channels = Vec::new(); + loop { + let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else { + break; + }; + applied.store(true, SeqCst); + let did_apply = TestPlan::apply_server_operation( + plan.clone(), + executor.clone(), + &mut server, + &mut clients, + &mut client_tasks, + &mut operation_channels, + next_operation, + cx, + ) + .await; + if !did_apply { + applied.store(false, SeqCst); + } + } + + drop(operation_channels); + executor.start_waiting(); + futures::future::join_all(client_tasks).await; + executor.finish_waiting(); + + executor.run_until_parked(); + T::on_quiesce(&mut server, &mut clients).await; + + for (client, cx) in clients { + cx.update(|cx| { + let store = cx.remove_global::(); + cx.clear_globals(); + cx.set_global(store); + drop(client); + }); + } + executor.run_until_parked(); + + if let Some(path) = plan_save_path() { + eprintln!("saved test plan to path {:?}", path); + std::fs::write(path, plan.lock().serialize()).unwrap(); + } +} + +pub fn save_randomized_test_plan() { + if let Some(serialize_plan) = LAST_PLAN.lock().take() { + if let Some(path) = plan_save_path() { + eprintln!("saved test plan to path {:?}", path); + std::fs::write(path, serialize_plan()).unwrap(); + } + } +} + +impl TestPlan { + pub async fn new(server: &mut TestServer, mut rng: StdRng) -> Arc> { + let allow_server_restarts = rng.gen_bool(0.7); + let allow_client_reconnection = rng.gen_bool(0.7); + let allow_client_disconnection = rng.gen_bool(0.1); + + let mut users = Vec::new(); + for ix in 0..max_peers() { + let username = format!("user-{}", ix + 1); + let user_id = server + .app_state + .db + .create_user( + &format!("{username}@example.com"), + false, + NewUserParams { + github_login: username.clone(), + github_user_id: ix as i32, + }, + ) + .await + 
.unwrap() + .user_id; + users.push(UserTestPlan { + user_id, + username, + online: false, + next_root_id: 0, + operation_ix: 0, + allow_client_disconnection, + allow_client_reconnection, + }); + } + + T::initialize(server, &users).await; + + let plan = Arc::new(Mutex::new(Self { + replay: false, + allow_server_restarts, + allow_client_reconnection, + allow_client_disconnection, + stored_operations: Vec::new(), + operation_ix: 0, + next_batch_id: 0, + max_operations: max_operations(), + users, + rng, + })); + + if let Some(path) = plan_load_path() { + let json = LOADED_PLAN_JSON + .lock() + .get_or_insert_with(|| { + eprintln!("loaded test plan from path {:?}", path); + std::fs::read(path).unwrap() + }) + .clone(); + plan.lock().deserialize(json); + } + + plan + } + + fn deserialize(&mut self, json: Vec) { + let stored_operations: Vec> = + serde_json::from_slice(&json).unwrap(); + self.replay = true; + self.stored_operations = stored_operations + .iter() + .cloned() + .enumerate() + .map(|(i, mut operation)| { + let did_apply = Arc::new(AtomicBool::new(false)); + if let StoredOperation::Server(ServerOperation::MutateClients { + batch_id: current_batch_id, + user_ids, + .. + }) = &mut operation + { + assert!(user_ids.is_empty()); + user_ids.extend(stored_operations[i + 1..].iter().filter_map(|operation| { + if let StoredOperation::Client { + user_id, batch_id, .. + } = operation + { + if batch_id == current_batch_id { + return Some(user_id); + } + } + None + })); + user_ids.sort_unstable(); + } + (operation, did_apply) + }) + .collect() + } + + fn serialize(&mut self) -> Vec { + // Format each operation as one line + let mut json = Vec::new(); + json.push(b'['); + for (operation, applied) in &self.stored_operations { + if !applied.load(SeqCst) { + continue; + } + if json.len() > 1 { + json.push(b','); + } + json.extend_from_slice(b"\n "); + serde_json::to_writer(&mut json, operation).unwrap(); + } + json.extend_from_slice(b"\n]\n"); + json + } + + fn next_server_operation( + &mut self, + clients: &[(Rc, TestAppContext)], + ) -> Option<(ServerOperation, Arc)> { + if self.replay { + while let Some(stored_operation) = self.stored_operations.get(self.operation_ix) { + self.operation_ix += 1; + if let (StoredOperation::Server(operation), applied) = stored_operation { + return Some((operation.clone(), applied.clone())); + } + } + None + } else { + let operation = self.generate_server_operation(clients)?; + let applied = Arc::new(AtomicBool::new(false)); + self.stored_operations + .push((StoredOperation::Server(operation.clone()), applied.clone())); + Some((operation, applied)) + } + } + + fn next_client_operation( + &mut self, + client: &TestClient, + current_batch_id: usize, + cx: &TestAppContext, + ) -> Option<(T::Operation, Arc)> { + let current_user_id = client.current_user_id(cx); + let user_ix = self + .users + .iter() + .position(|user| user.user_id == current_user_id) + .unwrap(); + let user_plan = &mut self.users[user_ix]; + + if self.replay { + while let Some(stored_operation) = self.stored_operations.get(user_plan.operation_ix) { + user_plan.operation_ix += 1; + if let ( + StoredOperation::Client { + user_id, operation, .. 
+                    },
+                    applied,
+                ) = stored_operation
+                {
+                    if user_id == &current_user_id {
+                        return Some((operation.clone(), applied.clone()));
+                    }
+                }
+            }
+            None
+        } else {
+            if self.operation_ix == self.max_operations {
+                return None;
+            }
+            self.operation_ix += 1;
+            let operation = T::generate_operation(
+                client,
+                &mut self.rng,
+                self.users
+                    .iter_mut()
+                    .find(|user| user.user_id == current_user_id)
+                    .unwrap(),
+                cx,
+            );
+            let applied = Arc::new(AtomicBool::new(false));
+            self.stored_operations.push((
+                StoredOperation::Client {
+                    user_id: current_user_id,
+                    batch_id: current_batch_id,
+                    operation: operation.clone(),
+                },
+                applied.clone(),
+            ));
+            Some((operation, applied))
+        }
+    }
+
+    fn generate_server_operation(
+        &mut self,
+        clients: &[(Rc<TestClient>, TestAppContext)],
+    ) -> Option<ServerOperation> {
+        if self.operation_ix == self.max_operations {
+            return None;
+        }
+
+        Some(loop {
+            break match self.rng.gen_range(0..100) {
+                0..=29 if clients.len() < self.users.len() => {
+                    let user = self
+                        .users
+                        .iter()
+                        .filter(|u| !u.online)
+                        .choose(&mut self.rng)
+                        .unwrap();
+                    self.operation_ix += 1;
+                    ServerOperation::AddConnection {
+                        user_id: user.user_id,
+                    }
+                }
+                30..=34 if clients.len() > 1 && self.allow_client_disconnection => {
+                    let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
+                    let user_id = client.current_user_id(cx);
+                    self.operation_ix += 1;
+                    ServerOperation::RemoveConnection { user_id }
+                }
+                35..=39 if clients.len() > 1 && self.allow_client_reconnection => {
+                    let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
+                    let user_id = client.current_user_id(cx);
+                    self.operation_ix += 1;
+                    ServerOperation::BounceConnection { user_id }
+                }
+                40..=44 if self.allow_server_restarts && clients.len() > 1 => {
+                    self.operation_ix += 1;
+                    ServerOperation::RestartServer
+                }
+                _ if !clients.is_empty() => {
+                    let count = self
+                        .rng
+                        .gen_range(1..10)
+                        .min(self.max_operations - self.operation_ix);
+                    let batch_id = util::post_inc(&mut self.next_batch_id);
+                    let mut user_ids = (0..count)
+                        .map(|_| {
+                            let ix = self.rng.gen_range(0..clients.len());
+                            let (client, cx) = &clients[ix];
+                            client.current_user_id(cx)
+                        })
+                        .collect::<Vec<_>>();
+                    user_ids.sort_unstable();
+                    ServerOperation::MutateClients {
+                        user_ids,
+                        batch_id,
+                        quiesce: self.rng.gen_bool(0.7),
+                    }
+                }
+                _ => continue,
+            };
+        })
+    }
+
+    #[allow(clippy::too_many_arguments)]
+    async fn apply_server_operation(
+        plan: Arc<Mutex<Self>>,
+        deterministic: BackgroundExecutor,
+        server: &mut TestServer,
+        clients: &mut Vec<(Rc<TestClient>, TestAppContext)>,
+        client_tasks: &mut Vec<Task<()>>,
+        operation_channels: &mut Vec<futures::channel::mpsc::UnboundedSender<usize>>,
+        operation: ServerOperation,
+        cx: &mut TestAppContext,
+    ) -> bool {
+        match operation {
+            ServerOperation::AddConnection { user_id } => {
+                let username;
+                {
+                    let mut plan = plan.lock();
+                    let user = plan.user(user_id);
+                    if user.online {
+                        return false;
+                    }
+                    user.online = true;
+                    username = user.username.clone();
+                };
+                log::info!("adding new connection for {}", username);
+
+                let mut client_cx = cx.new_app();
+
+                let (operation_tx, operation_rx) = futures::channel::mpsc::unbounded();
+                let client = Rc::new(server.create_client(&mut client_cx, &username).await);
+                operation_channels.push(operation_tx);
+                clients.push((client.clone(), client_cx.clone()));
+
+                let foreground_executor = client_cx.foreground_executor().clone();
+                let simulate_client =
+                    Self::simulate_client(plan.clone(), client, operation_rx, client_cx);
+                client_tasks.push(foreground_executor.spawn(simulate_client));
+
+                log::info!("added connection for {}", username);
+            }
+
+
ServerOperation::RemoveConnection { + user_id: removed_user_id, + } => { + log::info!("simulating full disconnection of user {}", removed_user_id); + let client_ix = clients + .iter() + .position(|(client, cx)| client.current_user_id(cx) == removed_user_id); + let Some(client_ix) = client_ix else { + return false; + }; + let user_connection_ids = server + .connection_pool + .lock() + .user_connection_ids(removed_user_id) + .collect::>(); + assert_eq!(user_connection_ids.len(), 1); + let removed_peer_id = user_connection_ids[0].into(); + let (client, client_cx) = clients.remove(client_ix); + let client_task = client_tasks.remove(client_ix); + operation_channels.remove(client_ix); + server.forbid_connections(); + server.disconnect_client(removed_peer_id); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + deterministic.start_waiting(); + log::info!("waiting for user {} to exit...", removed_user_id); + client_task.await; + deterministic.finish_waiting(); + server.allow_connections(); + + for project in client.dev_server_projects().iter() { + project.read_with(&client_cx, |project, _| { + assert!( + project.is_disconnected(), + "project {:?} should be read only", + project.remote_id() + ) + }); + } + + for (client, cx) in clients { + let contacts = server + .app_state + .db + .get_contacts(client.current_user_id(cx)) + .await + .unwrap(); + let pool = server.connection_pool.lock(); + for contact in contacts { + if let db::Contact::Accepted { user_id, busy, .. } = contact { + if user_id == removed_user_id { + assert!(!pool.is_user_online(user_id)); + assert!(!busy); + } + } + } + } + + log::info!("{} removed", client.username); + plan.lock().user(removed_user_id).online = false; + client_cx.update(|cx| { + cx.clear_globals(); + drop(client); + }); + } + + ServerOperation::BounceConnection { user_id } => { + log::info!("simulating temporary disconnection of user {}", user_id); + let user_connection_ids = server + .connection_pool + .lock() + .user_connection_ids(user_id) + .collect::>(); + if user_connection_ids.is_empty() { + return false; + } + assert_eq!(user_connection_ids.len(), 1); + let peer_id = user_connection_ids[0].into(); + server.disconnect_client(peer_id); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + } + + ServerOperation::RestartServer => { + log::info!("simulating server restart"); + server.reset().await; + deterministic.advance_clock(RECEIVE_TIMEOUT); + server.start().await.unwrap(); + deterministic.advance_clock(CLEANUP_TIMEOUT); + let environment = &server.app_state.config.zed_environment; + let (stale_room_ids, _) = server + .app_state + .db + .stale_server_resource_ids(environment, server.id()) + .await + .unwrap(); + assert_eq!(stale_room_ids, vec![]); + } + + ServerOperation::MutateClients { + user_ids, + batch_id, + quiesce, + } => { + let mut applied = false; + for user_id in user_ids { + let client_ix = clients + .iter() + .position(|(client, cx)| client.current_user_id(cx) == user_id); + let Some(client_ix) = client_ix else { continue }; + applied = true; + if let Err(err) = operation_channels[client_ix].unbounded_send(batch_id) { + log::error!("error signaling user {user_id}: {err}"); + } + } + + if quiesce && applied { + deterministic.run_until_parked(); + T::on_quiesce(server, clients).await; + } + + return applied; + } + } + true + } + + async fn simulate_client( + plan: Arc>, + client: Rc, + mut operation_rx: futures::channel::mpsc::UnboundedReceiver, + mut cx: TestAppContext, + ) { + T::on_client_added(&client, &mut 
cx).await; + + while let Some(batch_id) = operation_rx.next().await { + let Some((operation, applied)) = + plan.lock().next_client_operation(&client, batch_id, &cx) + else { + break; + }; + applied.store(true, SeqCst); + match T::apply_operation(&client, operation, &mut cx).await { + Ok(()) => {} + Err(TestError::Inapplicable) => { + applied.store(false, SeqCst); + log::info!("skipped operation"); + } + Err(TestError::Other(error)) => { + log::error!("{} error: {}", client.username, error); + } + } + cx.executor().simulate_random_delay().await; + } + log::info!("{}: done", client.username); + } + + fn user(&mut self, user_id: UserId) -> &mut UserTestPlan { + self.users + .iter_mut() + .find(|user| user.user_id == user_id) + .unwrap() + } +} + +impl UserTestPlan { + pub fn next_root_dir_name(&mut self) -> String { + let user_id = self.user_id; + let root_id = util::post_inc(&mut self.next_root_id); + format!("dir-{user_id}-{root_id}") + } +} + +impl From for TestError { + fn from(value: anyhow::Error) -> Self { + Self::Other(value) + } +} + +fn path_env_var(name: &str) -> Option { + let value = env::var(name).ok()?; + let mut path = PathBuf::from(value); + if path.is_relative() { + let mut abs_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + abs_path.pop(); + abs_path.pop(); + abs_path.push(path); + path = abs_path + } + Some(path) +} diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs new file mode 100644 index 0000000..fc663cb --- /dev/null +++ b/crates/collab/src/tests/test_server.rs @@ -0,0 +1,961 @@ +use crate::{ + auth::split_dev_server_token, + db::{tests::TestDb, NewUserParams, UserId}, + executor::Executor, + rpc::{Principal, Server, ZedVersion, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}, + AppState, Config, RateLimiter, +}; +use anyhow::anyhow; +use call::ActiveCall; +use channel::{ChannelBuffer, ChannelStore}; +use client::{ + self, proto::PeerId, ChannelId, Client, Connection, Credentials, EstablishConnectionError, + UserStore, +}; +use clock::FakeSystemClock; +use collab_ui::channel_view::ChannelView; +use collections::{HashMap, HashSet}; +use fs::FakeFs; +use futures::{channel::oneshot, StreamExt as _}; +use git::GitHostingProviderRegistry; +use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext}; +use http::FakeHttpClient; +use language::LanguageRegistry; +use node_runtime::FakeNodeRuntime; +use notifications::NotificationStore; +use parking_lot::Mutex; +use project::{Project, WorktreeId}; +use rpc::{ + proto::{self, ChannelRole}, + RECEIVE_TIMEOUT, +}; +use semantic_version::SemanticVersion; +use serde_json::json; +use settings::SettingsStore; +use std::{ + cell::{Ref, RefCell, RefMut}, + env, + ops::{Deref, DerefMut}, + path::Path, + sync::{ + atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst}, + Arc, + }, +}; +use workspace::{Workspace, WorkspaceId, WorkspaceStore}; + +pub struct TestServer { + pub app_state: Arc, + pub test_live_kit_server: Arc, + server: Arc, + next_github_user_id: i32, + connection_killers: Arc>>>, + forbid_connections: Arc, + _test_db: TestDb, +} + +pub struct TestClient { + pub username: String, + pub app_state: Arc, + channel_store: Model, + notification_store: Model, + state: RefCell, +} + +#[derive(Default)] +struct TestClientState { + local_projects: Vec>, + dev_server_projects: Vec>, + buffers: HashMap, HashSet>>, + channel_buffers: HashSet>, +} + +pub struct ContactsSummary { + pub current: Vec, + pub outgoing_requests: Vec, + pub incoming_requests: Vec, +} + +impl 
TestServer { + pub async fn start(deterministic: BackgroundExecutor) -> Self { + static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0); + + let use_postgres = env::var("USE_POSTGRES").ok(); + let use_postgres = use_postgres.as_deref(); + let test_db = if use_postgres == Some("true") || use_postgres == Some("1") { + TestDb::postgres(deterministic.clone()) + } else { + TestDb::sqlite(deterministic.clone()) + }; + let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst); + let live_kit_server = live_kit_client::TestServer::create( + format!("http://livekit.{}.test", live_kit_server_id), + format!("devkey-{}", live_kit_server_id), + format!("secret-{}", live_kit_server_id), + deterministic.clone(), + ) + .unwrap(); + let executor = Executor::Deterministic(deterministic.clone()); + let app_state = Self::build_app_state(&test_db, &live_kit_server, executor.clone()).await; + let epoch = app_state + .db + .create_server(&app_state.config.zed_environment) + .await + .unwrap(); + let server = Server::new(epoch, app_state.clone()); + server.start().await.unwrap(); + // Advance clock to ensure the server's cleanup task is finished. + deterministic.advance_clock(CLEANUP_TIMEOUT); + Self { + app_state, + server, + connection_killers: Default::default(), + forbid_connections: Default::default(), + next_github_user_id: 0, + _test_db: test_db, + test_live_kit_server: live_kit_server, + } + } + + pub async fn start2( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + ) -> (TestServer, TestClient, TestClient, ChannelId) { + let mut server = Self::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let channel_id = server + .make_channel( + "test-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + cx_a.run_until_parked(); + + (server, client_a, client_b, channel_id) + } + + pub async fn start1(cx: &mut TestAppContext) -> (TestServer, TestClient) { + let mut server = Self::start(cx.executor().clone()).await; + let client = server.create_client(cx, "user_a").await; + (server, client) + } + + pub async fn reset(&self) { + self.app_state.db.reset(); + let epoch = self + .app_state + .db + .create_server(&self.app_state.config.zed_environment) + .await + .unwrap(); + self.server.reset(epoch); + } + + pub async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient { + cx.update(|cx| { + if cx.has_global::() { + panic!("Same cx used to create two test clients") + } + let settings = SettingsStore::test(cx); + cx.set_global(settings); + release_channel::init("0.0.0", cx); + client::init_settings(cx); + }); + + let clock = Arc::new(FakeSystemClock::default()); + let http = FakeHttpClient::with_404_response(); + let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await + { + user.id + } else { + let github_user_id = self.next_github_user_id; + self.next_github_user_id += 1; + self.app_state + .db + .create_user( + &format!("{name}@example.com"), + false, + NewUserParams { + github_login: name.into(), + github_user_id, + }, + ) + .await + .expect("creating user failed") + .user_id + }; + let client_name = name.to_string(); + let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx)); + let server = self.server.clone(); + let db = self.app_state.db.clone(); + let connection_killers = self.connection_killers.clone(); + let forbid_connections = self.forbid_connections.clone(); + + 
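+ // Wire this client to the in-process test server: authentication always resolves to a fake
+ // token for the created user, and each established connection is an in-memory pipe whose
+ // kill switch is registered in `connection_killers` so tests can sever or forbid it later.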
Arc::get_mut(&mut client) + .unwrap() + .set_id(user_id.to_proto()) + .override_authenticate(move |cx| { + cx.spawn(|_| async move { + let access_token = "the-token".to_string(); + Ok(Credentials::User { + user_id: user_id.to_proto(), + access_token, + }) + }) + }) + .override_establish_connection(move |credentials, cx| { + assert_eq!( + credentials, + &Credentials::User { + user_id: user_id.0 as u64, + access_token: "the-token".into() + } + ); + + let server = server.clone(); + let db = db.clone(); + let connection_killers = connection_killers.clone(); + let forbid_connections = forbid_connections.clone(); + let client_name = client_name.clone(); + cx.spawn(move |cx| async move { + if forbid_connections.load(SeqCst) { + Err(EstablishConnectionError::other(anyhow!( + "server is forbidding connections" + ))) + } else { + let (client_conn, server_conn, killed) = + Connection::in_memory(cx.background_executor().clone()); + let (connection_id_tx, connection_id_rx) = oneshot::channel(); + let user = db + .get_user_by_id(user_id) + .await + .expect("retrieving user failed") + .unwrap(); + cx.background_executor() + .spawn(server.handle_connection( + server_conn, + client_name, + Principal::User(user), + ZedVersion(SemanticVersion::new(1, 0, 0)), + Some(connection_id_tx), + Executor::Deterministic(cx.background_executor().clone()), + )) + .detach(); + let connection_id = connection_id_rx.await.map_err(|e| { + EstablishConnectionError::Other(anyhow!( + "{} (is server shutting down?)", + e + )) + })?; + connection_killers + .lock() + .insert(connection_id.into(), killed); + Ok(client_conn) + } + }) + }); + + let git_hosting_provider_registry = + cx.update(|cx| GitHostingProviderRegistry::default_global(cx)); + git_hosting_provider_registry + .register_hosting_provider(Arc::new(git_hosting_providers::Github)); + + let fs = FakeFs::new(cx.executor()); + let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); + let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx)); + let language_registry = Arc::new(LanguageRegistry::test(cx.executor())); + let app_state = Arc::new(workspace::AppState { + client: client.clone(), + user_store: user_store.clone(), + workspace_store, + languages: language_registry, + fs: fs.clone(), + build_window_options: |_, _| Default::default(), + node_runtime: FakeNodeRuntime::new(), + }); + + let os_keymap = if cfg!(target_os = "linux") { + "keymaps/default-linux.json" + } else { + "keymaps/default-macos.json" + }; + + cx.update(|cx| { + theme::init(theme::LoadThemes::JustBase, cx); + Project::init(&client, cx); + client::init(&client, cx); + language::init(cx); + editor::init(cx); + workspace::init(app_state.clone(), cx); + call::init(client.clone(), user_store.clone(), cx); + channel::init(&client, user_store.clone(), cx); + notifications::init(client.clone(), user_store, cx); + collab_ui::init(&app_state, cx); + file_finder::init(cx); + menu::init(); + dev_server_projects::init(client.clone(), cx); + settings::KeymapFile::load_asset(os_keymap, cx).unwrap(); + }); + + client + .authenticate_and_connect(false, &cx.to_async()) + .await + .unwrap(); + + let client = TestClient { + app_state, + username: name.to_string(), + channel_store: cx.read(ChannelStore::global).clone(), + notification_store: cx.read(NotificationStore::global).clone(), + state: Default::default(), + }; + client.wait_for_current_user(cx).await; + client + } + + pub async fn create_dev_server( + &self, + access_token: String, + cx: &mut TestAppContext, + ) -> TestClient 
{ + cx.update(|cx| { + if cx.has_global::() { + panic!("Same cx used to create two test clients") + } + let settings = SettingsStore::test(cx); + cx.set_global(settings); + release_channel::init("0.0.0", cx); + client::init_settings(cx); + }); + let (dev_server_id, _) = split_dev_server_token(&access_token).unwrap(); + + let clock = Arc::new(FakeSystemClock::default()); + let http = FakeHttpClient::with_404_response(); + let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx)); + let server = self.server.clone(); + let db = self.app_state.db.clone(); + let connection_killers = self.connection_killers.clone(); + let forbid_connections = self.forbid_connections.clone(); + Arc::get_mut(&mut client) + .unwrap() + .set_id(1) + .set_dev_server_token(client::DevServerToken(access_token.clone())) + .override_establish_connection(move |credentials, cx| { + assert_eq!( + credentials, + &Credentials::DevServer { + token: client::DevServerToken(access_token.to_string()) + } + ); + + let server = server.clone(); + let db = db.clone(); + let connection_killers = connection_killers.clone(); + let forbid_connections = forbid_connections.clone(); + cx.spawn(move |cx| async move { + if forbid_connections.load(SeqCst) { + Err(EstablishConnectionError::other(anyhow!( + "server is forbidding connections" + ))) + } else { + let (client_conn, server_conn, killed) = + Connection::in_memory(cx.background_executor().clone()); + let (connection_id_tx, connection_id_rx) = oneshot::channel(); + let dev_server = db + .get_dev_server(dev_server_id) + .await + .expect("retrieving dev_server failed"); + cx.background_executor() + .spawn(server.handle_connection( + server_conn, + "dev-server".to_string(), + Principal::DevServer(dev_server), + ZedVersion(SemanticVersion::new(1, 0, 0)), + Some(connection_id_tx), + Executor::Deterministic(cx.background_executor().clone()), + )) + .detach(); + let connection_id = connection_id_rx.await.map_err(|e| { + EstablishConnectionError::Other(anyhow!( + "{} (is server shutting down?)", + e + )) + })?; + connection_killers + .lock() + .insert(connection_id.into(), killed); + Ok(client_conn) + } + }) + }); + + let fs = FakeFs::new(cx.executor()); + let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); + let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx)); + let language_registry = Arc::new(LanguageRegistry::test(cx.executor())); + let app_state = Arc::new(workspace::AppState { + client: client.clone(), + user_store: user_store.clone(), + workspace_store, + languages: language_registry, + fs: fs.clone(), + build_window_options: |_, _| Default::default(), + node_runtime: FakeNodeRuntime::new(), + }); + + cx.update(|cx| { + theme::init(theme::LoadThemes::JustBase, cx); + Project::init(&client, cx); + client::init(&client, cx); + language::init(cx); + editor::init(cx); + workspace::init(app_state.clone(), cx); + call::init(client.clone(), user_store.clone(), cx); + channel::init(&client, user_store.clone(), cx); + notifications::init(client.clone(), user_store, cx); + collab_ui::init(&app_state, cx); + file_finder::init(cx); + menu::init(); + headless::init( + client.clone(), + headless::AppState { + languages: app_state.languages.clone(), + user_store: app_state.user_store.clone(), + fs: fs.clone(), + node_runtime: app_state.node_runtime.clone(), + }, + cx, + ) + }) + .await + .unwrap(); + + TestClient { + app_state, + username: "dev-server".to_string(), + channel_store: cx.read(ChannelStore::global).clone(), + notification_store: 
cx.read(NotificationStore::global).clone(), + state: Default::default(), + } + } + + pub fn disconnect_client(&self, peer_id: PeerId) { + self.connection_killers + .lock() + .remove(&peer_id) + .unwrap() + .store(true, SeqCst); + } + + pub fn simulate_long_connection_interruption( + &self, + peer_id: PeerId, + deterministic: BackgroundExecutor, + ) { + self.forbid_connections(); + self.disconnect_client(peer_id); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + self.allow_connections(); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + deterministic.run_until_parked(); + } + + pub fn forbid_connections(&self) { + self.forbid_connections.store(true, SeqCst); + } + + pub fn allow_connections(&self) { + self.forbid_connections.store(false, SeqCst); + } + + pub async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) { + for ix in 1..clients.len() { + let (left, right) = clients.split_at_mut(ix); + let (client_a, cx_a) = left.last_mut().unwrap(); + for (client_b, cx_b) in right { + client_a + .app_state + .user_store + .update(*cx_a, |store, cx| { + store.request_contact(client_b.user_id().unwrap(), cx) + }) + .await + .unwrap(); + cx_a.executor().run_until_parked(); + client_b + .app_state + .user_store + .update(*cx_b, |store, cx| { + store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx) + }) + .await + .unwrap(); + } + } + } + + pub async fn make_channel( + &self, + channel: &str, + parent: Option, + admin: (&TestClient, &mut TestAppContext), + members: &mut [(&TestClient, &mut TestAppContext)], + ) -> ChannelId { + let (_, admin_cx) = admin; + let channel_id = admin_cx + .read(ChannelStore::global) + .update(admin_cx, |channel_store, cx| { + channel_store.create_channel(channel, parent, cx) + }) + .await + .unwrap(); + + for (member_client, member_cx) in members { + admin_cx + .read(ChannelStore::global) + .update(admin_cx, |channel_store, cx| { + channel_store.invite_member( + channel_id, + member_client.user_id().unwrap(), + ChannelRole::Member, + cx, + ) + }) + .await + .unwrap(); + + admin_cx.executor().run_until_parked(); + + member_cx + .read(ChannelStore::global) + .update(*member_cx, |channels, cx| { + channels.respond_to_channel_invite(channel_id, true, cx) + }) + .await + .unwrap(); + } + + channel_id + } + + pub async fn make_public_channel( + &self, + channel: &str, + client: &TestClient, + cx: &mut TestAppContext, + ) -> ChannelId { + let channel_id = self + .make_channel(channel, None, (client, cx), &mut []) + .await; + + client + .channel_store() + .update(cx, |channel_store, cx| { + channel_store.set_channel_visibility( + channel_id, + proto::ChannelVisibility::Public, + cx, + ) + }) + .await + .unwrap(); + + channel_id + } + + pub async fn make_channel_tree( + &self, + channels: &[(&str, Option<&str>)], + creator: (&TestClient, &mut TestAppContext), + ) -> Vec { + let mut observed_channels = HashMap::default(); + let mut result = Vec::new(); + for (channel, parent) in channels { + let id; + if let Some(parent) = parent { + if let Some(parent_id) = observed_channels.get(parent) { + id = self + .make_channel(channel, Some(*parent_id), (creator.0, creator.1), &mut []) + .await; + } else { + panic!( + "Edge {}->{} referenced before {} was created", + parent, channel, parent + ) + } + } else { + id = self + .make_channel(channel, None, (creator.0, creator.1), &mut []) + .await; + } + + observed_channels.insert(channel, id); + result.push(id); + } + + result + } + + pub async fn 
create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) { + self.make_contacts(clients).await; + + let (left, right) = clients.split_at_mut(1); + let (_client_a, cx_a) = &mut left[0]; + let active_call_a = cx_a.read(ActiveCall::global); + + for (client_b, cx_b) in right { + let user_id_b = client_b.current_user_id(cx_b).to_proto(); + active_call_a + .update(*cx_a, |call, cx| call.invite(user_id_b, None, cx)) + .await + .unwrap(); + + cx_b.executor().run_until_parked(); + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(*cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + } + } + + pub async fn build_app_state( + test_db: &TestDb, + live_kit_test_server: &live_kit_client::TestServer, + executor: Executor, + ) -> Arc { + Arc::new(AppState { + db: test_db.db().clone(), + live_kit_client: Some(Arc::new(live_kit_test_server.create_api_client())), + blob_store_client: None, + rate_limiter: Arc::new(RateLimiter::new(test_db.db().clone())), + executor, + clickhouse_client: None, + config: Config { + http_port: 0, + database_url: "".into(), + database_max_connections: 0, + api_token: "".into(), + invite_link_prefix: "".into(), + live_kit_server: None, + live_kit_key: None, + live_kit_secret: None, + rust_log: None, + log_json: None, + zed_environment: "test".into(), + blob_store_url: None, + blob_store_region: None, + blob_store_access_key: None, + blob_store_secret_key: None, + blob_store_bucket: None, + openai_api_key: None, + google_ai_api_key: None, + anthropic_api_key: None, + clickhouse_url: None, + clickhouse_user: None, + clickhouse_password: None, + clickhouse_database: None, + zed_client_checksum_seed: None, + slack_panics_webhook: None, + auto_join_channel_id: None, + migrations_path: None, + seed_path: None, + supermaven_admin_api_key: None, + }, + }) + } +} + +impl Deref for TestServer { + type Target = Server; + + fn deref(&self) -> &Self::Target { + &self.server + } +} + +impl Drop for TestServer { + fn drop(&mut self) { + self.server.teardown(); + self.test_live_kit_server.teardown().unwrap(); + } +} + +impl Deref for TestClient { + type Target = Arc; + + fn deref(&self) -> &Self::Target { + &self.app_state.client + } +} + +impl TestClient { + pub fn fs(&self) -> &FakeFs { + self.app_state.fs.as_fake() + } + + pub fn channel_store(&self) -> &Model { + &self.channel_store + } + + pub fn notification_store(&self) -> &Model { + &self.notification_store + } + + pub fn user_store(&self) -> &Model { + &self.app_state.user_store + } + + pub fn language_registry(&self) -> &Arc { + &self.app_state.languages + } + + pub fn client(&self) -> &Arc { + &self.app_state.client + } + + pub fn current_user_id(&self, cx: &TestAppContext) -> UserId { + UserId::from_proto( + self.app_state + .user_store + .read_with(cx, |user_store, _| user_store.current_user().unwrap().id), + ) + } + + pub async fn wait_for_current_user(&self, cx: &TestAppContext) { + let mut authed_user = self + .app_state + .user_store + .read_with(cx, |user_store, _| user_store.watch_current_user()); + while authed_user.next().await.unwrap().is_none() {} + } + + pub async fn clear_contacts(&self, cx: &mut TestAppContext) { + self.app_state + .user_store + .update(cx, |store, _| store.clear_contacts()) + .await; + } + + pub fn local_projects(&self) -> impl Deref>> + '_ { + Ref::map(self.state.borrow(), |state| &state.local_projects) + } + + pub fn dev_server_projects(&self) -> impl Deref>> + '_ { + Ref::map(self.state.borrow(), |state| &state.dev_server_projects) + } + + pub 
fn local_projects_mut(&self) -> impl DerefMut>> + '_ { + RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects) + } + + pub fn dev_server_projects_mut(&self) -> impl DerefMut>> + '_ { + RefMut::map(self.state.borrow_mut(), |state| { + &mut state.dev_server_projects + }) + } + + pub fn buffers_for_project<'a>( + &'a self, + project: &Model, + ) -> impl DerefMut>> + 'a { + RefMut::map(self.state.borrow_mut(), |state| { + state.buffers.entry(project.clone()).or_default() + }) + } + + pub fn buffers( + &self, + ) -> impl DerefMut, HashSet>>> + '_ + { + RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers) + } + + pub fn channel_buffers(&self) -> impl DerefMut>> + '_ { + RefMut::map(self.state.borrow_mut(), |state| &mut state.channel_buffers) + } + + pub fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary { + self.app_state + .user_store + .read_with(cx, |store, _| ContactsSummary { + current: store + .contacts() + .iter() + .map(|contact| contact.user.github_login.clone()) + .collect(), + outgoing_requests: store + .outgoing_contact_requests() + .iter() + .map(|user| user.github_login.clone()) + .collect(), + incoming_requests: store + .incoming_contact_requests() + .iter() + .map(|user| user.github_login.clone()) + .collect(), + }) + } + + pub async fn build_local_project( + &self, + root_path: impl AsRef, + cx: &mut TestAppContext, + ) -> (Model, WorktreeId) { + let project = self.build_empty_local_project(cx); + let (worktree, _) = project + .update(cx, |p, cx| { + p.find_or_create_local_worktree(root_path, true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + (project, worktree.read_with(cx, |tree, _| tree.id())) + } + + pub async fn build_test_project(&self, cx: &mut TestAppContext) -> Model { + self.fs() + .insert_tree( + "/a", + json!({ + "1.txt": "one\none\none", + "2.js": "function two() { return 2; }", + "3.rs": "mod test", + }), + ) + .await; + self.build_local_project("/a", cx).await.0 + } + + pub async fn host_workspace( + &self, + workspace: &View, + channel_id: ChannelId, + cx: &mut VisualTestContext, + ) { + cx.update(|cx| { + let active_call = ActiveCall::global(cx); + active_call.update(cx, |call, cx| call.join_channel(channel_id, cx)) + }) + .await + .unwrap(); + cx.update(|cx| { + let active_call = ActiveCall::global(cx); + let project = workspace.read(cx).project().clone(); + active_call.update(cx, |call, cx| call.share_project(project, cx)) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + } + + pub async fn join_workspace<'a>( + &'a self, + channel_id: ChannelId, + cx: &'a mut TestAppContext, + ) -> (View, &'a mut VisualTestContext) { + cx.update(|cx| workspace::join_channel(channel_id, self.app_state.clone(), None, cx)) + .await + .unwrap(); + cx.run_until_parked(); + + self.active_workspace(cx) + } + + pub fn build_empty_local_project(&self, cx: &mut TestAppContext) -> Model { + cx.update(|cx| { + Project::local( + self.client().clone(), + self.app_state.node_runtime.clone(), + self.app_state.user_store.clone(), + self.app_state.languages.clone(), + self.app_state.fs.clone(), + cx, + ) + }) + } + + pub async fn build_dev_server_project( + &self, + host_project_id: u64, + guest_cx: &mut TestAppContext, + ) -> Model { + let active_call = guest_cx.read(ActiveCall::global); + let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone()); + room.update(guest_cx, |room, cx| { + room.join_project( + host_project_id, + 
self.app_state.languages.clone(), + self.app_state.fs.clone(), + cx, + ) + }) + .await + .unwrap() + } + + pub fn build_workspace<'a>( + &'a self, + project: &Model, + cx: &'a mut TestAppContext, + ) -> (View, &'a mut VisualTestContext) { + cx.add_window_view(|cx| { + cx.activate_window(); + Workspace::new( + WorkspaceId::default(), + project.clone(), + self.app_state.clone(), + cx, + ) + }) + } + + pub async fn build_test_workspace<'a>( + &'a self, + cx: &'a mut TestAppContext, + ) -> (View, &'a mut VisualTestContext) { + let project = self.build_test_project(cx).await; + cx.add_window_view(|cx| { + cx.activate_window(); + Workspace::new( + WorkspaceId::default(), + project.clone(), + self.app_state.clone(), + cx, + ) + }) + } + + pub fn active_workspace<'a>( + &'a self, + cx: &'a mut TestAppContext, + ) -> (View, &'a mut VisualTestContext) { + let window = cx.update(|cx| cx.active_window().unwrap().downcast::().unwrap()); + + let view = window.root_view(cx).unwrap(); + let cx = VisualTestContext::from_window(*window.deref(), cx).as_mut(); + // it might be nice to try and cleanup these at the end of each test. + (view, cx) + } +} + +pub fn open_channel_notes( + channel_id: ChannelId, + cx: &mut VisualTestContext, +) -> Task>> { + let window = cx.update(|cx| cx.active_window().unwrap().downcast::().unwrap()); + let view = window.root_view(cx).unwrap(); + + cx.update(|cx| ChannelView::open(channel_id, None, view.clone(), cx)) +} + +impl Drop for TestClient { + fn drop(&mut self) { + self.app_state.client.teardown(); + } +} diff --git a/crates/collab_ui/Cargo.toml b/crates/collab_ui/Cargo.toml new file mode 100644 index 0000000..01da2ac --- /dev/null +++ b/crates/collab_ui/Cargo.toml @@ -0,0 +1,88 @@ +[package] +name = "collab_ui" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/collab_ui.rs" +doctest = false + +[features] +default = [] +stories = ["dep:story"] +test-support = [ + "call/test-support", + "client/test-support", + "collections/test-support", + "editor/test-support", + "gpui/test-support", + "project/test-support", + "settings/test-support", + "util/test-support", + "workspace/test-support", + "http/test-support", +] + +[dependencies] +anyhow.workspace = true +auto_update.workspace = true +call.workspace = true +channel.workspace = true +client.workspace = true +collections.workspace = true +db.workspace = true +editor.workspace = true +emojis.workspace = true +extensions_ui.workspace = true +futures.workspace = true +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +lazy_static.workspace = true +menu.workspace = true +notifications.workspace = true +parking_lot.workspace = true +picker.workspace = true +project.workspace = true +recent_projects.workspace = true +dev_server_projects.workspace = true +release_channel.workspace = true +rich_text.workspace = true +rpc.workspace = true +schemars.workspace = true +serde.workspace = true +serde_derive.workspace = true +serde_json.workspace = true +settings.workspace = true +smallvec.workspace = true +story = { workspace = true, optional = true } +theme.workspace = true +theme_selector.workspace = true +time_format.workspace = true +time.workspace = true +ui.workspace = true +util.workspace = true +vcs_menu.workspace = true +workspace.workspace = true +zed_actions.workspace = true + +[dev-dependencies] +call = { workspace = true, features = ["test-support"] } +client = { workspace = true, features = ["test-support"] } 
+collections = { workspace = true, features = ["test-support"] } +editor = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +notifications = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true +project = { workspace = true, features = ["test-support"] } +rpc = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +tree-sitter-markdown.workspace = true +util = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/collab_ui/LICENSE-GPL b/crates/collab_ui/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/collab_ui/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs new file mode 100644 index 0000000..59099dd --- /dev/null +++ b/crates/collab_ui/src/channel_view.rs @@ -0,0 +1,586 @@ +use anyhow::Result; +use call::report_call_event_for_channel; +use channel::{Channel, ChannelBuffer, ChannelBufferEvent, ChannelStore}; +use client::{ + proto::{self, PeerId}, + ChannelId, Collaborator, ParticipantIndex, +}; +use collections::HashMap; +use editor::{ + display_map::ToDisplayPoint, scroll::Autoscroll, CollaborationHub, DisplayPoint, Editor, + EditorEvent, +}; +use gpui::{ + actions, AnyElement, AnyView, AppContext, ClipboardItem, Entity as _, EventEmitter, + FocusableView, IntoElement as _, Model, Pixels, Point, Render, Subscription, Task, View, + ViewContext, VisualContext as _, WeakView, WindowContext, +}; +use project::Project; +use std::{ + any::{Any, TypeId}, + sync::Arc, +}; +use ui::{prelude::*, Label}; +use util::ResultExt; +use workspace::notifications::NotificationId; +use workspace::{ + item::{FollowableItem, Item, ItemEvent, ItemHandle, TabContentParams}, + register_followable_item, + searchable::SearchableItemHandle, + ItemNavHistory, Pane, SaveIntent, Toast, ViewId, Workspace, WorkspaceId, +}; + +actions!(collab, [CopyLink]); + +pub fn init(cx: &mut AppContext) { + register_followable_item::(cx) +} + +pub struct ChannelView { + pub editor: View, + workspace: WeakView, + project: Model, + channel_store: Model, + channel_buffer: Model, + remote_id: Option, + _editor_event_subscription: Subscription, + _reparse_subscription: Option, +} + +impl ChannelView { + pub fn open( + channel_id: ChannelId, + link_position: Option, + workspace: View, + cx: &mut WindowContext, + ) -> Task>> { + let pane = workspace.read(cx).active_pane().clone(); + let channel_view = Self::open_in_pane( + channel_id, + link_position, + pane.clone(), + workspace.clone(), + cx, + ); + cx.spawn(|mut cx| async move { + let channel_view = channel_view.await?; + pane.update(&mut cx, |pane, cx| { + report_call_event_for_channel( + "open channel notes", + channel_id, + &workspace.read(cx).app_state().client, + cx, + ); + pane.add_item(Box::new(channel_view.clone()), true, true, None, cx); + })?; + anyhow::Ok(channel_view) + }) + } + + pub fn open_in_pane( + channel_id: ChannelId, + link_position: Option, + pane: View, + workspace: View, + cx: &mut WindowContext, + ) -> Task>> { + let weak_workspace = workspace.downgrade(); + let workspace = workspace.read(cx); + let project = workspace.project().to_owned(); + let channel_store = ChannelStore::global(cx); + let language_registry = 
workspace.app_state().languages.clone(); + let markdown = language_registry.language_for_name("Markdown"); + let channel_buffer = + channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx)); + + cx.spawn(|mut cx| async move { + let channel_buffer = channel_buffer.await?; + let markdown = markdown.await.log_err(); + + channel_buffer.update(&mut cx, |channel_buffer, cx| { + channel_buffer.buffer().update(cx, |buffer, cx| { + buffer.set_language_registry(language_registry); + let Some(markdown) = markdown else { + return; + }; + buffer.set_language(Some(markdown), cx); + }) + })?; + + pane.update(&mut cx, |pane, cx| { + let buffer_id = channel_buffer.read(cx).remote_id(cx); + + let existing_view = pane + .items_of_type::() + .find(|view| view.read(cx).channel_buffer.read(cx).remote_id(cx) == buffer_id); + + // If this channel buffer is already open in this pane, just return it. + if let Some(existing_view) = existing_view.clone() { + if existing_view.read(cx).channel_buffer == channel_buffer { + if let Some(link_position) = link_position { + existing_view.update(cx, |channel_view, cx| { + channel_view.focus_position_from_link(link_position, true, cx) + }); + } + return existing_view; + } + } + + let view = cx.new_view(|cx| { + let mut this = + Self::new(project, weak_workspace, channel_store, channel_buffer, cx); + this.acknowledge_buffer_version(cx); + this + }); + + // If the pane contained a disconnected view for this channel buffer, + // replace that. + if let Some(existing_item) = existing_view { + if let Some(ix) = pane.index_for_item(&existing_item) { + pane.close_item_by_id(existing_item.entity_id(), SaveIntent::Skip, cx) + .detach(); + pane.add_item(Box::new(view.clone()), true, true, Some(ix), cx); + } + } + + if let Some(link_position) = link_position { + view.update(cx, |channel_view, cx| { + channel_view.focus_position_from_link(link_position, true, cx) + }); + } + + view + }) + }) + } + + pub fn new( + project: Model, + workspace: WeakView, + channel_store: Model, + channel_buffer: Model, + cx: &mut ViewContext, + ) -> Self { + let buffer = channel_buffer.read(cx).buffer(); + let this = cx.view().downgrade(); + let editor = cx.new_view(|cx| { + let mut editor = Editor::for_buffer(buffer, None, cx); + editor.set_collaboration_hub(Box::new(ChannelBufferCollaborationHub( + channel_buffer.clone(), + ))); + editor.set_custom_context_menu(move |_, position, cx| { + let this = this.clone(); + Some(ui::ContextMenu::build(cx, move |menu, _| { + menu.entry("Copy link to section", None, move |cx| { + this.update(cx, |this, cx| this.copy_link_for_position(position, cx)) + .ok(); + }) + })) + }); + editor + }); + let _editor_event_subscription = + cx.subscribe(&editor, |_, _, e: &EditorEvent, cx| cx.emit(e.clone())); + + cx.subscribe(&channel_buffer, Self::handle_channel_buffer_event) + .detach(); + + Self { + editor, + workspace, + project, + channel_store, + channel_buffer, + remote_id: None, + _editor_event_subscription, + _reparse_subscription: None, + } + } + + fn focus_position_from_link( + &mut self, + position: String, + first_attempt: bool, + cx: &mut ViewContext, + ) { + let position = Channel::slug(&position).to_lowercase(); + let snapshot = self.editor.update(cx, |editor, cx| editor.snapshot(cx)); + + if let Some(outline) = snapshot.buffer_snapshot.outline(None) { + if let Some(item) = outline + .items + .iter() + .find(|item| &Channel::slug(&item.text).to_lowercase() == &position) + { + self.editor.update(cx, |editor, cx| { + 
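+ // A heading matching the link's slug was found: move the cursor there and autoscroll it into view.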
editor.change_selections(Some(Autoscroll::focused()), cx, |s| { + s.replace_cursors_with(|map| vec![item.range.start.to_display_point(&map)]) + }) + }); + return; + } + } + + if !first_attempt { + return; + } + self._reparse_subscription = Some(cx.subscribe( + &self.editor, + move |this, _, e: &EditorEvent, cx| { + match e { + EditorEvent::Reparsed => { + this.focus_position_from_link(position.clone(), false, cx); + this._reparse_subscription.take(); + } + EditorEvent::Edited | EditorEvent::SelectionsChanged { local: true } => { + this._reparse_subscription.take(); + } + _ => {} + }; + }, + )); + } + + fn copy_link(&mut self, _: &CopyLink, cx: &mut ViewContext) { + let position = self + .editor + .update(cx, |editor, cx| editor.selections.newest_display(cx).start); + self.copy_link_for_position(position, cx) + } + + fn copy_link_for_position(&self, position: DisplayPoint, cx: &mut ViewContext) { + let snapshot = self.editor.update(cx, |editor, cx| editor.snapshot(cx)); + + let mut closest_heading = None; + + if let Some(outline) = snapshot.buffer_snapshot.outline(None) { + for item in outline.items { + if item.range.start.to_display_point(&snapshot) > position { + break; + } + closest_heading = Some(item); + } + } + + let Some(channel) = self.channel(cx) else { + return; + }; + + let link = channel.notes_link(closest_heading.map(|heading| heading.text), cx); + cx.write_to_clipboard(ClipboardItem::new(link)); + self.workspace + .update(cx, |workspace, cx| { + struct CopyLinkForPositionToast; + + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + "Link copied to clipboard", + ), + cx, + ); + }) + .ok(); + } + + pub fn channel(&self, cx: &AppContext) -> Option> { + self.channel_buffer.read(cx).channel(cx) + } + + fn handle_channel_buffer_event( + &mut self, + _: Model, + event: &ChannelBufferEvent, + cx: &mut ViewContext, + ) { + match event { + ChannelBufferEvent::Disconnected => self.editor.update(cx, |editor, cx| { + editor.set_read_only(true); + cx.notify(); + }), + ChannelBufferEvent::ChannelChanged => { + self.editor.update(cx, |_, cx| { + cx.emit(editor::EditorEvent::TitleChanged); + cx.notify() + }); + } + ChannelBufferEvent::BufferEdited => { + if self.editor.read(cx).is_focused(cx) { + self.acknowledge_buffer_version(cx); + } else { + self.channel_store.update(cx, |store, cx| { + let channel_buffer = self.channel_buffer.read(cx); + store.update_latest_notes_version( + channel_buffer.channel_id, + channel_buffer.epoch(), + &channel_buffer.buffer().read(cx).version(), + cx, + ) + }); + } + } + ChannelBufferEvent::CollaboratorsChanged => {} + } + } + + fn acknowledge_buffer_version(&mut self, cx: &mut ViewContext) { + self.channel_store.update(cx, |store, cx| { + let channel_buffer = self.channel_buffer.read(cx); + store.acknowledge_notes_version( + channel_buffer.channel_id, + channel_buffer.epoch(), + &channel_buffer.buffer().read(cx).version(), + cx, + ) + }); + self.channel_buffer.update(cx, |buffer, cx| { + buffer.acknowledge_buffer_version(cx); + }); + } +} + +impl EventEmitter for ChannelView {} + +impl Render for ChannelView { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .size_full() + .on_action(cx.listener(Self::copy_link)) + .child(self.editor.clone()) + } +} + +impl FocusableView for ChannelView { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.editor.read(cx).focus_handle(cx) + } +} + +impl Item for ChannelView { + type Event = EditorEvent; + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + 
self_handle: &'a View, + _: &'a AppContext, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } + + fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { + let label = if let Some(channel) = self.channel(cx) { + match ( + self.channel_buffer.read(cx).buffer().read(cx).read_only(), + self.channel_buffer.read(cx).is_connected(), + ) { + (false, true) => format!("#{}", channel.name), + (true, true) => format!("#{} (read-only)", channel.name), + (_, false) => format!("#{} (disconnected)", channel.name), + } + } else { + "channel notes (disconnected)".to_string() + }; + Label::new(label) + .color(if params.selected { + Color::Default + } else { + Color::Muted + }) + .into_any_element() + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + None + } + + fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext) -> Option> { + Some(cx.new_view(|cx| { + Self::new( + self.project.clone(), + self.workspace.clone(), + self.channel_store.clone(), + self.channel_buffer.clone(), + cx, + ) + })) + } + + fn is_singleton(&self, _cx: &AppContext) -> bool { + false + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, cx)) + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| Item::deactivated(editor, cx)) + } + + fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| Item::set_nav_history(editor, history, cx)) + } + + fn as_searchable(&self, _: &View) -> Option> { + Some(Box::new(self.editor.clone())) + } + + fn show_toolbar(&self) -> bool { + true + } + + fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option> { + self.editor.read(cx).pixel_position_of_cursor(cx) + } + + fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { + Editor::to_item_events(event, f) + } +} + +impl FollowableItem for ChannelView { + fn remote_id(&self) -> Option { + self.remote_id + } + + fn to_state_proto(&self, cx: &WindowContext) -> Option { + let channel_buffer = self.channel_buffer.read(cx); + if !channel_buffer.is_connected() { + return None; + } + + Some(proto::view::Variant::ChannelView( + proto::view::ChannelView { + channel_id: channel_buffer.channel_id.0, + editor: if let Some(proto::view::Variant::Editor(proto)) = + self.editor.read(cx).to_state_proto(cx) + { + Some(proto) + } else { + None + }, + }, + )) + } + + fn from_state_proto( + pane: View, + workspace: View, + remote_id: workspace::ViewId, + state: &mut Option, + cx: &mut WindowContext, + ) -> Option>>> { + let Some(proto::view::Variant::ChannelView(_)) = state else { + return None; + }; + let Some(proto::view::Variant::ChannelView(state)) = state.take() else { + unreachable!() + }; + + let open = + ChannelView::open_in_pane(ChannelId(state.channel_id), None, pane, workspace, cx); + + Some(cx.spawn(|mut cx| async move { + let this = open.await?; + + let task = this.update(&mut cx, |this, cx| { + this.remote_id = Some(remote_id); + + if let Some(state) = state.editor { + Some(this.editor.update(cx, |editor, cx| { + editor.apply_update_proto( + &this.project, + proto::update_view::Variant::Editor(proto::update_view::Editor { + selections: state.selections, + pending_selection: state.pending_selection, + scroll_top_anchor: state.scroll_top_anchor, + scroll_x: state.scroll_x, + scroll_y: 
state.scroll_y, + ..Default::default() + }), + cx, + ) + })) + } else { + None + } + })?; + + if let Some(task) = task { + task.await?; + } + + Ok(this) + })) + } + + fn add_event_to_update_proto( + &self, + event: &EditorEvent, + update: &mut Option, + cx: &WindowContext, + ) -> bool { + self.editor + .read(cx) + .add_event_to_update_proto(event, update, cx) + } + + fn apply_update_proto( + &mut self, + project: &Model, + message: proto::update_view::Variant, + cx: &mut ViewContext, + ) -> gpui::Task> { + self.editor.update(cx, |editor, cx| { + editor.apply_update_proto(project, message, cx) + }) + } + + fn set_leader_peer_id(&mut self, leader_peer_id: Option, cx: &mut ViewContext) { + self.editor.update(cx, |editor, cx| { + editor.set_leader_peer_id(leader_peer_id, cx) + }) + } + + fn is_project_item(&self, _cx: &WindowContext) -> bool { + false + } + + fn to_follow_event(event: &Self::Event) -> Option { + Editor::to_follow_event(event) + } +} + +struct ChannelBufferCollaborationHub(Model); + +impl CollaborationHub for ChannelBufferCollaborationHub { + fn collaborators<'a>(&self, cx: &'a AppContext) -> &'a HashMap { + self.0.read(cx).collaborators() + } + + fn user_participant_indices<'a>( + &self, + cx: &'a AppContext, + ) -> &'a HashMap { + self.0.read(cx).user_store().read(cx).participant_indices() + } + + fn user_names(&self, cx: &AppContext) -> HashMap { + let user_ids = self.collaborators(cx).values().map(|c| c.user_id); + self.0 + .read(cx) + .user_store() + .read(cx) + .participant_names(user_ids, cx) + } +} diff --git a/crates/collab_ui/src/chat_panel.rs b/crates/collab_ui/src/chat_panel.rs new file mode 100644 index 0000000..2b05b93 --- /dev/null +++ b/crates/collab_ui/src/chat_panel.rs @@ -0,0 +1,1344 @@ +use crate::{collab_panel, ChatPanelSettings}; +use anyhow::Result; +use call::{room, ActiveCall}; +use channel::{ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId, ChannelStore}; +use client::{ChannelId, Client}; +use collections::HashMap; +use db::kvp::KEY_VALUE_STORE; +use editor::{actions, Editor}; +use gpui::{ + actions, div, list, prelude::*, px, Action, AppContext, AsyncWindowContext, ClipboardItem, + CursorStyle, DismissEvent, ElementId, EventEmitter, FocusHandle, FocusableView, FontWeight, + HighlightStyle, ListOffset, ListScrollEvent, ListState, Model, Render, Stateful, Subscription, + Task, View, ViewContext, VisualContext, WeakView, +}; +use language::LanguageRegistry; +use menu::Confirm; +use message_editor::MessageEditor; +use project::Fs; +use rich_text::{Highlight, RichText}; +use serde::{Deserialize, Serialize}; +use settings::Settings; +use std::{sync::Arc, time::Duration}; +use time::{OffsetDateTime, UtcOffset}; +use ui::{ + popover_menu, prelude::*, Avatar, Button, ContextMenu, IconButton, IconName, KeyBinding, Label, + TabBar, Tooltip, +}; +use util::{ResultExt, TryFutureExt}; +use workspace::{ + dock::{DockPosition, Panel, PanelEvent}, + Workspace, +}; + +mod message_editor; + +const MESSAGE_LOADING_THRESHOLD: usize = 50; +const CHAT_PANEL_KEY: &str = "ChatPanel"; + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(|workspace: &mut Workspace, _| { + workspace.register_action(|workspace, _: &ToggleFocus, cx| { + workspace.toggle_panel_focus::(cx); + }); + }) + .detach(); +} + +pub struct ChatPanel { + client: Arc, + channel_store: Model, + languages: Arc, + message_list: ListState, + active_chat: Option<(Model, Subscription)>, + message_editor: View, + local_timezone: UtcOffset, + fs: Arc, + width: Option, + active: bool, + 
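+ // Most recent background task persisting the serialized panel state (currently just the width) to the key-value store.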
pending_serialization: Task>, + subscriptions: Vec, + is_scrolled_to_bottom: bool, + markdown_data: HashMap, + focus_handle: FocusHandle, + open_context_menu: Option<(u64, Subscription)>, + highlighted_message: Option<(u64, Task<()>)>, + last_acknowledged_message_id: Option, +} + +#[derive(Serialize, Deserialize)] +struct SerializedChatPanel { + width: Option, +} + +actions!(chat_panel, [ToggleFocus]); + +impl ChatPanel { + pub fn new(workspace: &mut Workspace, cx: &mut ViewContext) -> View { + let fs = workspace.app_state().fs.clone(); + let client = workspace.app_state().client.clone(); + let channel_store = ChannelStore::global(cx); + let user_store = workspace.app_state().user_store.clone(); + let languages = workspace.app_state().languages.clone(); + + let input_editor = cx.new_view(|cx| { + MessageEditor::new( + languages.clone(), + user_store.clone(), + None, + cx.new_view(|cx| Editor::auto_height(4, cx)), + cx, + ) + }); + + cx.new_view(|cx: &mut ViewContext| { + let view = cx.view().downgrade(); + let message_list = + ListState::new(0, gpui::ListAlignment::Bottom, px(1000.), move |ix, cx| { + if let Some(view) = view.upgrade() { + view.update(cx, |view, cx| { + view.render_message(ix, cx).into_any_element() + }) + } else { + div().into_any() + } + }); + + message_list.set_scroll_handler(cx.listener(|this, event: &ListScrollEvent, cx| { + if event.visible_range.start < MESSAGE_LOADING_THRESHOLD { + this.load_more_messages(cx); + } + this.is_scrolled_to_bottom = !event.is_scrolled; + })); + + let mut this = Self { + fs, + client, + channel_store, + languages, + message_list, + active_chat: Default::default(), + pending_serialization: Task::ready(None), + message_editor: input_editor, + local_timezone: cx.local_timezone(), + subscriptions: Vec::new(), + is_scrolled_to_bottom: true, + active: false, + width: None, + markdown_data: Default::default(), + focus_handle: cx.focus_handle(), + open_context_menu: None, + highlighted_message: None, + last_acknowledged_message_id: None, + }; + + if let Some(channel_id) = ActiveCall::global(cx) + .read(cx) + .room() + .and_then(|room| room.read(cx).channel_id()) + { + this.select_channel(channel_id, None, cx) + .detach_and_log_err(cx); + } + + this.subscriptions.push(cx.subscribe( + &ActiveCall::global(cx), + move |this: &mut Self, call, event: &room::Event, cx| match event { + room::Event::RoomJoined { channel_id } => { + if let Some(channel_id) = channel_id { + this.select_channel(*channel_id, None, cx) + .detach_and_log_err(cx); + + if call + .read(cx) + .room() + .is_some_and(|room| room.read(cx).contains_guests()) + { + cx.emit(PanelEvent::Activate) + } + } + } + room::Event::RoomLeft { channel_id } => { + if channel_id == &this.channel_id(cx) { + cx.emit(PanelEvent::Close) + } + } + _ => {} + }, + )); + + this + }) + } + + pub fn channel_id(&self, cx: &AppContext) -> Option { + self.active_chat + .as_ref() + .map(|(chat, _)| chat.read(cx).channel_id) + } + + pub fn is_scrolled_to_bottom(&self) -> bool { + self.is_scrolled_to_bottom + } + + pub fn active_chat(&self) -> Option> { + self.active_chat.as_ref().map(|(chat, _)| chat.clone()) + } + + pub fn load( + workspace: WeakView, + cx: AsyncWindowContext, + ) -> Task>> { + cx.spawn(|mut cx| async move { + let serialized_panel = if let Some(panel) = cx + .background_executor() + .spawn(async move { KEY_VALUE_STORE.read_kvp(CHAT_PANEL_KEY) }) + .await + .log_err() + .flatten() + { + Some(serde_json::from_str::(&panel)?) 
+ } else { + None + }; + + workspace.update(&mut cx, |workspace, cx| { + let panel = Self::new(workspace, cx); + if let Some(serialized_panel) = serialized_panel { + panel.update(cx, |panel, cx| { + panel.width = serialized_panel.width.map(|r| r.round()); + cx.notify(); + }); + } + panel + }) + }) + } + + fn serialize(&mut self, cx: &mut ViewContext) { + let width = self.width; + self.pending_serialization = cx.background_executor().spawn( + async move { + KEY_VALUE_STORE + .write_kvp( + CHAT_PANEL_KEY.into(), + serde_json::to_string(&SerializedChatPanel { width })?, + ) + .await?; + anyhow::Ok(()) + } + .log_err(), + ); + } + + fn set_active_chat(&mut self, chat: Model, cx: &mut ViewContext) { + if self.active_chat.as_ref().map(|e| &e.0) != Some(&chat) { + self.markdown_data.clear(); + self.message_list.reset(chat.read(cx).message_count()); + self.message_editor.update(cx, |editor, cx| { + editor.set_channel_chat(chat.clone(), cx); + editor.clear_reply_to_message_id(); + }); + let subscription = cx.subscribe(&chat, Self::channel_did_change); + self.active_chat = Some((chat, subscription)); + self.acknowledge_last_message(cx); + cx.notify(); + } + } + + fn channel_did_change( + &mut self, + _: Model, + event: &ChannelChatEvent, + cx: &mut ViewContext, + ) { + match event { + ChannelChatEvent::MessagesUpdated { + old_range, + new_count, + } => { + self.message_list.splice(old_range.clone(), *new_count); + if self.active { + self.acknowledge_last_message(cx); + } + } + ChannelChatEvent::UpdateMessage { + message_id, + message_ix, + } => { + self.message_list.splice(*message_ix..*message_ix + 1, 1); + self.markdown_data.remove(message_id); + } + ChannelChatEvent::NewMessage { + channel_id, + message_id, + } => { + if !self.active { + self.channel_store.update(cx, |store, cx| { + store.update_latest_message_id(*channel_id, *message_id, cx) + }) + } + } + } + cx.notify(); + } + + fn acknowledge_last_message(&mut self, cx: &mut ViewContext) { + if self.active && self.is_scrolled_to_bottom { + if let Some((chat, _)) = &self.active_chat { + if let Some(channel_id) = self.channel_id(cx) { + self.last_acknowledged_message_id = self + .channel_store + .read(cx) + .last_acknowledge_message_id(channel_id); + } + + chat.update(cx, |chat, cx| { + chat.acknowledge_last_message(cx); + }); + } + } + } + + fn render_replied_to_message( + &mut self, + message_id: Option, + reply_to_message: &Option, + cx: &mut ViewContext, + ) -> impl IntoElement { + let reply_to_message = match reply_to_message { + None => { + return div().child( + h_flex() + .text_ui_xs(cx) + .my_0p5() + .px_0p5() + .gap_x_1() + .rounded_md() + .child(Icon::new(IconName::ReplyArrowRight).color(Color::Muted)) + .when(reply_to_message.is_none(), |el| { + el.child( + Label::new("Message has been deleted...") + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + }), + ) + } + Some(val) => val, + }; + + let user_being_replied_to = reply_to_message.sender.clone(); + let message_being_replied_to = reply_to_message.clone(); + + let message_element_id: ElementId = match message_id { + Some(ChannelMessageId::Saved(id)) => ("reply-to-saved-message-container", id).into(), + Some(ChannelMessageId::Pending(id)) => { + ("reply-to-pending-message-container", id).into() + } // This should never happen + None => ("composing-reply-container").into(), + }; + + let current_channel_id = self.channel_id(cx); + let reply_to_message_id = reply_to_message.id; + + div().child( + h_flex() + .id(message_element_id) + .text_ui_xs(cx) + .my_0p5() + .px_0p5() + 
.gap_x_1() + .rounded_md() + .overflow_hidden() + .hover(|style| style.bg(cx.theme().colors().element_background)) + .child(Icon::new(IconName::ReplyArrowRight).color(Color::Muted)) + .child(Avatar::new(user_being_replied_to.avatar_uri.clone()).size(rems(0.7))) + .child( + div().font_weight(FontWeight::SEMIBOLD).child( + Label::new(format!("@{}", user_being_replied_to.github_login)) + .size(LabelSize::XSmall) + .color(Color::Muted), + ), + ) + .child( + div().overflow_y_hidden().child( + Label::new(message_being_replied_to.body.replace('\n', " ")) + .size(LabelSize::XSmall) + .color(Color::Default), + ), + ) + .cursor(CursorStyle::PointingHand) + .tooltip(|cx| Tooltip::text("Go to message", cx)) + .on_click(cx.listener(move |chat_panel, _, cx| { + if let Some(channel_id) = current_channel_id { + chat_panel + .select_channel(channel_id, reply_to_message_id.into(), cx) + .detach_and_log_err(cx) + } + })), + ) + } + + fn render_message(&mut self, ix: usize, cx: &mut ViewContext) -> impl IntoElement { + let active_chat = &self.active_chat.as_ref().unwrap().0; + let (message, is_continuation_from_previous, is_admin) = + active_chat.update(cx, |active_chat, cx| { + let is_admin = self + .channel_store + .read(cx) + .is_channel_admin(active_chat.channel_id); + + let last_message = active_chat.message(ix.saturating_sub(1)); + let this_message = active_chat.message(ix).clone(); + + let duration_since_last_message = this_message.timestamp - last_message.timestamp; + let is_continuation_from_previous = last_message.sender.id + == this_message.sender.id + && last_message.id != this_message.id + && duration_since_last_message < Duration::from_secs(5 * 60); + + if let ChannelMessageId::Saved(id) = this_message.id { + if this_message + .mentions + .iter() + .any(|(_, user_id)| Some(*user_id) == self.client.user_id()) + { + active_chat.acknowledge_message(id); + } + } + + (this_message, is_continuation_from_previous, is_admin) + }); + + let _is_pending = message.is_pending(); + + let belongs_to_user = Some(message.sender.id) == self.client.user_id(); + let can_delete_message = belongs_to_user || is_admin; + let can_edit_message = belongs_to_user; + + let element_id: ElementId = match message.id { + ChannelMessageId::Saved(id) => ("saved-message", id).into(), + ChannelMessageId::Pending(id) => ("pending-message", id).into(), + }; + + let mentioning_you = message + .mentions + .iter() + .any(|m| Some(m.1) == self.client.user_id()); + + let message_id = match message.id { + ChannelMessageId::Saved(id) => Some(id), + ChannelMessageId::Pending(_) => None, + }; + + let reply_to_message = message + .reply_to_message_id + .and_then(|id| active_chat.read(cx).find_loaded_message(id)) + .cloned(); + + let replied_to_you = + reply_to_message.as_ref().map(|m| m.sender.id) == self.client.user_id(); + + let is_highlighted_message = self + .highlighted_message + .as_ref() + .is_some_and(|(id, _)| Some(id) == message_id.as_ref()); + let background = if is_highlighted_message { + cx.theme().status().info_background + } else if mentioning_you || replied_to_you { + cx.theme().colors().background + } else { + cx.theme().colors().panel_background + }; + + let reply_to_message_id = self.message_editor.read(cx).reply_to_message_id(); + + v_flex() + .w_full() + .relative() + .group("") + .when(!is_continuation_from_previous, |this| this.pt_2()) + .child( + div() + .group("") + .bg(background) + .rounded_md() + .overflow_hidden() + .px_1p5() + .py_0p5() + .when_some(reply_to_message_id, |el, reply_id| { + el.when_some(message_id, 
|el, message_id| { + el.when(reply_id == message_id, |el| { + el.bg(cx.theme().colors().element_selected) + }) + }) + }) + .when(!self.has_open_menu(message_id), |this| { + this.hover(|style| style.bg(cx.theme().colors().element_hover)) + }) + .when(message.reply_to_message_id.is_some(), |el| { + el.child(self.render_replied_to_message( + Some(message.id), + &reply_to_message, + cx, + )) + .when(is_continuation_from_previous, |this| this.mt_2()) + }) + .when( + !is_continuation_from_previous || message.reply_to_message_id.is_some(), + |this| { + this.child( + h_flex() + .text_ui_sm(cx) + .child( + div().absolute().child( + Avatar::new(message.sender.avatar_uri.clone()) + .size(rems(1.)), + ), + ) + .child( + div() + .pl(cx.rem_size() + px(6.0)) + .pr(px(8.0)) + .font_weight(FontWeight::BOLD) + .child( + Label::new(message.sender.github_login.clone()) + .size(LabelSize::Small), + ), + ) + .child( + Label::new(time_format::format_localized_timestamp( + message.timestamp, + OffsetDateTime::now_utc(), + self.local_timezone, + time_format::TimestampFormat::EnhancedAbsolute, + )) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + }, + ) + .when(mentioning_you || replied_to_you, |this| this.my_0p5()) + .map(|el| { + let text = self.markdown_data.entry(message.id).or_insert_with(|| { + Self::render_markdown_with_mentions( + &self.languages, + self.client.id(), + &message, + self.local_timezone, + cx, + ) + }); + el.child( + v_flex() + .w_full() + .text_ui_sm(cx) + .id(element_id) + .child(text.element("body".into(), cx)), + ) + .when(self.has_open_menu(message_id), |el| { + el.bg(cx.theme().colors().element_selected) + }) + }), + ) + .when( + self.last_acknowledged_message_id + .is_some_and(|l| Some(l) == message_id), + |this| { + this.child( + h_flex() + .py_2() + .gap_1() + .items_center() + .child(div().w_full().h_0p5().bg(cx.theme().colors().border)) + .child( + div() + .px_1() + .rounded_md() + .text_ui_xs(cx) + .bg(cx.theme().colors().background) + .child("New messages"), + ) + .child(div().w_full().h_0p5().bg(cx.theme().colors().border)), + ) + }, + ) + .child( + self.render_popover_buttons(&cx, message_id, can_delete_message, can_edit_message) + .mt_neg_2p5(), + ) + } + + fn has_open_menu(&self, message_id: Option) -> bool { + match self.open_context_menu.as_ref() { + Some((id, _)) => Some(*id) == message_id, + None => false, + } + } + + fn render_popover_button(&self, cx: &ViewContext, child: Stateful
) -> Div { + div() + .w_6() + .bg(cx.theme().colors().element_background) + .hover(|style| style.bg(cx.theme().colors().element_hover).rounded_md()) + .child(child) + } + + fn render_popover_buttons( + &self, + cx: &ViewContext, + message_id: Option, + can_delete_message: bool, + can_edit_message: bool, + ) -> Div { + h_flex() + .absolute() + .right_2() + .overflow_hidden() + .rounded_md() + .border_color(cx.theme().colors().element_selected) + .border_1() + .when(!self.has_open_menu(message_id), |el| { + el.visible_on_hover("") + }) + .bg(cx.theme().colors().element_background) + .when_some(message_id, |el, message_id| { + el.child( + self.render_popover_button( + cx, + div() + .id("reply") + .child( + IconButton::new(("reply", message_id), IconName::ReplyArrowRight) + .on_click(cx.listener(move |this, _, cx| { + this.cancel_edit_message(cx); + + this.message_editor.update(cx, |editor, cx| { + editor.set_reply_to_message_id(message_id); + editor.focus_handle(cx).focus(cx); + }) + })), + ) + .tooltip(|cx| Tooltip::text("Reply", cx)), + ), + ) + }) + .when_some(message_id, |el, message_id| { + el.when(can_edit_message, |el| { + el.child( + self.render_popover_button( + cx, + div() + .id("edit") + .child( + IconButton::new(("edit", message_id), IconName::Pencil) + .on_click(cx.listener(move |this, _, cx| { + this.message_editor.update(cx, |editor, cx| { + editor.clear_reply_to_message_id(); + + let message = this + .active_chat() + .and_then(|active_chat| { + active_chat + .read(cx) + .find_loaded_message(message_id) + }) + .cloned(); + + if let Some(message) = message { + let buffer = editor + .editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .expect("message editor must be singleton"); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(message.body.clone(), cx) + }); + + editor.set_edit_message_id(message_id); + editor.focus_handle(cx).focus(cx); + } + }) + })), + ) + .tooltip(|cx| Tooltip::text("Edit", cx)), + ), + ) + }) + }) + .when_some(message_id, |el, message_id| { + let this = cx.view().clone(); + + el.child( + self.render_popover_button( + cx, + div() + .child( + popover_menu(("menu", message_id)) + .trigger(IconButton::new( + ("trigger", message_id), + IconName::Ellipsis, + )) + .menu(move |cx| { + Some(Self::render_message_menu( + &this, + message_id, + can_delete_message, + cx, + )) + }), + ) + .id("more") + .tooltip(|cx| Tooltip::text("More", cx)), + ), + ) + }) + } + + fn render_message_menu( + this: &View, + message_id: u64, + can_delete_message: bool, + cx: &mut WindowContext, + ) -> View { + let menu = { + ContextMenu::build(cx, move |menu, cx| { + menu.entry( + "Copy message text", + None, + cx.handler_for(&this, move |this, cx| { + if let Some(message) = this.active_chat().and_then(|active_chat| { + active_chat.read(cx).find_loaded_message(message_id) + }) { + let text = message.body.clone(); + cx.write_to_clipboard(ClipboardItem::new(text)) + } + }), + ) + .when(can_delete_message, |menu| { + menu.entry( + "Delete message", + None, + cx.handler_for(&this, move |this, cx| this.remove_message(message_id, cx)), + ) + }) + }) + }; + this.update(cx, |this, cx| { + let subscription = cx.subscribe(&menu, |this: &mut Self, _, _: &DismissEvent, _| { + this.open_context_menu = None; + }); + this.open_context_menu = Some((message_id, subscription)); + }); + menu + } + + fn render_markdown_with_mentions( + language_registry: &Arc, + current_user_id: u64, + message: &channel::ChannelMessage, + local_timezone: UtcOffset, + cx: &AppContext, + ) -> RichText { + let 
mentions = message + .mentions + .iter() + .map(|(range, user_id)| rich_text::Mention { + range: range.clone(), + is_self_mention: *user_id == current_user_id, + }) + .collect::>(); + + const MESSAGE_EDITED: &str = " (edited)"; + + let mut body = message.body.clone(); + + if message.edited_at.is_some() { + body.push_str(MESSAGE_EDITED); + } + + let mut rich_text = RichText::new(body, &mentions, language_registry); + + if message.edited_at.is_some() { + let range = (rich_text.text.len() - MESSAGE_EDITED.len())..rich_text.text.len(); + rich_text.highlights.push(( + range.clone(), + Highlight::Highlight(HighlightStyle { + color: Some(cx.theme().colors().text_muted), + ..Default::default() + }), + )); + + if let Some(edit_timestamp) = message.edited_at { + let edit_timestamp_text = time_format::format_localized_timestamp( + edit_timestamp, + OffsetDateTime::now_utc(), + local_timezone, + time_format::TimestampFormat::Absolute, + ); + + rich_text.custom_ranges.push(range); + rich_text.set_tooltip_builder_for_custom_ranges(move |_, _, cx| { + Some(Tooltip::text(edit_timestamp_text.clone(), cx)) + }) + } + } + rich_text + } + + fn send(&mut self, _: &Confirm, cx: &mut ViewContext) { + if let Some((chat, _)) = self.active_chat.as_ref() { + let message = self + .message_editor + .update(cx, |editor, cx| editor.take_message(cx)); + + if let Some(id) = self.message_editor.read(cx).edit_message_id() { + self.message_editor.update(cx, |editor, _| { + editor.clear_edit_message_id(); + }); + + if let Some(task) = chat + .update(cx, |chat, cx| chat.update_message(id, message, cx)) + .log_err() + { + task.detach(); + } + } else { + if let Some(task) = chat + .update(cx, |chat, cx| chat.send_message(message, cx)) + .log_err() + { + task.detach(); + } + } + } + } + + fn remove_message(&mut self, id: u64, cx: &mut ViewContext) { + if let Some((chat, _)) = self.active_chat.as_ref() { + chat.update(cx, |chat, cx| chat.remove_message(id, cx).detach()) + } + } + + fn load_more_messages(&mut self, cx: &mut ViewContext) { + if let Some((chat, _)) = self.active_chat.as_ref() { + chat.update(cx, |channel, cx| { + if let Some(task) = channel.load_more_messages(cx) { + task.detach(); + } + }) + } + } + + pub fn select_channel( + &mut self, + selected_channel_id: ChannelId, + scroll_to_message_id: Option, + cx: &mut ViewContext, + ) -> Task> { + let open_chat = self + .active_chat + .as_ref() + .and_then(|(chat, _)| { + (chat.read(cx).channel_id == selected_channel_id) + .then(|| Task::ready(anyhow::Ok(chat.clone()))) + }) + .unwrap_or_else(|| { + self.channel_store.update(cx, |store, cx| { + store.open_channel_chat(selected_channel_id, cx) + }) + }); + + cx.spawn(|this, mut cx| async move { + let chat = open_chat.await?; + let highlight_message_id = scroll_to_message_id; + let scroll_to_message_id = this.update(&mut cx, |this, cx| { + this.set_active_chat(chat.clone(), cx); + + scroll_to_message_id.or_else(|| this.last_acknowledged_message_id) + })?; + + if let Some(message_id) = scroll_to_message_id { + if let Some(item_ix) = + ChannelChat::load_history_since_message(chat.clone(), message_id, (*cx).clone()) + .await + { + this.update(&mut cx, |this, cx| { + if let Some(highlight_message_id) = highlight_message_id { + let task = cx.spawn({ + |this, mut cx| async move { + cx.background_executor().timer(Duration::from_secs(2)).await; + this.update(&mut cx, |this, cx| { + this.highlighted_message.take(); + cx.notify(); + }) + .ok(); + } + }); + + this.highlighted_message = Some((highlight_message_id, task)); + } + + if 
this.active_chat.as_ref().map_or(false, |(c, _)| *c == chat) { + this.message_list.scroll_to(ListOffset { + item_ix, + offset_in_item: px(0.0), + }); + cx.notify(); + } + })?; + } + } + + Ok(()) + }) + } + + fn close_reply_preview(&mut self, cx: &mut ViewContext) { + self.message_editor + .update(cx, |editor, _| editor.clear_reply_to_message_id()); + } + + fn cancel_edit_message(&mut self, cx: &mut ViewContext) { + self.message_editor.update(cx, |editor, cx| { + // only clear the editor input if we were editing a message + if editor.edit_message_id().is_none() { + return; + } + + editor.clear_edit_message_id(); + + let buffer = editor + .editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .expect("message editor must be singleton"); + + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + }); + } +} + +impl Render for ChatPanel { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let channel_id = self + .active_chat + .as_ref() + .map(|(c, _)| c.read(cx).channel_id); + let message_editor = self.message_editor.read(cx); + + let reply_to_message_id = message_editor.reply_to_message_id(); + let edit_message_id = message_editor.edit_message_id(); + + v_flex() + .key_context("ChatPanel") + .track_focus(&self.focus_handle) + .size_full() + .on_action(cx.listener(Self::send)) + .child( + h_flex().child( + TabBar::new("chat_header").child( + h_flex() + .w_full() + .h(rems(ui::Tab::CONTAINER_HEIGHT_IN_REMS)) + .px_2() + .child(Label::new( + self.active_chat + .as_ref() + .and_then(|c| { + Some(format!("#{}", c.0.read(cx).channel(cx)?.name)) + }) + .unwrap_or("Chat".to_string()), + )), + ), + ), + ) + .child(div().flex_grow().px_2().map(|this| { + if self.active_chat.is_some() { + this.child(list(self.message_list.clone()).size_full()) + } else { + this.child( + div() + .size_full() + .p_4() + .child( + Label::new("Select a channel to chat in.") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + div().pt_1().w_full().items_center().child( + Button::new("toggle-collab", "Open") + .full_width() + .key_binding(KeyBinding::for_action( + &collab_panel::ToggleFocus, + cx, + )) + .on_click(|_, cx| { + cx.dispatch_action( + collab_panel::ToggleFocus.boxed_clone(), + ) + }), + ), + ), + ) + } + })) + .when(!self.is_scrolled_to_bottom, |el| { + el.child(div().border_t_1().border_color(cx.theme().colors().border)) + }) + .when_some(edit_message_id, |el, _| { + el.child( + h_flex() + .px_2() + .text_ui_xs(cx) + .justify_between() + .border_t_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().background) + .child("Editing message") + .child( + IconButton::new("cancel-edit-message", IconName::Close) + .shape(ui::IconButtonShape::Square) + .tooltip(|cx| Tooltip::text("Cancel edit message", cx)) + .on_click(cx.listener(move |this, _, cx| { + this.cancel_edit_message(cx); + })), + ), + ) + }) + .when_some(reply_to_message_id, |el, reply_to_message_id| { + let reply_message = self + .active_chat() + .and_then(|active_chat| { + active_chat + .read(cx) + .find_loaded_message(reply_to_message_id) + }) + .cloned(); + + el.when_some(reply_message, |el, reply_message| { + let user_being_replied_to = reply_message.sender.clone(); + + el.child( + h_flex() + .when(!self.is_scrolled_to_bottom, |el| { + el.border_t_1().border_color(cx.theme().colors().border) + }) + .justify_between() + .overflow_hidden() + .items_start() + .py_1() + .px_2() + .bg(cx.theme().colors().background) + .child( + div().flex_shrink().overflow_hidden().child( + h_flex() + 
.id(("reply-preview", reply_to_message_id)) + .child(Label::new("Replying to ").size(LabelSize::Small)) + .child( + div().font_weight(FontWeight::BOLD).child( + Label::new(format!( + "@{}", + user_being_replied_to.github_login.clone() + )) + .size(LabelSize::Small), + ), + ) + .when_some(channel_id, |this, channel_id| { + this.cursor_pointer().on_click(cx.listener( + move |chat_panel, _, cx| { + chat_panel + .select_channel( + channel_id, + reply_to_message_id.into(), + cx, + ) + .detach_and_log_err(cx) + }, + )) + }), + ), + ) + .child( + IconButton::new("close-reply-preview", IconName::Close) + .shape(ui::IconButtonShape::Square) + .tooltip(|cx| Tooltip::text("Close reply", cx)) + .on_click(cx.listener(move |this, _, cx| { + this.close_reply_preview(cx); + })), + ), + ) + }) + }) + .children( + Some( + h_flex() + .p_2() + .on_action(cx.listener(|this, _: &actions::Cancel, cx| { + this.cancel_edit_message(cx); + this.close_reply_preview(cx); + })) + .map(|el| el.child(self.message_editor.clone())), + ) + .filter(|_| self.active_chat.is_some()), + ) + .into_any() + } +} + +impl FocusableView for ChatPanel { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + if self.active_chat.is_some() { + self.message_editor.read(cx).focus_handle(cx) + } else { + self.focus_handle.clone() + } + } +} + +impl Panel for ChatPanel { + fn position(&self, cx: &gpui::WindowContext) -> DockPosition { + ChatPanelSettings::get_global(cx).dock + } + + fn position_is_valid(&self, position: DockPosition) -> bool { + matches!(position, DockPosition::Left | DockPosition::Right) + } + + fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext) { + settings::update_settings_file::(self.fs.clone(), cx, move |settings| { + settings.dock = Some(position) + }); + } + + fn size(&self, cx: &gpui::WindowContext) -> Pixels { + self.width + .unwrap_or_else(|| ChatPanelSettings::get_global(cx).default_width) + } + + fn set_size(&mut self, size: Option, cx: &mut ViewContext) { + self.width = size; + self.serialize(cx); + cx.notify(); + } + + fn set_active(&mut self, active: bool, cx: &mut ViewContext) { + self.active = active; + if active { + self.acknowledge_last_message(cx); + } + } + + fn persistent_name() -> &'static str { + "ChatPanel" + } + + fn icon(&self, cx: &WindowContext) -> Option { + Some(ui::IconName::MessageBubbles).filter(|_| ChatPanelSettings::get_global(cx).button) + } + + fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { + Some("Chat Panel") + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } + + fn starts_open(&self, cx: &WindowContext) -> bool { + ActiveCall::global(cx) + .read(cx) + .room() + .is_some_and(|room| room.read(cx).contains_guests()) + } +} + +impl EventEmitter for ChatPanel {} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::HighlightStyle; + use pretty_assertions::assert_eq; + use rich_text::Highlight; + use time::OffsetDateTime; + use util::test::marked_text_ranges; + + #[gpui::test] + fn test_render_markdown_with_mentions(cx: &mut AppContext) { + let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); + let (body, ranges) = marked_text_ranges("*hi*, «@abc», let's **call** «@fgh»", false); + let message = channel::ChannelMessage { + id: ChannelMessageId::Saved(0), + body, + timestamp: OffsetDateTime::now_utc(), + sender: Arc::new(client::User { + github_login: "fgh".into(), + avatar_uri: "avatar_fgh".into(), + id: 103, + }), + nonce: 5, + mentions: vec![(ranges[0].clone(), 101), 
(ranges[1].clone(), 102)], + reply_to_message_id: None, + edited_at: None, + }; + + let message = ChatPanel::render_markdown_with_mentions( + &language_registry, + 102, + &message, + UtcOffset::UTC, + cx, + ); + + // Note that the "'" was replaced with ’ due to smart punctuation. + let (body, ranges) = marked_text_ranges("«hi», «@abc», let’s «call» «@fgh»", false); + assert_eq!(message.text, body); + assert_eq!( + message.highlights, + vec![ + ( + ranges[0].clone(), + HighlightStyle { + font_style: Some(gpui::FontStyle::Italic), + ..Default::default() + } + .into() + ), + (ranges[1].clone(), Highlight::Mention), + ( + ranges[2].clone(), + HighlightStyle { + font_weight: Some(gpui::FontWeight::BOLD), + ..Default::default() + } + .into() + ), + (ranges[3].clone(), Highlight::SelfMention) + ] + ); + } + + #[gpui::test] + fn test_render_markdown_with_auto_detect_links(cx: &mut AppContext) { + let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); + let message = channel::ChannelMessage { + id: ChannelMessageId::Saved(0), + body: "Here is a link https://zed.dev to zeds website".to_string(), + timestamp: OffsetDateTime::now_utc(), + sender: Arc::new(client::User { + github_login: "fgh".into(), + avatar_uri: "avatar_fgh".into(), + id: 103, + }), + nonce: 5, + mentions: Vec::new(), + reply_to_message_id: None, + edited_at: None, + }; + + let message = ChatPanel::render_markdown_with_mentions( + &language_registry, + 102, + &message, + UtcOffset::UTC, + cx, + ); + + // Note that the "'" was replaced with ’ due to smart punctuation. + let (body, ranges) = + marked_text_ranges("Here is a link «https://zed.dev» to zeds website", false); + assert_eq!(message.text, body); + assert_eq!(1, ranges.len()); + assert_eq!( + message.highlights, + vec![( + ranges[0].clone(), + HighlightStyle { + underline: Some(gpui::UnderlineStyle { + thickness: 1.0.into(), + ..Default::default() + }), + ..Default::default() + } + .into() + ),] + ); + } + + #[gpui::test] + fn test_render_markdown_with_auto_detect_links_and_additional_formatting(cx: &mut AppContext) { + let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); + let message = channel::ChannelMessage { + id: ChannelMessageId::Saved(0), + body: "**Here is a link https://zed.dev to zeds website**".to_string(), + timestamp: OffsetDateTime::now_utc(), + sender: Arc::new(client::User { + github_login: "fgh".into(), + avatar_uri: "avatar_fgh".into(), + id: 103, + }), + nonce: 5, + mentions: Vec::new(), + reply_to_message_id: None, + edited_at: None, + }; + + let message = ChatPanel::render_markdown_with_mentions( + &language_registry, + 102, + &message, + UtcOffset::UTC, + cx, + ); + + // Note that the "'" was replaced with ’ due to smart punctuation. 
+ let (body, ranges) = marked_text_ranges( + "«Here is a link »«https://zed.dev»« to zeds website»", + false, + ); + assert_eq!(message.text, body); + assert_eq!(3, ranges.len()); + assert_eq!( + message.highlights, + vec![ + ( + ranges[0].clone(), + HighlightStyle { + font_weight: Some(gpui::FontWeight::BOLD), + ..Default::default() + } + .into() + ), + ( + ranges[1].clone(), + HighlightStyle { + font_weight: Some(gpui::FontWeight::BOLD), + underline: Some(gpui::UnderlineStyle { + thickness: 1.0.into(), + ..Default::default() + }), + ..Default::default() + } + .into() + ), + ( + ranges[2].clone(), + HighlightStyle { + font_weight: Some(gpui::FontWeight::BOLD), + ..Default::default() + } + .into() + ), + ] + ); + } +} diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs new file mode 100644 index 0000000..35439a4 --- /dev/null +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -0,0 +1,537 @@ +use anyhow::Result; +use channel::{ChannelChat, ChannelStore, MessageParams}; +use client::{UserId, UserStore}; +use collections::HashSet; +use editor::{AnchorRangeExt, CompletionProvider, Editor, EditorElement, EditorStyle}; +use fuzzy::{StringMatch, StringMatchCandidate}; +use gpui::{ + AsyncWindowContext, FocusableView, FontStyle, FontWeight, HighlightStyle, IntoElement, Model, + Render, Task, TextStyle, View, ViewContext, WeakView, WhiteSpace, +}; +use language::{ + language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry, + LanguageServerId, ToOffset, +}; +use lazy_static::lazy_static; +use parking_lot::RwLock; +use project::{search::SearchQuery, Completion}; +use settings::Settings; +use std::{ops::Range, sync::Arc, time::Duration}; +use theme::ThemeSettings; +use ui::{prelude::*, TextSize}; + +use crate::panel_settings::MessageEditorSettings; + +const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); + +lazy_static! 
{ + static ref MENTIONS_SEARCH: SearchQuery = + SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap(); +} + +pub struct MessageEditor { + pub editor: View, + user_store: Model, + channel_chat: Option>, + mentions: Vec, + mentions_task: Option>, + reply_to_message_id: Option, + edit_message_id: Option, +} + +struct MessageEditorCompletionProvider(WeakView); + +impl CompletionProvider for MessageEditorCompletionProvider { + fn completions( + &self, + buffer: &Model, + buffer_position: language::Anchor, + cx: &mut ViewContext, + ) -> Task>> { + let Some(handle) = self.0.upgrade() else { + return Task::ready(Ok(Vec::new())); + }; + handle.update(cx, |message_editor, cx| { + message_editor.completions(buffer, buffer_position, cx) + }) + } + + fn resolve_completions( + &self, + _buffer: Model, + _completion_indices: Vec, + _completions: Arc>>, + _cx: &mut ViewContext, + ) -> Task> { + Task::ready(Ok(false)) + } + + fn apply_additional_edits_for_completion( + &self, + _buffer: Model, + _completion: Completion, + _push_to_history: bool, + _cx: &mut ViewContext, + ) -> Task>> { + Task::ready(Ok(None)) + } +} + +impl MessageEditor { + pub fn new( + language_registry: Arc, + user_store: Model, + channel_chat: Option>, + editor: View, + cx: &mut ViewContext, + ) -> Self { + let this = cx.view().downgrade(); + editor.update(cx, |editor, cx| { + editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); + editor.set_use_autoclose(false); + editor.set_completion_provider(Box::new(MessageEditorCompletionProvider(this))); + editor.set_auto_replace_emoji_shortcode( + MessageEditorSettings::get_global(cx) + .auto_replace_emoji_shortcode + .unwrap_or_default(), + ); + }); + + let buffer = editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .expect("message editor must be singleton"); + + cx.subscribe(&buffer, Self::on_buffer_event).detach(); + cx.observe_global::(|view, cx| { + view.editor.update(cx, |editor, cx| { + editor.set_auto_replace_emoji_shortcode( + MessageEditorSettings::get_global(cx) + .auto_replace_emoji_shortcode + .unwrap_or_default(), + ) + }) + }) + .detach(); + + let markdown = language_registry.language_for_name("Markdown"); + cx.spawn(|_, mut cx| async move { + let markdown = markdown.await?; + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(Some(markdown), cx) + }) + }) + .detach_and_log_err(cx); + + Self { + editor, + user_store, + channel_chat, + mentions: Vec::new(), + mentions_task: None, + reply_to_message_id: None, + edit_message_id: None, + } + } + + pub fn reply_to_message_id(&self) -> Option { + self.reply_to_message_id + } + + pub fn set_reply_to_message_id(&mut self, reply_to_message_id: u64) { + self.reply_to_message_id = Some(reply_to_message_id); + } + + pub fn clear_reply_to_message_id(&mut self) { + self.reply_to_message_id = None; + } + + pub fn edit_message_id(&self) -> Option { + self.edit_message_id + } + + pub fn set_edit_message_id(&mut self, edit_message_id: u64) { + self.edit_message_id = Some(edit_message_id); + } + + pub fn clear_edit_message_id(&mut self) { + self.edit_message_id = None; + } + + pub fn set_channel_chat(&mut self, chat: Model, cx: &mut ViewContext) { + let channel_id = chat.read(cx).channel_id; + self.channel_chat = Some(chat); + let channel_name = ChannelStore::global(cx) + .read(cx) + .channel_for_id(channel_id) + .map(|channel| channel.name.clone()); + self.editor.update(cx, |editor, cx| { + if let Some(channel_name) = channel_name { + editor.set_placeholder_text(format!("Message 
#{channel_name}"), cx); + } else { + editor.set_placeholder_text("Message Channel", cx); + } + }); + } + + pub fn take_message(&mut self, cx: &mut ViewContext) -> MessageParams { + self.editor.update(cx, |editor, cx| { + let highlights = editor.text_highlights::(cx); + let text = editor.text(cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + let mentions = if let Some((_, ranges)) = highlights { + ranges + .iter() + .map(|range| range.to_offset(&snapshot)) + .zip(self.mentions.iter().copied()) + .collect() + } else { + Vec::new() + }; + + editor.clear(cx); + self.mentions.clear(); + let reply_to_message_id = std::mem::take(&mut self.reply_to_message_id); + + MessageParams { + text, + mentions, + reply_to_message_id, + } + }) + } + + fn on_buffer_event( + &mut self, + buffer: Model, + event: &language::Event, + cx: &mut ViewContext, + ) { + if let language::Event::Reparsed | language::Event::Edited = event { + let buffer = buffer.read(cx).snapshot(); + self.mentions_task = Some(cx.spawn(|this, cx| async move { + cx.background_executor() + .timer(MENTIONS_DEBOUNCE_INTERVAL) + .await; + Self::find_mentions(this, buffer, cx).await; + })); + } + } + + fn completions( + &mut self, + buffer: &Model, + end_anchor: Anchor, + cx: &mut ViewContext, + ) -> Task>> { + if let Some((start_anchor, query, candidates)) = + self.collect_mention_candidates(buffer, end_anchor, cx) + { + if !candidates.is_empty() { + return cx.spawn(|_, cx| async move { + Ok(Self::resolve_completions_for_candidates( + &cx, + query.as_str(), + &candidates, + start_anchor..end_anchor, + Self::completion_for_mention, + ) + .await) + }); + } + } + + if let Some((start_anchor, query, candidates)) = + self.collect_emoji_candidates(buffer, end_anchor, cx) + { + if !candidates.is_empty() { + return cx.spawn(|_, cx| async move { + Ok(Self::resolve_completions_for_candidates( + &cx, + query.as_str(), + candidates, + start_anchor..end_anchor, + Self::completion_for_emoji, + ) + .await) + }); + } + } + + Task::ready(Ok(vec![])) + } + + async fn resolve_completions_for_candidates( + cx: &AsyncWindowContext, + query: &str, + candidates: &[StringMatchCandidate], + range: Range, + completion_fn: impl Fn(&StringMatch) -> (String, CodeLabel), + ) -> Vec { + let matches = fuzzy::match_strings( + &candidates, + &query, + true, + 10, + &Default::default(), + cx.background_executor().clone(), + ) + .await; + + matches + .into_iter() + .map(|mat| { + let (new_text, label) = completion_fn(&mat); + Completion { + old_range: range.clone(), + new_text, + label, + documentation: None, + server_id: LanguageServerId(0), // TODO: Make this optional or something? + lsp_completion: Default::default(), // TODO: Make this optional or something? 
+ } + }) + .collect() + } + + fn completion_for_mention(mat: &StringMatch) -> (String, CodeLabel) { + let label = CodeLabel { + filter_range: 1..mat.string.len() + 1, + text: format!("@{}", mat.string), + runs: Vec::new(), + }; + (mat.string.clone(), label) + } + + fn completion_for_emoji(mat: &StringMatch) -> (String, CodeLabel) { + let emoji = emojis::get_by_shortcode(&mat.string).unwrap(); + let label = CodeLabel { + filter_range: 1..mat.string.len() + 1, + text: format!(":{}: {}", mat.string, emoji), + runs: Vec::new(), + }; + (emoji.to_string(), label) + } + + fn collect_mention_candidates( + &mut self, + buffer: &Model<Buffer>, + end_anchor: Anchor, + cx: &mut ViewContext<Self>, + ) -> Option<(Anchor, String, Vec<StringMatchCandidate>)> { + let end_offset = end_anchor.to_offset(buffer.read(cx)); + + let Some(query) = buffer.update(cx, |buffer, _| { + let mut query = String::new(); + for ch in buffer.reversed_chars_at(end_offset).take(100) { + if ch == '@' { + return Some(query.chars().rev().collect::<String>()); + } + if ch.is_whitespace() || !ch.is_ascii() { + break; + } + query.push(ch); + } + None + }) else { + return None; + }; + + let start_offset = end_offset - query.len(); + let start_anchor = buffer.read(cx).anchor_before(start_offset); + + let mut names = HashSet::default(); + if let Some(chat) = self.channel_chat.as_ref() { + let chat = chat.read(cx); + for participant in ChannelStore::global(cx) + .read(cx) + .channel_participants(chat.channel_id) + { + names.insert(participant.github_login.clone()); + } + for message in chat + .messages_in_range(chat.message_count().saturating_sub(100)..chat.message_count()) + { + names.insert(message.sender.github_login.clone()); + } + } + + let candidates = names + .into_iter() + .map(|user| StringMatchCandidate { + id: 0, + string: user.clone(), + char_bag: user.chars().collect(), + }) + .collect::<Vec<_>>(); + + Some((start_anchor, query, candidates)) + } + + fn collect_emoji_candidates( + &mut self, + buffer: &Model<Buffer>, + end_anchor: Anchor, + cx: &mut ViewContext<Self>, + ) -> Option<(Anchor, String, &'static [StringMatchCandidate])> { + lazy_static! 
{ + static ref EMOJI_FUZZY_MATCH_CANDIDATES: Vec = { + let emojis = emojis::iter() + .flat_map(|s| s.shortcodes()) + .map(|emoji| StringMatchCandidate { + id: 0, + string: emoji.to_string(), + char_bag: emoji.chars().collect(), + }) + .collect::>(); + emojis + }; + } + + let end_offset = end_anchor.to_offset(buffer.read(cx)); + + let Some(query) = buffer.update(cx, |buffer, _| { + let mut query = String::new(); + for ch in buffer.reversed_chars_at(end_offset).take(100) { + if ch == ':' { + let next_char = buffer + .reversed_chars_at(end_offset - query.len() - 1) + .next(); + // Ensure we are at the start of the message or that the previous character is a whitespace + if next_char.is_none() || next_char.unwrap().is_whitespace() { + return Some(query.chars().rev().collect::()); + } + + // If the previous character is not a whitespace, we are in the middle of a word + // and we only want to complete the shortcode if the word is made up of other emojis + let mut containing_word = String::new(); + for ch in buffer + .reversed_chars_at(end_offset - query.len() - 1) + .take(100) + { + if ch.is_whitespace() { + break; + } + containing_word.push(ch); + } + let containing_word = containing_word.chars().rev().collect::(); + if util::word_consists_of_emojis(containing_word.as_str()) { + return Some(query.chars().rev().collect::()); + } + break; + } + if ch.is_whitespace() || !ch.is_ascii() { + break; + } + query.push(ch); + } + None + }) else { + return None; + }; + + let start_offset = end_offset - query.len() - 1; + let start_anchor = buffer.read(cx).anchor_before(start_offset); + + Some((start_anchor, query, &EMOJI_FUZZY_MATCH_CANDIDATES)) + } + + async fn find_mentions( + this: WeakView, + buffer: BufferSnapshot, + mut cx: AsyncWindowContext, + ) { + let (buffer, ranges) = cx + .background_executor() + .spawn(async move { + let ranges = MENTIONS_SEARCH.search(&buffer, None).await; + (buffer, ranges) + }) + .await; + + this.update(&mut cx, |this, cx| { + let mut anchor_ranges = Vec::new(); + let mut mentioned_user_ids = Vec::new(); + let mut text = String::new(); + + this.editor.update(cx, |editor, cx| { + let multi_buffer = editor.buffer().read(cx).snapshot(cx); + for range in ranges { + text.clear(); + text.extend(buffer.text_for_range(range.clone())); + if let Some(username) = text.strip_prefix('@') { + if let Some(user) = this + .user_store + .read(cx) + .cached_user_by_github_login(username) + { + let start = multi_buffer.anchor_after(range.start); + let end = multi_buffer.anchor_after(range.end); + + mentioned_user_ids.push(user.id); + anchor_ranges.push(start..end); + } + } + } + + editor.clear_highlights::(cx); + editor.highlight_text::( + anchor_ranges, + HighlightStyle { + font_weight: Some(FontWeight::BOLD), + ..Default::default() + }, + cx, + ) + }); + + this.mentions = mentioned_user_ids; + this.mentions_task.take(); + }) + .ok(); + } + + pub(crate) fn focus_handle(&self, cx: &gpui::AppContext) -> gpui::FocusHandle { + self.editor.read(cx).focus_handle(cx) + } +} + +impl Render for MessageEditor { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: if self.editor.read(cx).read_only(cx) { + cx.theme().colors().text_disabled + } else { + cx.theme().colors().text + }, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: TextSize::Small.rems(cx).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + 
line_height: relative(1.3), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }; + + div() + .w_full() + .px_2() + .py_1() + .bg(cx.theme().colors().editor_background) + .rounded_md() + .child(EditorElement::new( + &self.editor, + EditorStyle { + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + )) + } +} diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs new file mode 100644 index 0000000..a609503 --- /dev/null +++ b/crates/collab_ui/src/collab_panel.rs @@ -0,0 +1,3012 @@ +mod channel_modal; +mod contact_finder; + +use self::channel_modal::ChannelModal; +use crate::{ + channel_view::ChannelView, chat_panel::ChatPanel, face_pile::FacePile, + CollaborationPanelSettings, +}; +use call::ActiveCall; +use channel::{Channel, ChannelEvent, ChannelStore}; +use client::{ChannelId, Client, Contact, ProjectId, User, UserStore}; +use contact_finder::ContactFinder; +use db::kvp::KEY_VALUE_STORE; +use editor::{Editor, EditorElement, EditorStyle}; +use fuzzy::{match_strings, StringMatchCandidate}; +use gpui::{ + actions, anchored, canvas, deferred, div, fill, list, point, prelude::*, px, AnyElement, + AppContext, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div, + EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, InteractiveElement, + IntoElement, ListOffset, ListState, Model, MouseDownEvent, ParentElement, Pixels, Point, + PromptLevel, Render, SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext, + VisualContext, WeakView, WhiteSpace, +}; +use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev}; +use project::{Fs, Project}; +use rpc::{ + proto::{self, ChannelVisibility, PeerId}, + ErrorCode, ErrorExt, +}; +use serde_derive::{Deserialize, Serialize}; +use settings::Settings; +use smallvec::SmallVec; +use std::{mem, sync::Arc}; +use theme::{ActiveTheme, ThemeSettings}; +use ui::{ + prelude::*, tooltip_container, Avatar, AvatarAvailabilityIndicator, Button, Color, ContextMenu, + Icon, IconButton, IconName, IconSize, Indicator, Label, ListHeader, ListItem, Tooltip, +}; +use util::{maybe, ResultExt, TryFutureExt}; +use workspace::{ + dock::{DockPosition, Panel, PanelEvent}, + notifications::{DetachAndPromptErr, NotifyResultExt, NotifyTaskExt}, + OpenChannelNotes, Workspace, +}; + +actions!( + collab_panel, + [ + ToggleFocus, + Remove, + Secondary, + CollapseSelectedChannel, + ExpandSelectedChannel, + StartMoveChannel, + MoveSelected, + InsertSpace, + ] +); + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +struct ChannelMoveClipboard { + channel_id: ChannelId, +} + +const COLLABORATION_PANEL_KEY: &str = "CollaborationPanel"; + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(|workspace: &mut Workspace, _| { + workspace.register_action(|workspace, _: &ToggleFocus, cx| { + workspace.toggle_panel_focus::(cx); + }); + workspace.register_action(|_, _: &OpenChannelNotes, cx| { + let channel_id = ActiveCall::global(cx) + .read(cx) + .room() + .and_then(|room| room.read(cx).channel_id()); + + if let Some(channel_id) = channel_id { + let workspace = cx.view().clone(); + cx.window_context().defer(move |cx| { + ChannelView::open(channel_id, None, workspace, cx).detach_and_log_err(cx) + }); + } + }); + }) + .detach(); +} + +#[derive(Debug)] +pub enum ChannelEditingState { + Create { + location: Option, + pending_name: Option, + }, + Rename { + location: ChannelId, + pending_name: Option, + }, +} + +impl 
ChannelEditingState { + fn pending_name(&self) -> Option<String> { + match self { + ChannelEditingState::Create { pending_name, .. } => pending_name.clone(), + ChannelEditingState::Rename { pending_name, .. } => pending_name.clone(), + } + } +} + +pub struct CollabPanel { + width: Option<Pixels>, + fs: Arc<dyn Fs>, + focus_handle: FocusHandle, + channel_clipboard: Option<ChannelMoveClipboard>, + pending_serialization: Task<Option<()>>, + context_menu: Option<(View<ContextMenu>, Point<Pixels>, Subscription)>, + list_state: ListState, + filter_editor: View<Editor>, + channel_name_editor: View<Editor>, + channel_editing_state: Option<ChannelEditingState>, + entries: Vec<ListEntry>, + selection: Option<usize>, + channel_store: Model<ChannelStore>, + user_store: Model<UserStore>, + client: Arc<Client>, + project: Model<Project>, + match_candidates: Vec<StringMatchCandidate>, + subscriptions: Vec<Subscription>, + collapsed_sections: Vec<Section>
, + collapsed_channels: Vec, + workspace: WeakView, +} + +#[derive(Serialize, Deserialize)] +struct SerializedCollabPanel { + width: Option, + collapsed_channels: Option>, +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)] +enum Section { + ActiveCall, + Channels, + ChannelInvites, + ContactRequests, + Contacts, + Online, + Offline, +} + +#[derive(Clone, Debug)] +enum ListEntry { + Header(Section), + CallParticipant { + user: Arc, + peer_id: Option, + is_pending: bool, + role: proto::ChannelRole, + }, + ParticipantProject { + project_id: u64, + worktree_root_names: Vec, + host_user_id: u64, + is_last: bool, + }, + ParticipantScreen { + peer_id: Option, + is_last: bool, + }, + IncomingRequest(Arc), + OutgoingRequest(Arc), + ChannelInvite(Arc), + Channel { + channel: Arc, + depth: usize, + has_children: bool, + }, + ChannelNotes { + channel_id: ChannelId, + }, + ChannelChat { + channel_id: ChannelId, + }, + ChannelEditor { + depth: usize, + }, + HostedProject { + id: ProjectId, + name: SharedString, + }, + Contact { + contact: Arc, + calling: bool, + }, + ContactPlaceholder, +} + +impl CollabPanel { + pub fn new(workspace: &mut Workspace, cx: &mut ViewContext) -> View { + cx.new_view(|cx| { + let filter_editor = cx.new_view(|cx| { + let mut editor = Editor::single_line(cx); + editor.set_placeholder_text("Filter...", cx); + editor + }); + + cx.subscribe(&filter_editor, |this: &mut Self, _, event, cx| { + if let editor::EditorEvent::BufferEdited = event { + let query = this.filter_editor.read(cx).text(cx); + if !query.is_empty() { + this.selection.take(); + } + this.update_entries(true, cx); + if !query.is_empty() { + this.selection = this + .entries + .iter() + .position(|entry| !matches!(entry, ListEntry::Header(_))); + } + } + }) + .detach(); + + let channel_name_editor = cx.new_view(|cx| Editor::single_line(cx)); + + cx.subscribe(&channel_name_editor, |this: &mut Self, _, event, cx| { + if let editor::EditorEvent::Blurred = event { + if let Some(state) = &this.channel_editing_state { + if state.pending_name().is_some() { + return; + } + } + this.take_editing_state(cx); + this.update_entries(false, cx); + cx.notify(); + } + }) + .detach(); + + let view = cx.view().downgrade(); + let list_state = + ListState::new(0, gpui::ListAlignment::Top, px(1000.), move |ix, cx| { + if let Some(view) = view.upgrade() { + view.update(cx, |view, cx| view.render_list_entry(ix, cx)) + } else { + div().into_any() + } + }); + + let mut this = Self { + width: None, + focus_handle: cx.focus_handle(), + channel_clipboard: None, + fs: workspace.app_state().fs.clone(), + pending_serialization: Task::ready(None), + context_menu: None, + list_state, + channel_name_editor, + filter_editor, + entries: Vec::default(), + channel_editing_state: None, + selection: None, + channel_store: ChannelStore::global(cx), + user_store: workspace.user_store().clone(), + project: workspace.project().clone(), + subscriptions: Vec::default(), + match_candidates: Vec::default(), + collapsed_sections: vec![Section::Offline], + collapsed_channels: Vec::default(), + workspace: workspace.weak_handle(), + client: workspace.app_state().client.clone(), + }; + + this.update_entries(false, cx); + + let active_call = ActiveCall::global(cx); + this.subscriptions + .push(cx.observe(&this.user_store, |this, _, cx| { + this.update_entries(true, cx) + })); + this.subscriptions + .push(cx.observe(&this.channel_store, move |this, _, cx| { + this.update_entries(true, cx) + })); + this.subscriptions + .push(cx.observe(&active_call, |this, 
_, cx| this.update_entries(true, cx))); + this.subscriptions.push(cx.subscribe( + &this.channel_store, + |this, _channel_store, e, cx| match e { + ChannelEvent::ChannelCreated(channel_id) + | ChannelEvent::ChannelRenamed(channel_id) => { + if this.take_editing_state(cx) { + this.update_entries(false, cx); + this.selection = this.entries.iter().position(|entry| { + if let ListEntry::Channel { channel, .. } = entry { + channel.id == *channel_id + } else { + false + } + }); + } + } + }, + )); + + this + }) + } + + pub async fn load( + workspace: WeakView, + mut cx: AsyncWindowContext, + ) -> anyhow::Result> { + let serialized_panel = cx + .background_executor() + .spawn(async move { KEY_VALUE_STORE.read_kvp(COLLABORATION_PANEL_KEY) }) + .await + .map_err(|_| anyhow::anyhow!("Failed to read collaboration panel from key value store")) + .log_err() + .flatten() + .map(|panel| serde_json::from_str::(&panel)) + .transpose() + .log_err() + .flatten(); + + workspace.update(&mut cx, |workspace, cx| { + let panel = CollabPanel::new(workspace, cx); + if let Some(serialized_panel) = serialized_panel { + panel.update(cx, |panel, cx| { + panel.width = serialized_panel.width.map(|w| w.round()); + panel.collapsed_channels = serialized_panel + .collapsed_channels + .unwrap_or_else(|| Vec::new()) + .iter() + .map(|cid| ChannelId(*cid)) + .collect(); + cx.notify(); + }); + } + panel + }) + } + + fn serialize(&mut self, cx: &mut ViewContext) { + let width = self.width; + let collapsed_channels = self.collapsed_channels.clone(); + self.pending_serialization = cx.background_executor().spawn( + async move { + KEY_VALUE_STORE + .write_kvp( + COLLABORATION_PANEL_KEY.into(), + serde_json::to_string(&SerializedCollabPanel { + width, + collapsed_channels: Some( + collapsed_channels.iter().map(|cid| cid.0).collect(), + ), + })?, + ) + .await?; + anyhow::Ok(()) + } + .log_err(), + ); + } + + fn scroll_to_item(&mut self, ix: usize) { + self.list_state.scroll_to_reveal_item(ix) + } + + fn update_entries(&mut self, select_same_item: bool, cx: &mut ViewContext) { + let channel_store = self.channel_store.read(cx); + let user_store = self.user_store.read(cx); + let query = self.filter_editor.read(cx).text(cx); + let executor = cx.background_executor().clone(); + + let prev_selected_entry = self.selection.and_then(|ix| self.entries.get(ix).cloned()); + let old_entries = mem::take(&mut self.entries); + let mut scroll_to_top = false; + + if let Some(room) = ActiveCall::global(cx).read(cx).room() { + self.entries.push(ListEntry::Header(Section::ActiveCall)); + if !old_entries + .iter() + .any(|entry| matches!(entry, ListEntry::Header(Section::ActiveCall))) + { + scroll_to_top = true; + } + + if !self.collapsed_sections.contains(&Section::ActiveCall) { + let room = room.read(cx); + + if query.is_empty() { + if let Some(channel_id) = room.channel_id() { + self.entries.push(ListEntry::ChannelNotes { channel_id }); + self.entries.push(ListEntry::ChannelChat { channel_id }); + } + } + + // Populate the active user. 
+ if let Some(user) = user_store.current_user() { + self.match_candidates.clear(); + self.match_candidates.push(StringMatchCandidate { + id: 0, + string: user.github_login.clone(), + char_bag: user.github_login.chars().collect(), + }); + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + if !matches.is_empty() { + let user_id = user.id; + self.entries.push(ListEntry::CallParticipant { + user, + peer_id: None, + is_pending: false, + role: room.local_participant().role, + }); + let mut projects = room.local_participant().projects.iter().peekable(); + while let Some(project) = projects.next() { + self.entries.push(ListEntry::ParticipantProject { + project_id: project.id, + worktree_root_names: project.worktree_root_names.clone(), + host_user_id: user_id, + is_last: projects.peek().is_none() && !room.is_screen_sharing(), + }); + } + if room.is_screen_sharing() { + self.entries.push(ListEntry::ParticipantScreen { + peer_id: None, + is_last: true, + }); + } + } + } + + // Populate remote participants. + self.match_candidates.clear(); + self.match_candidates + .extend(room.remote_participants().values().map(|participant| { + StringMatchCandidate { + id: participant.user.id as usize, + string: participant.user.github_login.clone(), + char_bag: participant.user.github_login.chars().collect(), + } + })); + let mut matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + matches.sort_by(|a, b| { + let a_is_guest = room.role_for_user(a.candidate_id as u64) + == Some(proto::ChannelRole::Guest); + let b_is_guest = room.role_for_user(b.candidate_id as u64) + == Some(proto::ChannelRole::Guest); + a_is_guest + .cmp(&b_is_guest) + .then_with(|| a.string.cmp(&b.string)) + }); + for mat in matches { + let user_id = mat.candidate_id as u64; + let participant = &room.remote_participants()[&user_id]; + self.entries.push(ListEntry::CallParticipant { + user: participant.user.clone(), + peer_id: Some(participant.peer_id), + is_pending: false, + role: participant.role, + }); + let mut projects = participant.projects.iter().peekable(); + while let Some(project) = projects.next() { + self.entries.push(ListEntry::ParticipantProject { + project_id: project.id, + worktree_root_names: project.worktree_root_names.clone(), + host_user_id: participant.user.id, + is_last: projects.peek().is_none() + && participant.video_tracks.is_empty(), + }); + } + if !participant.video_tracks.is_empty() { + self.entries.push(ListEntry::ParticipantScreen { + peer_id: Some(participant.peer_id), + is_last: true, + }); + } + } + + // Populate pending participants. 
+ self.match_candidates.clear(); + self.match_candidates + .extend(room.pending_participants().iter().enumerate().map( + |(id, participant)| StringMatchCandidate { + id, + string: participant.github_login.clone(), + char_bag: participant.github_login.chars().collect(), + }, + )); + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + self.entries + .extend(matches.iter().map(|mat| ListEntry::CallParticipant { + user: room.pending_participants()[mat.candidate_id].clone(), + peer_id: None, + is_pending: true, + role: proto::ChannelRole::Member, + })); + } + } + + let mut request_entries = Vec::new(); + + self.entries.push(ListEntry::Header(Section::Channels)); + + if channel_store.channel_count() > 0 || self.channel_editing_state.is_some() { + self.match_candidates.clear(); + self.match_candidates + .extend( + channel_store + .ordered_channels() + .enumerate() + .map(|(ix, (_, channel))| StringMatchCandidate { + id: ix, + string: channel.name.clone().into(), + char_bag: channel.name.chars().collect(), + }), + ); + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + if let Some(state) = &self.channel_editing_state { + if matches!(state, ChannelEditingState::Create { location: None, .. }) { + self.entries.push(ListEntry::ChannelEditor { depth: 0 }); + } + } + let mut collapse_depth = None; + for mat in matches { + let channel = channel_store.channel_at_index(mat.candidate_id).unwrap(); + let depth = channel.parent_path.len(); + + if collapse_depth.is_none() && self.is_channel_collapsed(channel.id) { + collapse_depth = Some(depth); + } else if let Some(collapsed_depth) = collapse_depth { + if depth > collapsed_depth { + continue; + } + if self.is_channel_collapsed(channel.id) { + collapse_depth = Some(depth); + } else { + collapse_depth = None; + } + } + + let hosted_projects = channel_store.projects_for_id(channel.id); + let has_children = channel_store + .channel_at_index(mat.candidate_id + 1) + .map_or(false, |next_channel| { + next_channel.parent_path.ends_with(&[channel.id]) + }); + + match &self.channel_editing_state { + Some(ChannelEditingState::Create { + location: parent_id, + .. + }) if *parent_id == Some(channel.id) => { + self.entries.push(ListEntry::Channel { + channel: channel.clone(), + depth, + has_children: false, + }); + self.entries + .push(ListEntry::ChannelEditor { depth: depth + 1 }); + } + Some(ChannelEditingState::Rename { + location: parent_id, + .. 
+ }) if parent_id == &channel.id => { + self.entries.push(ListEntry::ChannelEditor { depth }); + } + _ => { + self.entries.push(ListEntry::Channel { + channel: channel.clone(), + depth, + has_children, + }); + } + } + + for (name, id) in hosted_projects { + self.entries.push(ListEntry::HostedProject { id, name }); + } + } + } + + let channel_invites = channel_store.channel_invitations(); + if !channel_invites.is_empty() { + self.match_candidates.clear(); + self.match_candidates + .extend(channel_invites.iter().enumerate().map(|(ix, channel)| { + StringMatchCandidate { + id: ix, + string: channel.name.clone().into(), + char_bag: channel.name.chars().collect(), + } + })); + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + request_entries.extend( + matches + .iter() + .map(|mat| ListEntry::ChannelInvite(channel_invites[mat.candidate_id].clone())), + ); + + if !request_entries.is_empty() { + self.entries + .push(ListEntry::Header(Section::ChannelInvites)); + if !self.collapsed_sections.contains(&Section::ChannelInvites) { + self.entries.append(&mut request_entries); + } + } + } + + self.entries.push(ListEntry::Header(Section::Contacts)); + + request_entries.clear(); + let incoming = user_store.incoming_contact_requests(); + if !incoming.is_empty() { + self.match_candidates.clear(); + self.match_candidates + .extend( + incoming + .iter() + .enumerate() + .map(|(ix, user)| StringMatchCandidate { + id: ix, + string: user.github_login.clone(), + char_bag: user.github_login.chars().collect(), + }), + ); + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + request_entries.extend( + matches + .iter() + .map(|mat| ListEntry::IncomingRequest(incoming[mat.candidate_id].clone())), + ); + } + + let outgoing = user_store.outgoing_contact_requests(); + if !outgoing.is_empty() { + self.match_candidates.clear(); + self.match_candidates + .extend( + outgoing + .iter() + .enumerate() + .map(|(ix, user)| StringMatchCandidate { + id: ix, + string: user.github_login.clone(), + char_bag: user.github_login.chars().collect(), + }), + ); + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + request_entries.extend( + matches + .iter() + .map(|mat| ListEntry::OutgoingRequest(outgoing[mat.candidate_id].clone())), + ); + } + + if !request_entries.is_empty() { + self.entries + .push(ListEntry::Header(Section::ContactRequests)); + if !self.collapsed_sections.contains(&Section::ContactRequests) { + self.entries.append(&mut request_entries); + } + } + + let contacts = user_store.contacts(); + if !contacts.is_empty() { + self.match_candidates.clear(); + self.match_candidates + .extend( + contacts + .iter() + .enumerate() + .map(|(ix, contact)| StringMatchCandidate { + id: ix, + string: contact.user.github_login.clone(), + char_bag: contact.user.github_login.chars().collect(), + }), + ); + + let matches = executor.block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + executor.clone(), + )); + + let (online_contacts, offline_contacts) = matches + .iter() + .partition::, _>(|mat| contacts[mat.candidate_id].online); + + for (matches, section) in [ + (online_contacts, Section::Online), + (offline_contacts, Section::Offline), + ] { + if !matches.is_empty() { + 
self.entries.push(ListEntry::Header(section)); + if !self.collapsed_sections.contains(§ion) { + let active_call = &ActiveCall::global(cx).read(cx); + for mat in matches { + let contact = &contacts[mat.candidate_id]; + self.entries.push(ListEntry::Contact { + contact: contact.clone(), + calling: active_call.pending_invites().contains(&contact.user.id), + }); + } + } + } + } + } + + if incoming.is_empty() && outgoing.is_empty() && contacts.is_empty() { + self.entries.push(ListEntry::ContactPlaceholder); + } + + if select_same_item { + if let Some(prev_selected_entry) = prev_selected_entry { + self.selection.take(); + for (ix, entry) in self.entries.iter().enumerate() { + if *entry == prev_selected_entry { + self.selection = Some(ix); + break; + } + } + } + } else { + self.selection = self.selection.and_then(|prev_selection| { + if self.entries.is_empty() { + None + } else { + Some(prev_selection.min(self.entries.len() - 1)) + } + }); + } + + let old_scroll_top = self.list_state.logical_scroll_top(); + self.list_state.reset(self.entries.len()); + + if scroll_to_top { + self.list_state.scroll_to(ListOffset::default()); + } else { + // Attempt to maintain the same scroll position. + if let Some(old_top_entry) = old_entries.get(old_scroll_top.item_ix) { + let new_scroll_top = self + .entries + .iter() + .position(|entry| entry == old_top_entry) + .map(|item_ix| ListOffset { + item_ix, + offset_in_item: old_scroll_top.offset_in_item, + }) + .or_else(|| { + let entry_after_old_top = old_entries.get(old_scroll_top.item_ix + 1)?; + let item_ix = self + .entries + .iter() + .position(|entry| entry == entry_after_old_top)?; + Some(ListOffset { + item_ix, + offset_in_item: Pixels::ZERO, + }) + }) + .or_else(|| { + let entry_before_old_top = + old_entries.get(old_scroll_top.item_ix.saturating_sub(1))?; + let item_ix = self + .entries + .iter() + .position(|entry| entry == entry_before_old_top)?; + Some(ListOffset { + item_ix, + offset_in_item: Pixels::ZERO, + }) + }); + + self.list_state + .scroll_to(new_scroll_top.unwrap_or(old_scroll_top)); + } + } + + cx.notify(); + } + + fn render_call_participant( + &self, + user: &Arc, + peer_id: Option, + is_pending: bool, + role: proto::ChannelRole, + is_selected: bool, + cx: &mut ViewContext, + ) -> ListItem { + let user_id = user.id; + let is_current_user = + self.user_store.read(cx).current_user().map(|user| user.id) == Some(user_id); + let tooltip = format!("Follow {}", user.github_login); + + let is_call_admin = ActiveCall::global(cx).read(cx).room().is_some_and(|room| { + room.read(cx).local_participant().role == proto::ChannelRole::Admin + }); + + ListItem::new(SharedString::from(user.github_login.clone())) + .start_slot(Avatar::new(user.avatar_uri.clone())) + .child(Label::new(user.github_login.clone())) + .selected(is_selected) + .end_slot(if is_pending { + Label::new("Calling").color(Color::Muted).into_any_element() + } else if is_current_user { + IconButton::new("leave-call", IconName::Exit) + .style(ButtonStyle::Subtle) + .on_click(move |_, cx| Self::leave_call(cx)) + .tooltip(|cx| Tooltip::text("Leave Call", cx)) + .into_any_element() + } else if role == proto::ChannelRole::Guest { + Label::new("Guest").color(Color::Muted).into_any_element() + } else if role == proto::ChannelRole::Talker { + Label::new("Mic only") + .color(Color::Muted) + .into_any_element() + } else { + div().into_any_element() + }) + .when_some(peer_id, |el, peer_id| { + if role == proto::ChannelRole::Guest { + return el; + } + el.tooltip(move |cx| Tooltip::text(tooltip.clone(), 
cx)) + .on_click(cx.listener(move |this, _, cx| { + this.workspace + .update(cx, |workspace, cx| workspace.follow(peer_id, cx)) + .ok(); + })) + }) + .when(is_call_admin, |el| { + el.on_secondary_mouse_down(cx.listener(move |this, event: &MouseDownEvent, cx| { + this.deploy_participant_context_menu(event.position, user_id, role, cx) + })) + }) + } + + fn render_participant_project( + &self, + project_id: u64, + worktree_root_names: &[String], + host_user_id: u64, + is_last: bool, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + let project_name: SharedString = if worktree_root_names.is_empty() { + "untitled".to_string() + } else { + worktree_root_names.join(", ") + } + .into(); + + ListItem::new(project_id as usize) + .selected(is_selected) + .on_click(cx.listener(move |this, _, cx| { + this.workspace + .update(cx, |workspace, cx| { + let app_state = workspace.app_state().clone(); + workspace::join_in_room_project(project_id, host_user_id, app_state, cx) + .detach_and_prompt_err("Failed to join project", cx, |_, _| None); + }) + .ok(); + })) + .start_slot( + h_flex() + .gap_1() + .child(render_tree_branch(is_last, false, cx)) + .child(IconButton::new(0, IconName::Folder)), + ) + .child(Label::new(project_name.clone())) + .tooltip(move |cx| Tooltip::text(format!("Open {}", project_name), cx)) + } + + fn render_participant_screen( + &self, + peer_id: Option, + is_last: bool, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize); + + ListItem::new(("screen", id)) + .selected(is_selected) + .start_slot( + h_flex() + .gap_1() + .child(render_tree_branch(is_last, false, cx)) + .child(IconButton::new(0, IconName::Screen)), + ) + .child(Label::new("Screen")) + .when_some(peer_id, |this, _| { + this.on_click(cx.listener(move |this, _, cx| { + this.workspace + .update(cx, |workspace, cx| { + workspace.open_shared_screen(peer_id.unwrap(), cx) + }) + .ok(); + })) + .tooltip(move |cx| Tooltip::text("Open shared screen", cx)) + }) + } + + fn take_editing_state(&mut self, cx: &mut ViewContext) -> bool { + if let Some(_) = self.channel_editing_state.take() { + self.channel_name_editor.update(cx, |editor, cx| { + editor.set_text("", cx); + }); + true + } else { + false + } + } + + fn render_channel_notes( + &self, + channel_id: ChannelId, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + let channel_store = self.channel_store.read(cx); + let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id); + ListItem::new("channel-notes") + .selected(is_selected) + .on_click(cx.listener(move |this, _, cx| { + this.open_channel_notes(channel_id, cx); + })) + .start_slot( + h_flex() + .relative() + .gap_1() + .child(render_tree_branch(false, true, cx)) + .child(IconButton::new(0, IconName::File)) + .children(has_channel_buffer_changed.then(|| { + div() + .w_1p5() + .absolute() + .right(px(2.)) + .top(px(2.)) + .child(Indicator::dot().color(Color::Info)) + })), + ) + .child(Label::new("notes")) + .tooltip(move |cx| Tooltip::text("Open Channel Notes", cx)) + } + + fn render_channel_chat( + &self, + channel_id: ChannelId, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + let channel_store = self.channel_store.read(cx); + let has_messages_notification = channel_store.has_new_messages(channel_id); + ListItem::new("channel-chat") + .selected(is_selected) + .on_click(cx.listener(move |this, _, cx| { + this.join_channel_chat(channel_id, cx); + })) + 
.start_slot( + h_flex() + .relative() + .gap_1() + .child(render_tree_branch(false, false, cx)) + .child(IconButton::new(0, IconName::MessageBubbles)) + .children(has_messages_notification.then(|| { + div() + .w_1p5() + .absolute() + .right(px(2.)) + .top(px(4.)) + .child(Indicator::dot().color(Color::Info)) + })), + ) + .child(Label::new("chat")) + .tooltip(move |cx| Tooltip::text("Open Chat", cx)) + } + + fn render_channel_project( + &self, + id: ProjectId, + name: &SharedString, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + ListItem::new(ElementId::NamedInteger( + "channel-project".into(), + id.0 as usize, + )) + .indent_level(2) + .indent_step_size(px(20.)) + .selected(is_selected) + .on_click(cx.listener(move |this, _, cx| { + if let Some(workspace) = this.workspace.upgrade() { + let app_state = workspace.read(cx).app_state().clone(); + workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err( + "Failed to open project", + cx, + |_, _| None, + ) + } + })) + .start_slot( + h_flex() + .relative() + .gap_1() + .child(IconButton::new(0, IconName::FileTree)), + ) + .child(Label::new(name.clone())) + .tooltip(move |cx| Tooltip::text("Open Project", cx)) + } + + fn has_subchannels(&self, ix: usize) -> bool { + self.entries.get(ix).map_or(false, |entry| { + if let ListEntry::Channel { has_children, .. } = entry { + *has_children + } else { + false + } + }) + } + + fn deploy_participant_context_menu( + &mut self, + position: Point, + user_id: u64, + role: proto::ChannelRole, + cx: &mut ViewContext, + ) { + let this = cx.view().clone(); + if !(role == proto::ChannelRole::Guest + || role == proto::ChannelRole::Talker + || role == proto::ChannelRole::Member) + { + return; + } + + let context_menu = ContextMenu::build(cx, |mut context_menu, cx| { + if role == proto::ChannelRole::Guest { + context_menu = context_menu.entry( + "Grant Mic Access", + None, + cx.handler_for(&this, move |_, cx| { + ActiveCall::global(cx) + .update(cx, |call, cx| { + let Some(room) = call.room() else { + return Task::ready(Ok(())); + }; + room.update(cx, |room, cx| { + room.set_participant_role( + user_id, + proto::ChannelRole::Talker, + cx, + ) + }) + }) + .detach_and_prompt_err("Failed to grant mic access", cx, |_, _| None) + }), + ); + } + if role == proto::ChannelRole::Guest || role == proto::ChannelRole::Talker { + context_menu = context_menu.entry( + "Grant Write Access", + None, + cx.handler_for(&this, move |_, cx| { + ActiveCall::global(cx) + .update(cx, |call, cx| { + let Some(room) = call.room() else { + return Task::ready(Ok(())); + }; + room.update(cx, |room, cx| { + room.set_participant_role( + user_id, + proto::ChannelRole::Member, + cx, + ) + }) + }) + .detach_and_prompt_err("Failed to grant write access", cx, |e, _| { + match e.error_code() { + ErrorCode::NeedsCla => Some("This user has not yet signed the CLA at https://zed.dev/cla.".into()), + _ => None, + } + }) + }), + ); + } + if role == proto::ChannelRole::Member || role == proto::ChannelRole::Talker { + let label = if role == proto::ChannelRole::Talker { + "Mute" + } else { + "Revoke Access" + }; + context_menu = context_menu.entry( + label, + None, + cx.handler_for(&this, move |_, cx| { + ActiveCall::global(cx) + .update(cx, |call, cx| { + let Some(room) = call.room() else { + return Task::ready(Ok(())); + }; + room.update(cx, |room, cx| { + room.set_participant_role( + user_id, + proto::ChannelRole::Guest, + cx, + ) + }) + }) + .detach_and_prompt_err("Failed to revoke access", cx, |_, _| None) + }), + ); + 
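+ // Both "Mute" and "Revoke Access" demote the participant back to the Guest role.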
} + + context_menu + }); + + cx.focus_view(&context_menu); + let subscription = + cx.subscribe(&context_menu, |this, _, _: &DismissEvent, cx| { + if this.context_menu.as_ref().is_some_and(|context_menu| { + context_menu.0.focus_handle(cx).contains_focused(cx) + }) { + cx.focus_self(); + } + this.context_menu.take(); + cx.notify(); + }); + self.context_menu = Some((context_menu, position, subscription)); + } + + fn deploy_channel_context_menu( + &mut self, + position: Point, + channel_id: ChannelId, + ix: usize, + cx: &mut ViewContext, + ) { + let clipboard_channel_name = self.channel_clipboard.as_ref().and_then(|clipboard| { + self.channel_store + .read(cx) + .channel_for_id(clipboard.channel_id) + .map(|channel| channel.name.clone()) + }); + let this = cx.view().clone(); + + let context_menu = ContextMenu::build(cx, |mut context_menu, cx| { + if self.has_subchannels(ix) { + let expand_action_name = if self.is_channel_collapsed(channel_id) { + "Expand Subchannels" + } else { + "Collapse Subchannels" + }; + context_menu = context_menu.entry( + expand_action_name, + None, + cx.handler_for(&this, move |this, cx| { + this.toggle_channel_collapsed(channel_id, cx) + }), + ); + } + + context_menu = context_menu + .entry( + "Open Notes", + None, + cx.handler_for(&this, move |this, cx| { + this.open_channel_notes(channel_id, cx) + }), + ) + .entry( + "Open Chat", + None, + cx.handler_for(&this, move |this, cx| { + this.join_channel_chat(channel_id, cx) + }), + ) + .entry( + "Copy Channel Link", + None, + cx.handler_for(&this, move |this, cx| { + this.copy_channel_link(channel_id, cx) + }), + ); + + let mut has_destructive_actions = false; + if self.channel_store.read(cx).is_channel_admin(channel_id) { + has_destructive_actions = true; + context_menu = context_menu + .separator() + .entry( + "New Subchannel", + None, + cx.handler_for(&this, move |this, cx| this.new_subchannel(channel_id, cx)), + ) + .entry( + "Rename", + Some(Box::new(SecondaryConfirm)), + cx.handler_for(&this, move |this, cx| this.rename_channel(channel_id, cx)), + ); + + if let Some(channel_name) = clipboard_channel_name { + context_menu = context_menu.separator().entry( + format!("Move '#{}' here", channel_name), + None, + cx.handler_for(&this, move |this, cx| { + this.move_channel_on_clipboard(channel_id, cx) + }), + ); + } + + if self.channel_store.read(cx).is_root_channel(channel_id) { + context_menu = context_menu.separator().entry( + "Manage Members", + None, + cx.handler_for(&this, move |this, cx| this.manage_members(channel_id, cx)), + ) + } else { + context_menu = context_menu.entry( + "Move this channel", + None, + cx.handler_for(&this, move |this, cx| { + this.start_move_channel(channel_id, cx) + }), + ); + if self.channel_store.read(cx).is_public_channel(channel_id) { + context_menu = context_menu.separator().entry( + "Make Channel Private", + None, + cx.handler_for(&this, move |this, cx| { + this.set_channel_visibility( + channel_id, + ChannelVisibility::Members, + cx, + ) + }), + ) + } else { + context_menu = context_menu.separator().entry( + "Make Channel Public", + None, + cx.handler_for(&this, move |this, cx| { + this.set_channel_visibility( + channel_id, + ChannelVisibility::Public, + cx, + ) + }), + ) + } + } + + context_menu = context_menu.entry( + "Delete", + None, + cx.handler_for(&this, move |this, cx| this.remove_channel(channel_id, cx)), + ); + } + + if self.channel_store.read(cx).is_root_channel(channel_id) { + if !has_destructive_actions { + context_menu = context_menu.separator() + } + context_menu = 
context_menu.entry( + "Leave Channel", + None, + cx.handler_for(&this, move |this, cx| this.leave_channel(channel_id, cx)), + ); + } + + context_menu + }); + + cx.focus_view(&context_menu); + let subscription = + cx.subscribe(&context_menu, |this, _, _: &DismissEvent, cx| { + if this.context_menu.as_ref().is_some_and(|context_menu| { + context_menu.0.focus_handle(cx).contains_focused(cx) + }) { + cx.focus_self(); + } + this.context_menu.take(); + cx.notify(); + }); + self.context_menu = Some((context_menu, position, subscription)); + + cx.notify(); + } + + fn deploy_contact_context_menu( + &mut self, + position: Point, + contact: Arc, + cx: &mut ViewContext, + ) { + let this = cx.view().clone(); + let in_room = ActiveCall::global(cx).read(cx).room().is_some(); + + let context_menu = ContextMenu::build(cx, |mut context_menu, _| { + let user_id = contact.user.id; + + if contact.online && !contact.busy { + let label = if in_room { + format!("Invite {} to join", contact.user.github_login) + } else { + format!("Call {}", contact.user.github_login) + }; + context_menu = context_menu.entry(label, None, { + let this = this.clone(); + move |cx| { + this.update(cx, |this, cx| { + this.call(user_id, cx); + }); + } + }); + } + + context_menu.entry("Remove Contact", None, { + let this = this.clone(); + move |cx| { + this.update(cx, |this, cx| { + this.remove_contact(contact.user.id, &contact.user.github_login, cx); + }); + } + }) + }); + + cx.focus_view(&context_menu); + let subscription = + cx.subscribe(&context_menu, |this, _, _: &DismissEvent, cx| { + if this.context_menu.as_ref().is_some_and(|context_menu| { + context_menu.0.focus_handle(cx).contains_focused(cx) + }) { + cx.focus_self(); + } + this.context_menu.take(); + cx.notify(); + }); + self.context_menu = Some((context_menu, position, subscription)); + + cx.notify(); + } + + fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { + if self.take_editing_state(cx) { + cx.focus_view(&self.filter_editor); + } else { + self.filter_editor.update(cx, |editor, cx| { + if editor.buffer().read(cx).len(cx) > 0 { + editor.set_text("", cx); + } + }); + } + + if self.context_menu.is_some() { + self.context_menu.take(); + cx.notify(); + } + + self.update_entries(false, cx); + } + + fn select_next(&mut self, _: &SelectNext, cx: &mut ViewContext) { + let ix = self.selection.map_or(0, |ix| ix + 1); + if ix < self.entries.len() { + self.selection = Some(ix); + } + + if let Some(ix) = self.selection { + self.scroll_to_item(ix) + } + cx.notify(); + } + + fn select_prev(&mut self, _: &SelectPrev, cx: &mut ViewContext) { + let ix = self.selection.take().unwrap_or(0); + if ix > 0 { + self.selection = Some(ix - 1); + } + + if let Some(ix) = self.selection { + self.scroll_to_item(ix) + } + cx.notify(); + } + + fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext) { + if self.confirm_channel_edit(cx) { + return; + } + + if let Some(selection) = self.selection { + if let Some(entry) = self.entries.get(selection) { + match entry { + ListEntry::Header(section) => match section { + Section::ActiveCall => Self::leave_call(cx), + Section::Channels => self.new_root_channel(cx), + Section::Contacts => self.toggle_contact_finder(cx), + Section::ContactRequests + | Section::Online + | Section::Offline + | Section::ChannelInvites => { + self.toggle_section_expanded(*section, cx); + } + }, + ListEntry::Contact { contact, calling } => { + if contact.online && !contact.busy && !calling { + self.call(contact.user.id, cx); + } + } + ListEntry::ParticipantProject { + project_id, + 
host_user_id, + .. + } => { + if let Some(workspace) = self.workspace.upgrade() { + let app_state = workspace.read(cx).app_state().clone(); + workspace::join_in_room_project( + *project_id, + *host_user_id, + app_state, + cx, + ) + .detach_and_prompt_err( + "Failed to join project", + cx, + |_, _| None, + ); + } + } + ListEntry::ParticipantScreen { peer_id, .. } => { + let Some(peer_id) = peer_id else { + return; + }; + if let Some(workspace) = self.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.open_shared_screen(*peer_id, cx) + }); + } + } + ListEntry::Channel { channel, .. } => { + let is_active = maybe!({ + let call_channel = ActiveCall::global(cx) + .read(cx) + .room()? + .read(cx) + .channel_id()?; + + Some(call_channel == channel.id) + }) + .unwrap_or(false); + if is_active { + self.open_channel_notes(channel.id, cx) + } else { + self.join_channel(channel.id, cx) + } + } + ListEntry::ContactPlaceholder => self.toggle_contact_finder(cx), + ListEntry::CallParticipant { user, peer_id, .. } => { + if Some(user) == self.user_store.read(cx).current_user().as_ref() { + Self::leave_call(cx); + } else if let Some(peer_id) = peer_id { + self.workspace + .update(cx, |workspace, cx| workspace.follow(*peer_id, cx)) + .ok(); + } + } + ListEntry::IncomingRequest(user) => { + self.respond_to_contact_request(user.id, true, cx) + } + ListEntry::ChannelInvite(channel) => { + self.respond_to_channel_invite(channel.id, true, cx) + } + ListEntry::ChannelNotes { channel_id } => { + self.open_channel_notes(*channel_id, cx) + } + ListEntry::ChannelChat { channel_id } => { + self.join_channel_chat(*channel_id, cx) + } + ListEntry::HostedProject { + id: _id, + name: _name, + } => { + // todo() + } + ListEntry::OutgoingRequest(_) => {} + ListEntry::ChannelEditor { .. } => {} + } + } + } + } + + fn insert_space(&mut self, _: &InsertSpace, cx: &mut ViewContext) { + if self.channel_editing_state.is_some() { + self.channel_name_editor.update(cx, |editor, cx| { + editor.insert(" ", cx); + }); + } + } + + fn confirm_channel_edit(&mut self, cx: &mut ViewContext) -> bool { + if let Some(editing_state) = &mut self.channel_editing_state { + match editing_state { + ChannelEditingState::Create { + location, + pending_name, + .. 
+ } => { + if pending_name.is_some() { + return false; + } + let channel_name = self.channel_name_editor.read(cx).text(cx); + + *pending_name = Some(channel_name.clone()); + + let create = self.channel_store.update(cx, |channel_store, cx| { + channel_store.create_channel(&channel_name, *location, cx) + }); + if location.is_none() { + cx.spawn(|this, mut cx| async move { + let channel_id = create.await?; + this.update(&mut cx, |this, cx| { + this.show_channel_modal( + channel_id, + channel_modal::Mode::InviteMembers, + cx, + ) + }) + }) + .detach_and_prompt_err( + "Failed to create channel", + cx, + |_, _| None, + ); + } else { + create.detach_and_prompt_err("Failed to create channel", cx, |_, _| None); + } + cx.notify(); + } + ChannelEditingState::Rename { + location, + pending_name, + } => { + if pending_name.is_some() { + return false; + } + let channel_name = self.channel_name_editor.read(cx).text(cx); + *pending_name = Some(channel_name.clone()); + + self.channel_store + .update(cx, |channel_store, cx| { + channel_store.rename(*location, &channel_name, cx) + }) + .detach(); + cx.notify(); + } + } + cx.focus_self(); + true + } else { + false + } + } + + fn toggle_section_expanded(&mut self, section: Section, cx: &mut ViewContext) { + if let Some(ix) = self.collapsed_sections.iter().position(|s| *s == section) { + self.collapsed_sections.remove(ix); + } else { + self.collapsed_sections.push(section); + } + self.update_entries(false, cx); + } + + fn collapse_selected_channel( + &mut self, + _: &CollapseSelectedChannel, + cx: &mut ViewContext, + ) { + let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else { + return; + }; + + if self.is_channel_collapsed(channel_id) { + return; + } + + self.toggle_channel_collapsed(channel_id, cx); + } + + fn expand_selected_channel(&mut self, _: &ExpandSelectedChannel, cx: &mut ViewContext) { + let Some(id) = self.selected_channel().map(|channel| channel.id) else { + return; + }; + + if !self.is_channel_collapsed(id) { + return; + } + + self.toggle_channel_collapsed(id, cx) + } + + fn toggle_channel_collapsed(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + match self.collapsed_channels.binary_search(&channel_id) { + Ok(ix) => { + self.collapsed_channels.remove(ix); + } + Err(ix) => { + self.collapsed_channels.insert(ix, channel_id); + } + }; + self.serialize(cx); + self.update_entries(true, cx); + cx.notify(); + cx.focus_self(); + } + + fn is_channel_collapsed(&self, channel_id: ChannelId) -> bool { + self.collapsed_channels.binary_search(&channel_id).is_ok() + } + + fn leave_call(cx: &mut WindowContext) { + ActiveCall::global(cx) + .update(cx, |call, cx| call.hang_up(cx)) + .detach_and_prompt_err("Failed to hang up", cx, |_, _| None); + } + + fn toggle_contact_finder(&mut self, cx: &mut ViewContext) { + if let Some(workspace) = self.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.toggle_modal(cx, |cx| { + let mut finder = ContactFinder::new(self.user_store.clone(), cx); + finder.set_query(self.filter_editor.read(cx).text(cx), cx); + finder + }); + }); + } + } + + fn new_root_channel(&mut self, cx: &mut ViewContext) { + self.channel_editing_state = Some(ChannelEditingState::Create { + location: None, + pending_name: None, + }); + self.update_entries(false, cx); + self.select_channel_editor(); + cx.focus_view(&self.channel_name_editor); + cx.notify(); + } + + fn select_channel_editor(&mut self) { + self.selection = self.entries.iter().position(|entry| match entry { + ListEntry::ChannelEditor { 
.. } => true, + _ => false, + }); + } + + fn new_subchannel(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + self.collapsed_channels + .retain(|channel| *channel != channel_id); + self.channel_editing_state = Some(ChannelEditingState::Create { + location: Some(channel_id), + pending_name: None, + }); + self.update_entries(false, cx); + self.select_channel_editor(); + cx.focus_view(&self.channel_name_editor); + cx.notify(); + } + + fn manage_members(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + self.show_channel_modal(channel_id, channel_modal::Mode::ManageMembers, cx); + } + + fn remove_selected_channel(&mut self, _: &Remove, cx: &mut ViewContext) { + if let Some(channel) = self.selected_channel() { + self.remove_channel(channel.id, cx) + } + } + + fn rename_selected_channel(&mut self, _: &SecondaryConfirm, cx: &mut ViewContext) { + if let Some(channel) = self.selected_channel() { + self.rename_channel(channel.id, cx); + } + } + + fn rename_channel(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + let channel_store = self.channel_store.read(cx); + if !channel_store.is_channel_admin(channel_id) { + return; + } + if let Some(channel) = channel_store.channel_for_id(channel_id).cloned() { + self.channel_editing_state = Some(ChannelEditingState::Rename { + location: channel_id, + pending_name: None, + }); + self.channel_name_editor.update(cx, |editor, cx| { + editor.set_text(channel.name.clone(), cx); + editor.select_all(&Default::default(), cx); + }); + cx.focus_view(&self.channel_name_editor); + self.update_entries(false, cx); + self.select_channel_editor(); + } + } + + fn set_channel_visibility( + &mut self, + channel_id: ChannelId, + visibility: ChannelVisibility, + cx: &mut ViewContext, + ) { + self.channel_store + .update(cx, |channel_store, cx| { + channel_store.set_channel_visibility(channel_id, visibility, cx) + }) + .detach_and_prompt_err("Failed to set channel visibility", cx, |e, _| match e.error_code() { + ErrorCode::BadPublicNesting => + if e.error_tag("direction") == Some("parent") { + Some("To make a channel public, its parent channel must be public.".to_string()) + } else { + Some("To make a channel private, all of its subchannels must be private.".to_string()) + }, + _ => None + }); + } + + fn start_move_channel(&mut self, channel_id: ChannelId, _cx: &mut ViewContext) { + self.channel_clipboard = Some(ChannelMoveClipboard { channel_id }); + } + + fn start_move_selected_channel(&mut self, _: &StartMoveChannel, cx: &mut ViewContext) { + if let Some(channel) = self.selected_channel() { + self.start_move_channel(channel.id, cx); + } + } + + fn move_channel_on_clipboard( + &mut self, + to_channel_id: ChannelId, + cx: &mut ViewContext, + ) { + if let Some(clipboard) = self.channel_clipboard.take() { + self.move_channel(clipboard.channel_id, to_channel_id, cx) + } + } + + fn move_channel(&self, channel_id: ChannelId, to: ChannelId, cx: &mut ViewContext) { + self.channel_store + .update(cx, |channel_store, cx| { + channel_store.move_channel(channel_id, to, cx) + }) + .detach_and_prompt_err("Failed to move channel", cx, |e, _| match e.error_code() { + ErrorCode::BadPublicNesting => { + Some("Public channels must have public parents".into()) + } + ErrorCode::CircularNesting => Some("You cannot move a channel into itself".into()), + ErrorCode::WrongMoveTarget => { + Some("You cannot move a channel into a different root channel".into()) + } + _ => None, + }) + } + + fn open_channel_notes(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + if let 
Some(workspace) = self.workspace.upgrade() { + ChannelView::open(channel_id, None, workspace, cx).detach(); + } + } + + fn show_inline_context_menu(&mut self, _: &menu::SecondaryConfirm, cx: &mut ViewContext) { + let Some(bounds) = self + .selection + .and_then(|ix| self.list_state.bounds_for_item(ix)) + else { + return; + }; + + if let Some(channel) = self.selected_channel() { + self.deploy_channel_context_menu( + bounds.center(), + channel.id, + self.selection.unwrap(), + cx, + ); + cx.stop_propagation(); + return; + }; + + if let Some(contact) = self.selected_contact() { + self.deploy_contact_context_menu(bounds.center(), contact, cx); + cx.stop_propagation(); + return; + }; + } + + fn selected_channel(&self) -> Option<&Arc> { + self.selection + .and_then(|ix| self.entries.get(ix)) + .and_then(|entry| match entry { + ListEntry::Channel { channel, .. } => Some(channel), + _ => None, + }) + } + + fn selected_contact(&self) -> Option> { + self.selection + .and_then(|ix| self.entries.get(ix)) + .and_then(|entry| match entry { + ListEntry::Contact { contact, .. } => Some(contact.clone()), + _ => None, + }) + } + + fn show_channel_modal( + &mut self, + channel_id: ChannelId, + mode: channel_modal::Mode, + cx: &mut ViewContext, + ) { + let workspace = self.workspace.clone(); + let user_store = self.user_store.clone(); + let channel_store = self.channel_store.clone(); + + cx.spawn(|_, mut cx| async move { + workspace.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, |cx| { + ChannelModal::new( + user_store.clone(), + channel_store.clone(), + channel_id, + mode, + cx, + ) + }); + }) + }) + .detach(); + } + + fn leave_channel(&self, channel_id: ChannelId, cx: &mut ViewContext) { + let Some(user_id) = self.user_store.read(cx).current_user().map(|u| u.id) else { + return; + }; + let Some(channel) = self.channel_store.read(cx).channel_for_id(channel_id) else { + return; + }; + let prompt_message = format!("Are you sure you want to leave \"#{}\"?", channel.name); + let answer = cx.prompt( + PromptLevel::Warning, + &prompt_message, + None, + &["Leave", "Cancel"], + ); + cx.spawn(|this, mut cx| async move { + if answer.await? != 0 { + return Ok(()); + } + this.update(&mut cx, |this, cx| { + this.channel_store.update(cx, |channel_store, cx| { + channel_store.remove_member(channel_id, user_id, cx) + }) + })? + .await + }) + .detach_and_prompt_err("Failed to leave channel", cx, |_, _| None) + } + + fn remove_channel(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + let channel_store = self.channel_store.clone(); + if let Some(channel) = channel_store.read(cx).channel_for_id(channel_id) { + let prompt_message = format!( + "Are you sure you want to remove the channel \"{}\"?", + channel.name + ); + let answer = cx.prompt( + PromptLevel::Warning, + &prompt_message, + None, + &["Remove", "Cancel"], + ); + cx.spawn(|this, mut cx| async move { + if answer.await? == 0 { + channel_store + .update(&mut cx, |channels, _| channels.remove_channel(channel_id))? 
+ .await + .notify_async_err(&mut cx); + this.update(&mut cx, |_, cx| cx.focus_self()).ok(); + } + anyhow::Ok(()) + }) + .detach(); + } + } + + fn remove_contact(&mut self, user_id: u64, github_login: &str, cx: &mut ViewContext) { + let user_store = self.user_store.clone(); + let prompt_message = format!( + "Are you sure you want to remove \"{}\" from your contacts?", + github_login + ); + let answer = cx.prompt( + PromptLevel::Warning, + &prompt_message, + None, + &["Remove", "Cancel"], + ); + cx.spawn(|_, mut cx| async move { + if answer.await? == 0 { + user_store + .update(&mut cx, |store, cx| store.remove_contact(user_id, cx))? + .await + .notify_async_err(&mut cx); + } + anyhow::Ok(()) + }) + .detach_and_prompt_err("Failed to remove contact", cx, |_, _| None); + } + + fn respond_to_contact_request( + &mut self, + user_id: u64, + accept: bool, + cx: &mut ViewContext, + ) { + self.user_store + .update(cx, |store, cx| { + store.respond_to_contact_request(user_id, accept, cx) + }) + .detach_and_prompt_err("Failed to respond to contact request", cx, |_, _| None); + } + + fn respond_to_channel_invite( + &mut self, + channel_id: ChannelId, + accept: bool, + cx: &mut ViewContext, + ) { + self.channel_store + .update(cx, |store, cx| { + store.respond_to_channel_invite(channel_id, accept, cx) + }) + .detach(); + } + + fn call(&mut self, recipient_user_id: u64, cx: &mut ViewContext) { + ActiveCall::global(cx) + .update(cx, |call, cx| { + call.invite(recipient_user_id, Some(self.project.clone()), cx) + }) + .detach_and_prompt_err("Call failed", cx, |_, _| None); + } + + fn join_channel(&self, channel_id: ChannelId, cx: &mut ViewContext) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + let Some(handle) = cx.window_handle().downcast::() else { + return; + }; + workspace::join_channel( + channel_id, + workspace.read(cx).app_state().clone(), + Some(handle), + cx, + ) + .detach_and_prompt_err("Failed to join channel", cx, |_, _| None) + } + + fn join_channel_chat(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + cx.window_context().defer(move |cx| { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = workspace.focus_panel::(cx) { + panel.update(cx, |panel, cx| { + panel + .select_channel(channel_id, None, cx) + .detach_and_notify_err(cx); + }); + } + }); + }); + } + + fn copy_channel_link(&mut self, channel_id: ChannelId, cx: &mut ViewContext) { + let channel_store = self.channel_store.read(cx); + let Some(channel) = channel_store.channel_for_id(channel_id) else { + return; + }; + let item = ClipboardItem::new(channel.link(cx)); + cx.write_to_clipboard(item) + } + + fn render_signed_out(&mut self, cx: &mut ViewContext) -> Div { + let collab_blurb = "Work with your team in realtime with collaborative editing, voice, shared notes and more."; + + v_flex() + .gap_6() + .p_4() + .child(Label::new(collab_blurb)) + .child( + v_flex() + .gap_2() + .child( + Button::new("sign_in", "Sign in") + .icon_color(Color::Muted) + .icon(IconName::Github) + .icon_position(IconPosition::Start) + .style(ButtonStyle::Filled) + .full_width() + .on_click(cx.listener(|this, _, cx| { + let client = this.client.clone(); + cx.spawn(|_, mut cx| async move { + client + .authenticate_and_connect(true, &cx) + .await + .notify_async_err(&mut cx); + }) + .detach() + })), + ) + .child( + div().flex().w_full().items_center().child( + Label::new("Sign in to enable collaboration.") + .color(Color::Muted) + 
.size(LabelSize::Small), + ), + ), + ) + } + + fn render_list_entry(&mut self, ix: usize, cx: &mut ViewContext) -> AnyElement { + let entry = &self.entries[ix]; + + let is_selected = self.selection == Some(ix); + match entry { + ListEntry::Header(section) => { + let is_collapsed = self.collapsed_sections.contains(section); + self.render_header(*section, is_selected, is_collapsed, cx) + .into_any_element() + } + ListEntry::Contact { contact, calling } => self + .render_contact(contact, *calling, is_selected, cx) + .into_any_element(), + ListEntry::ContactPlaceholder => self + .render_contact_placeholder(is_selected, cx) + .into_any_element(), + ListEntry::IncomingRequest(user) => self + .render_contact_request(user, true, is_selected, cx) + .into_any_element(), + ListEntry::OutgoingRequest(user) => self + .render_contact_request(user, false, is_selected, cx) + .into_any_element(), + ListEntry::Channel { + channel, + depth, + has_children, + } => self + .render_channel(channel, *depth, *has_children, is_selected, ix, cx) + .into_any_element(), + ListEntry::ChannelEditor { depth } => { + self.render_channel_editor(*depth, cx).into_any_element() + } + ListEntry::ChannelInvite(channel) => self + .render_channel_invite(channel, is_selected, cx) + .into_any_element(), + ListEntry::CallParticipant { + user, + peer_id, + is_pending, + role, + } => self + .render_call_participant(user, *peer_id, *is_pending, *role, is_selected, cx) + .into_any_element(), + ListEntry::ParticipantProject { + project_id, + worktree_root_names, + host_user_id, + is_last, + } => self + .render_participant_project( + *project_id, + &worktree_root_names, + *host_user_id, + *is_last, + is_selected, + cx, + ) + .into_any_element(), + ListEntry::ParticipantScreen { peer_id, is_last } => self + .render_participant_screen(*peer_id, *is_last, is_selected, cx) + .into_any_element(), + ListEntry::ChannelNotes { channel_id } => self + .render_channel_notes(*channel_id, is_selected, cx) + .into_any_element(), + ListEntry::ChannelChat { channel_id } => self + .render_channel_chat(*channel_id, is_selected, cx) + .into_any_element(), + + ListEntry::HostedProject { id, name } => self + .render_channel_project(*id, name, is_selected, cx) + .into_any_element(), + } + } + + fn render_signed_in(&mut self, cx: &mut ViewContext) -> Div { + v_flex() + .size_full() + .child(list(self.list_state.clone()).size_full()) + .child( + v_flex() + .child(div().mx_2().border_primary(cx).border_t_1()) + .child( + v_flex() + .p_2() + .child(self.render_filter_input(&self.filter_editor, cx)), + ), + ) + } + + fn render_filter_input( + &self, + editor: &View, + cx: &mut ViewContext, + ) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: if editor.read(cx).read_only(cx) { + cx.theme().colors().text_disabled + } else { + cx.theme().colors().text + }, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: rems(0.875).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: relative(1.3), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }; + + EditorElement::new( + editor, + EditorStyle { + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } + + fn render_header( + &self, + section: Section, + is_selected: bool, + is_collapsed: bool, + cx: &ViewContext, + ) -> impl IntoElement { + let mut channel_link = None; + 
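+ // These three locals are only populated for the Active Call section, derived below from the current call's channel: the link to copy, a public/private icon, and the matching tooltip.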
let mut channel_tooltip_text = None; + let mut channel_icon = None; + + let text = match section { + Section::ActiveCall => { + let channel_name = maybe!({ + let channel_id = ActiveCall::global(cx).read(cx).channel_id(cx)?; + + let channel = self.channel_store.read(cx).channel_for_id(channel_id)?; + + channel_link = Some(channel.link(cx)); + (channel_icon, channel_tooltip_text) = match channel.visibility { + proto::ChannelVisibility::Public => { + (Some("icons/public.svg"), Some("Copy public channel link.")) + } + proto::ChannelVisibility::Members => { + (Some("icons/hash.svg"), Some("Copy private channel link.")) + } + }; + + Some(channel.name.as_ref()) + }); + + if let Some(name) = channel_name { + SharedString::from(name.to_string()) + } else { + SharedString::from("Current Call") + } + } + Section::ContactRequests => SharedString::from("Requests"), + Section::Contacts => SharedString::from("Contacts"), + Section::Channels => SharedString::from("Channels"), + Section::ChannelInvites => SharedString::from("Invites"), + Section::Online => SharedString::from("Online"), + Section::Offline => SharedString::from("Offline"), + }; + + let button = match section { + Section::ActiveCall => channel_link.map(|channel_link| { + let channel_link_copy = channel_link.clone(); + IconButton::new("channel-link", IconName::Copy) + .icon_size(IconSize::Small) + .size(ButtonSize::None) + .visible_on_hover("section-header") + .on_click(move |_, cx| { + let item = ClipboardItem::new(channel_link_copy.clone()); + cx.write_to_clipboard(item) + }) + .tooltip(|cx| Tooltip::text("Copy channel link", cx)) + .into_any_element() + }), + Section::Contacts => Some( + IconButton::new("add-contact", IconName::Plus) + .on_click(cx.listener(|this, _, cx| this.toggle_contact_finder(cx))) + .tooltip(|cx| Tooltip::text("Search for new contact", cx)) + .into_any_element(), + ), + Section::Channels => Some( + IconButton::new("add-channel", IconName::Plus) + .on_click(cx.listener(|this, _, cx| this.new_root_channel(cx))) + .tooltip(|cx| Tooltip::text("Create a channel", cx)) + .into_any_element(), + ), + _ => None, + }; + + let can_collapse = match section { + Section::ActiveCall | Section::Channels | Section::Contacts => false, + Section::ChannelInvites + | Section::ContactRequests + | Section::Online + | Section::Offline => true, + }; + + h_flex().w_full().group("section-header").child( + ListHeader::new(text) + .when(can_collapse, |header| { + header + .toggle(Some(!is_collapsed)) + .on_toggle(cx.listener(move |this, _, cx| { + this.toggle_section_expanded(section, cx); + })) + }) + .inset(true) + .end_slot::(button) + .selected(is_selected), + ) + } + + fn render_contact( + &self, + contact: &Arc, + calling: bool, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + let online = contact.online; + let busy = contact.busy || calling; + let github_login = SharedString::from(contact.user.github_login.clone()); + let item = ListItem::new(github_login.clone()) + .indent_level(1) + .indent_step_size(px(20.)) + .selected(is_selected) + .child( + h_flex() + .w_full() + .justify_between() + .child(Label::new(github_login.clone())) + .when(calling, |el| { + el.child(Label::new("Calling").color(Color::Muted)) + }) + .when(!calling, |el| { + el.child( + IconButton::new("contact context menu", IconName::Ellipsis) + .icon_color(Color::Muted) + .visible_on_hover("") + .on_click(cx.listener({ + let contact = contact.clone(); + move |this, event: &ClickEvent, cx| { + this.deploy_contact_context_menu( + event.down.position, 
+ contact.clone(), + cx, + ); + } + })), + ) + }), + ) + .on_secondary_mouse_down(cx.listener({ + let contact = contact.clone(); + move |this, event: &MouseDownEvent, cx| { + this.deploy_contact_context_menu(event.position, contact.clone(), cx); + } + })) + .start_slot( + // todo handle contacts with no avatar + Avatar::new(contact.user.avatar_uri.clone()) + .indicator::(if online { + Some(AvatarAvailabilityIndicator::new(match busy { + true => ui::Availability::Busy, + false => ui::Availability::Free, + })) + } else { + None + }), + ); + + div() + .id(github_login.clone()) + .group("") + .child(item) + .tooltip(move |cx| { + let text = if !online { + format!(" {} is offline", &github_login) + } else if busy { + format!(" {} is on a call", &github_login) + } else { + let room = ActiveCall::global(cx).read(cx).room(); + if room.is_some() { + format!("Invite {} to join call", &github_login) + } else { + format!("Call {}", &github_login) + } + }; + Tooltip::text(text, cx) + }) + } + + fn render_contact_request( + &self, + user: &Arc, + is_incoming: bool, + is_selected: bool, + cx: &mut ViewContext, + ) -> impl IntoElement { + let github_login = SharedString::from(user.github_login.clone()); + let user_id = user.id; + let is_response_pending = self.user_store.read(cx).is_contact_request_pending(&user); + let color = if is_response_pending { + Color::Muted + } else { + Color::Default + }; + + let controls = if is_incoming { + vec![ + IconButton::new("decline-contact", IconName::Close) + .on_click(cx.listener(move |this, _, cx| { + this.respond_to_contact_request(user_id, false, cx); + })) + .icon_color(color) + .tooltip(|cx| Tooltip::text("Decline invite", cx)), + IconButton::new("accept-contact", IconName::Check) + .on_click(cx.listener(move |this, _, cx| { + this.respond_to_contact_request(user_id, true, cx); + })) + .icon_color(color) + .tooltip(|cx| Tooltip::text("Accept invite", cx)), + ] + } else { + let github_login = github_login.clone(); + vec![IconButton::new("remove_contact", IconName::Close) + .on_click(cx.listener(move |this, _, cx| { + this.remove_contact(user_id, &github_login, cx); + })) + .icon_color(color) + .tooltip(|cx| Tooltip::text("Cancel invite", cx))] + }; + + ListItem::new(github_login.clone()) + .indent_level(1) + .indent_step_size(px(20.)) + .selected(is_selected) + .child( + h_flex() + .w_full() + .justify_between() + .child(Label::new(github_login.clone())) + .child(h_flex().children(controls)), + ) + .start_slot(Avatar::new(user.avatar_uri.clone())) + } + + fn render_channel_invite( + &self, + channel: &Arc, + is_selected: bool, + cx: &mut ViewContext, + ) -> ListItem { + let channel_id = channel.id; + let response_is_pending = self + .channel_store + .read(cx) + .has_pending_channel_invite_response(&channel); + let color = if response_is_pending { + Color::Muted + } else { + Color::Default + }; + + let controls = [ + IconButton::new("reject-invite", IconName::Close) + .on_click(cx.listener(move |this, _, cx| { + this.respond_to_channel_invite(channel_id, false, cx); + })) + .icon_color(color) + .tooltip(|cx| Tooltip::text("Decline invite", cx)), + IconButton::new("accept-invite", IconName::Check) + .on_click(cx.listener(move |this, _, cx| { + this.respond_to_channel_invite(channel_id, true, cx); + })) + .icon_color(color) + .tooltip(|cx| Tooltip::text("Accept invite", cx)), + ]; + + ListItem::new(("channel-invite", channel.id.0 as usize)) + .selected(is_selected) + .child( + h_flex() + .w_full() + .justify_between() + .child(Label::new(channel.name.clone())) + 
.child(h_flex().children(controls)), + ) + .start_slot( + Icon::new(IconName::Hash) + .size(IconSize::Small) + .color(Color::Muted), + ) + } + + fn render_contact_placeholder( + &self, + is_selected: bool, + cx: &mut ViewContext, + ) -> ListItem { + ListItem::new("contact-placeholder") + .child(Icon::new(IconName::Plus)) + .child(Label::new("Add a Contact")) + .selected(is_selected) + .on_click(cx.listener(|this, _, cx| this.toggle_contact_finder(cx))) + } + + fn render_channel( + &self, + channel: &Channel, + depth: usize, + has_children: bool, + is_selected: bool, + ix: usize, + cx: &mut ViewContext, + ) -> impl IntoElement { + let channel_id = channel.id; + + let is_active = maybe!({ + let call_channel = ActiveCall::global(cx) + .read(cx) + .room()? + .read(cx) + .channel_id()?; + Some(call_channel == channel_id) + }) + .unwrap_or(false); + let channel_store = self.channel_store.read(cx); + let is_public = channel_store + .channel_for_id(channel_id) + .map(|channel| channel.visibility) + == Some(proto::ChannelVisibility::Public); + let disclosed = + has_children.then(|| self.collapsed_channels.binary_search(&channel.id).is_err()); + + let has_messages_notification = channel_store.has_new_messages(channel_id); + let has_notes_notification = channel_store.has_channel_buffer_changed(channel_id); + + const FACEPILE_LIMIT: usize = 3; + let participants = self.channel_store.read(cx).channel_participants(channel_id); + + let face_pile = if participants.is_empty() { + None + } else { + let extra_count = participants.len().saturating_sub(FACEPILE_LIMIT); + let result = FacePile::new( + participants + .iter() + .map(|user| Avatar::new(user.avatar_uri.clone()).into_any_element()) + .take(FACEPILE_LIMIT) + .chain(if extra_count > 0 { + Some( + div() + .ml_2() + .child(Label::new(format!("+{extra_count}"))) + .into_any_element(), + ) + } else { + None + }) + .collect::>(), + ); + + Some(result) + }; + + let width = self.width.unwrap_or(px(240.)); + let root_id = channel.root_id(); + + div() + .h_6() + .id(channel_id.0 as usize) + .group("") + .flex() + .w_full() + .when(!channel.is_root_channel(), |el| { + el.on_drag(channel.clone(), move |channel, cx| { + cx.new_view(|_| DraggedChannelView { + channel: channel.clone(), + width, + }) + }) + }) + .drag_over::({ + move |style, dragged_channel: &Channel, cx| { + if dragged_channel.root_id() == root_id { + style.bg(cx.theme().colors().ghost_element_hover) + } else { + style + } + } + }) + .on_drop(cx.listener(move |this, dragged_channel: &Channel, cx| { + if dragged_channel.root_id() != root_id { + return; + } + this.move_channel(dragged_channel.id, channel_id, cx); + })) + .child( + ListItem::new(channel_id.0 as usize) + // Add one level of depth for the disclosure arrow. 
+ .indent_level(depth + 1) + .indent_step_size(px(20.)) + .selected(is_selected || is_active) + .toggle(disclosed) + .on_toggle( + cx.listener(move |this, _, cx| { + this.toggle_channel_collapsed(channel_id, cx) + }), + ) + .on_click(cx.listener(move |this, _, cx| { + if is_active { + this.open_channel_notes(channel_id, cx) + } else { + this.join_channel(channel_id, cx) + } + })) + .on_secondary_mouse_down(cx.listener( + move |this, event: &MouseDownEvent, cx| { + this.deploy_channel_context_menu(event.position, channel_id, ix, cx) + }, + )) + .start_slot( + div() + .relative() + .child( + Icon::new(if is_public { + IconName::Public + } else { + IconName::Hash + }) + .size(IconSize::Small) + .color(Color::Muted), + ) + .children(has_notes_notification.then(|| { + div() + .w_1p5() + .absolute() + .right(px(-1.)) + .top(px(-1.)) + .child(Indicator::dot().color(Color::Info)) + })), + ) + .child( + h_flex() + .id(channel_id.0 as usize) + .child(Label::new(channel.name.clone())) + .children(face_pile.map(|face_pile| face_pile.p_1())), + ), + ) + .child( + h_flex().absolute().right(rems(0.)).h_full().child( + h_flex() + .h_full() + .gap_1() + .px_1() + .child( + IconButton::new("channel_chat", IconName::MessageBubbles) + .style(ButtonStyle::Filled) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .icon_color(if has_messages_notification { + Color::Default + } else { + Color::Muted + }) + .on_click(cx.listener(move |this, _, cx| { + this.join_channel_chat(channel_id, cx) + })) + .tooltip(|cx| Tooltip::text("Open channel chat", cx)) + .visible_on_hover(""), + ) + .child( + IconButton::new("channel_notes", IconName::File) + .style(ButtonStyle::Filled) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .icon_color(if has_notes_notification { + Color::Default + } else { + Color::Muted + }) + .on_click(cx.listener(move |this, _, cx| { + this.open_channel_notes(channel_id, cx) + })) + .tooltip(|cx| Tooltip::text("Open channel notes", cx)) + .visible_on_hover(""), + ), + ), + ) + .tooltip({ + let channel_store = self.channel_store.clone(); + move |cx| { + cx.new_view(|_| JoinChannelTooltip { + channel_store: channel_store.clone(), + channel_id, + has_notes_notification, + }) + .into() + } + }) + } + + fn render_channel_editor(&self, depth: usize, _cx: &mut ViewContext) -> impl IntoElement { + let item = ListItem::new("channel-editor") + .inset(false) + // Add one level of depth for the disclosure arrow. 
+ .indent_level(depth + 1) + .indent_step_size(px(20.)) + .start_slot( + Icon::new(IconName::Hash) + .size(IconSize::Small) + .color(Color::Muted), + ); + + if let Some(pending_name) = self + .channel_editing_state + .as_ref() + .and_then(|state| state.pending_name()) + { + item.child(Label::new(pending_name)) + } else { + item.child(self.channel_name_editor.clone()) + } + } +} + +fn render_tree_branch(is_last: bool, overdraw: bool, cx: &mut WindowContext) -> impl IntoElement { + let rem_size = cx.rem_size(); + let line_height = cx.text_style().line_height_in_pixels(rem_size); + let width = rem_size * 1.5; + let thickness = px(1.); + let color = cx.theme().colors().text; + + canvas( + |_, _| {}, + move |bounds, _, cx| { + let start_x = (bounds.left() + bounds.right() - thickness) / 2.; + let start_y = (bounds.top() + bounds.bottom() - thickness) / 2.; + let right = bounds.right(); + let top = bounds.top(); + + cx.paint_quad(fill( + Bounds::from_corners( + point(start_x, top), + point( + start_x + thickness, + if is_last { + start_y + } else { + bounds.bottom() + if overdraw { px(1.) } else { px(0.) } + }, + ), + ), + color, + )); + cx.paint_quad(fill( + Bounds::from_corners(point(start_x, start_y), point(right, start_y + thickness)), + color, + )); + }, + ) + .w(width) + .h(line_height) +} + +impl Render for CollabPanel { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + v_flex() + .key_context("CollabPanel") + .on_action(cx.listener(CollabPanel::cancel)) + .on_action(cx.listener(CollabPanel::select_next)) + .on_action(cx.listener(CollabPanel::select_prev)) + .on_action(cx.listener(CollabPanel::confirm)) + .on_action(cx.listener(CollabPanel::insert_space)) + .on_action(cx.listener(CollabPanel::remove_selected_channel)) + .on_action(cx.listener(CollabPanel::show_inline_context_menu)) + .on_action(cx.listener(CollabPanel::rename_selected_channel)) + .on_action(cx.listener(CollabPanel::collapse_selected_channel)) + .on_action(cx.listener(CollabPanel::expand_selected_channel)) + .on_action(cx.listener(CollabPanel::start_move_selected_channel)) + .track_focus(&self.focus_handle) + .size_full() + .child(if self.user_store.read(cx).current_user().is_none() { + self.render_signed_out(cx) + } else { + self.render_signed_in(cx) + }) + .children(self.context_menu.as_ref().map(|(menu, position, _)| { + deferred( + anchored() + .position(*position) + .anchor(gpui::AnchorCorner::TopLeft) + .child(menu.clone()), + ) + .with_priority(1) + })) + } +} + +impl EventEmitter for CollabPanel {} + +impl Panel for CollabPanel { + fn position(&self, cx: &gpui::WindowContext) -> DockPosition { + CollaborationPanelSettings::get_global(cx).dock + } + + fn position_is_valid(&self, position: DockPosition) -> bool { + matches!(position, DockPosition::Left | DockPosition::Right) + } + + fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext) { + settings::update_settings_file::( + self.fs.clone(), + cx, + move |settings| settings.dock = Some(position), + ); + } + + fn size(&self, cx: &gpui::WindowContext) -> Pixels { + self.width + .unwrap_or_else(|| CollaborationPanelSettings::get_global(cx).default_width) + } + + fn set_size(&mut self, size: Option, cx: &mut ViewContext) { + self.width = size; + self.serialize(cx); + cx.notify(); + } + + fn icon(&self, cx: &gpui::WindowContext) -> Option { + CollaborationPanelSettings::get_global(cx) + .button + .then(|| ui::IconName::Collab) + } + + fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { + Some("Collab Panel") + } + + 
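+ // ToggleFocus is the action associated with this panel; dispatching it toggles the collab panel.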
fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } + + fn persistent_name() -> &'static str { + "CollabPanel" + } +} + +impl FocusableView for CollabPanel { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.filter_editor.focus_handle(cx).clone() + } +} + +impl PartialEq for ListEntry { + fn eq(&self, other: &Self) -> bool { + match self { + ListEntry::Header(section_1) => { + if let ListEntry::Header(section_2) = other { + return section_1 == section_2; + } + } + ListEntry::CallParticipant { user: user_1, .. } => { + if let ListEntry::CallParticipant { user: user_2, .. } = other { + return user_1.id == user_2.id; + } + } + ListEntry::ParticipantProject { + project_id: project_id_1, + .. + } => { + if let ListEntry::ParticipantProject { + project_id: project_id_2, + .. + } = other + { + return project_id_1 == project_id_2; + } + } + ListEntry::ParticipantScreen { + peer_id: peer_id_1, .. + } => { + if let ListEntry::ParticipantScreen { + peer_id: peer_id_2, .. + } = other + { + return peer_id_1 == peer_id_2; + } + } + ListEntry::Channel { + channel: channel_1, .. + } => { + if let ListEntry::Channel { + channel: channel_2, .. + } = other + { + return channel_1.id == channel_2.id; + } + } + ListEntry::HostedProject { id, .. } => { + if let ListEntry::HostedProject { id: other_id, .. } = other { + return id == other_id; + } + } + ListEntry::ChannelNotes { channel_id } => { + if let ListEntry::ChannelNotes { + channel_id: other_id, + } = other + { + return channel_id == other_id; + } + } + ListEntry::ChannelChat { channel_id } => { + if let ListEntry::ChannelChat { + channel_id: other_id, + } = other + { + return channel_id == other_id; + } + } + ListEntry::ChannelInvite(channel_1) => { + if let ListEntry::ChannelInvite(channel_2) = other { + return channel_1.id == channel_2.id; + } + } + ListEntry::IncomingRequest(user_1) => { + if let ListEntry::IncomingRequest(user_2) = other { + return user_1.id == user_2.id; + } + } + ListEntry::OutgoingRequest(user_1) => { + if let ListEntry::OutgoingRequest(user_2) = other { + return user_1.id == user_2.id; + } + } + ListEntry::Contact { + contact: contact_1, .. + } => { + if let ListEntry::Contact { + contact: contact_2, .. 
+ } = other + { + return contact_1.user.id == contact_2.user.id; + } + } + ListEntry::ChannelEditor { depth } => { + if let ListEntry::ChannelEditor { depth: other_depth } = other { + return depth == other_depth; + } + } + ListEntry::ContactPlaceholder => { + if let ListEntry::ContactPlaceholder = other { + return true; + } + } + } + false + } +} + +struct DraggedChannelView { + channel: Channel, + width: Pixels, +} + +impl Render for DraggedChannelView { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let ui_font = ThemeSettings::get_global(cx).ui_font.family.clone(); + h_flex() + .font_family(ui_font) + .bg(cx.theme().colors().background) + .w(self.width) + .p_1() + .gap_1() + .child( + Icon::new( + if self.channel.visibility == proto::ChannelVisibility::Public { + IconName::Public + } else { + IconName::Hash + }, + ) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(Label::new(self.channel.name.clone())) + } +} + +struct JoinChannelTooltip { + channel_store: Model, + channel_id: ChannelId, + #[allow(unused)] + has_notes_notification: bool, +} + +impl Render for JoinChannelTooltip { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + tooltip_container(cx, |container, cx| { + let participants = self + .channel_store + .read(cx) + .channel_participants(self.channel_id); + + container + .child(Label::new("Join channel")) + .children(participants.iter().map(|participant| { + h_flex() + .gap_2() + .child(Avatar::new(participant.avatar_uri.clone())) + .child(Label::new(participant.github_login.clone())) + })) + }) + } +} diff --git a/crates/collab_ui/src/collab_panel/channel_modal.rs b/crates/collab_ui/src/collab_panel/channel_modal.rs new file mode 100644 index 0000000..4e943d3 --- /dev/null +++ b/crates/collab_ui/src/collab_panel/channel_modal.rs @@ -0,0 +1,620 @@ +use channel::{ChannelMembership, ChannelStore}; +use client::{ + proto::{self, ChannelRole, ChannelVisibility}, + ChannelId, User, UserId, UserStore, +}; +use fuzzy::{match_strings, StringMatchCandidate}; +use gpui::{ + actions, anchored, deferred, div, AppContext, ClipboardItem, DismissEvent, EventEmitter, + FocusableView, Model, ParentElement, Render, Styled, Subscription, Task, View, ViewContext, + VisualContext, WeakView, +}; +use picker::{Picker, PickerDelegate}; +use std::sync::Arc; +use ui::{prelude::*, Avatar, CheckboxWithLabel, ContextMenu, ListItem, ListItemSpacing}; +use util::TryFutureExt; +use workspace::{notifications::DetachAndPromptErr, ModalView}; + +actions!( + channel_modal, + [ + SelectNextControl, + ToggleMode, + ToggleMemberAdmin, + RemoveMember + ] +); + +pub struct ChannelModal { + picker: View>, + channel_store: Model, + channel_id: ChannelId, +} + +impl ChannelModal { + pub fn new( + user_store: Model, + channel_store: Model, + channel_id: ChannelId, + mode: Mode, + cx: &mut ViewContext, + ) -> Self { + cx.observe(&channel_store, |_, _, cx| cx.notify()).detach(); + let channel_modal = cx.view().downgrade(); + let picker = cx.new_view(|cx| { + Picker::uniform_list( + ChannelModalDelegate { + channel_modal, + matching_users: Vec::new(), + matching_member_indices: Vec::new(), + selected_index: 0, + user_store: user_store.clone(), + channel_store: channel_store.clone(), + channel_id, + match_candidates: Vec::new(), + context_menu: None, + members: Vec::new(), + has_all_members: false, + mode, + }, + cx, + ) + .modal(false) + }); + + Self { + picker, + channel_store, + channel_id, + } + } + + fn toggle_mode(&mut self, _: &ToggleMode, cx: &mut ViewContext) { + 
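+ // Switch the modal between its "Manage Members" and "Invite Members" tabs.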
let mode = match self.picker.read(cx).delegate.mode { + Mode::ManageMembers => Mode::InviteMembers, + Mode::InviteMembers => Mode::ManageMembers, + }; + self.set_mode(mode, cx); + } + + fn set_mode(&mut self, mode: Mode, cx: &mut ViewContext) { + self.picker.update(cx, |picker, cx| { + let delegate = &mut picker.delegate; + delegate.mode = mode; + delegate.selected_index = 0; + picker.set_query("", cx); + picker.update_matches(picker.query(cx), cx); + cx.notify() + }); + cx.notify() + } + + fn set_channel_visibility(&mut self, selection: &Selection, cx: &mut ViewContext) { + self.channel_store.update(cx, |channel_store, cx| { + channel_store + .set_channel_visibility( + self.channel_id, + match selection { + Selection::Unselected => ChannelVisibility::Members, + Selection::Selected => ChannelVisibility::Public, + Selection::Indeterminate => return, + }, + cx, + ) + .detach_and_log_err(cx) + }); + } + + fn dismiss(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { + cx.emit(DismissEvent); + } +} + +impl EventEmitter for ChannelModal {} +impl ModalView for ChannelModal {} + +impl FocusableView for ChannelModal { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for ChannelModal { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let channel_store = self.channel_store.read(cx); + let Some(channel) = channel_store.channel_for_id(self.channel_id) else { + return div(); + }; + let channel_name = channel.name.clone(); + let channel_id = channel.id; + let visibility = channel.visibility; + let mode = self.picker.read(cx).delegate.mode; + + v_flex() + .key_context("ChannelModal") + .on_action(cx.listener(Self::toggle_mode)) + .on_action(cx.listener(Self::dismiss)) + .elevation_3(cx) + .w(rems(34.)) + .child( + v_flex() + .px_2() + .py_1() + .gap_2() + .child( + h_flex() + .w_px() + .flex_1() + .gap_1() + .child(Icon::new(IconName::Hash).size(IconSize::Medium)) + .child(Label::new(channel_name)), + ) + .child( + h_flex() + .w_full() + .h(rems_from_px(22.)) + .justify_between() + .line_height(rems(1.25)) + .child(CheckboxWithLabel::new( + "is-public", + Label::new("Public").size(LabelSize::Small), + if visibility == ChannelVisibility::Public { + ui::Selection::Selected + } else { + ui::Selection::Unselected + }, + cx.listener(Self::set_channel_visibility), + )) + .children( + Some( + Button::new("copy-link", "Copy Link") + .label_size(LabelSize::Small) + .on_click(cx.listener(move |this, _, cx| { + if let Some(channel) = this + .channel_store + .read(cx) + .channel_for_id(channel_id) + { + let item = ClipboardItem::new(channel.link(cx)); + cx.write_to_clipboard(item); + } + })), + ) + .filter(|_| visibility == ChannelVisibility::Public), + ), + ) + .child( + h_flex() + .child( + div() + .id("manage-members") + .px_2() + .py_1() + .cursor_pointer() + .border_b_2() + .when(mode == Mode::ManageMembers, |this| { + this.border_color(cx.theme().colors().border) + }) + .child(Label::new("Manage Members")) + .on_click(cx.listener(|this, _, cx| { + this.set_mode(Mode::ManageMembers, cx); + })), + ) + .child( + div() + .id("invite-members") + .px_2() + .py_1() + .cursor_pointer() + .border_b_2() + .when(mode == Mode::InviteMembers, |this| { + this.border_color(cx.theme().colors().border) + }) + .child(Label::new("Invite Members")) + .on_click(cx.listener(|this, _, cx| { + this.set_mode(Mode::InviteMembers, cx); + })), + ), + ), + ) + .child(self.picker.clone()) + } +} + +#[derive(Copy, Clone, PartialEq)] +pub enum Mode { + 
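+ /// Browse existing members and invitees; selecting one opens a role-management menu.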
ManageMembers, + InviteMembers, +} + +pub struct ChannelModalDelegate { + channel_modal: WeakView, + matching_users: Vec>, + matching_member_indices: Vec, + user_store: Model, + channel_store: Model, + channel_id: ChannelId, + selected_index: usize, + mode: Mode, + match_candidates: Vec, + members: Vec, + has_all_members: bool, + context_menu: Option<(View, Subscription)>, +} + +impl PickerDelegate for ChannelModalDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Search collaborator by username...".into() + } + + fn match_count(&self) -> usize { + match self.mode { + Mode::ManageMembers => self.matching_member_indices.len(), + Mode::InviteMembers => self.matching_users.len(), + } + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches(&mut self, query: String, cx: &mut ViewContext>) -> Task<()> { + match self.mode { + Mode::ManageMembers => { + if self.has_all_members { + self.match_candidates.clear(); + self.match_candidates + .extend(self.members.iter().enumerate().map(|(id, member)| { + StringMatchCandidate { + id, + string: member.user.github_login.clone(), + char_bag: member.user.github_login.chars().collect(), + } + })); + + let matches = cx.background_executor().block(match_strings( + &self.match_candidates, + &query, + true, + usize::MAX, + &Default::default(), + cx.background_executor().clone(), + )); + + cx.spawn(|picker, mut cx| async move { + picker + .update(&mut cx, |picker, cx| { + let delegate = &mut picker.delegate; + delegate.matching_member_indices.clear(); + delegate + .matching_member_indices + .extend(matches.into_iter().map(|m| m.candidate_id)); + cx.notify(); + }) + .ok(); + }) + } else { + let search_members = self.channel_store.update(cx, |store, cx| { + store.fuzzy_search_members(self.channel_id, query.clone(), 100, cx) + }); + cx.spawn(|picker, mut cx| async move { + async { + let members = search_members.await?; + picker.update(&mut cx, |picker, cx| { + picker.delegate.has_all_members = + query == "" && members.len() < 100; + picker.delegate.matching_member_indices = + (0..members.len()).collect(); + picker.delegate.members = members; + cx.notify(); + })?; + anyhow::Ok(()) + } + .log_err() + .await; + }) + } + } + Mode::InviteMembers => { + let search_users = self + .user_store + .update(cx, |store, cx| store.fuzzy_search_users(query, cx)); + cx.spawn(|picker, mut cx| async move { + async { + let users = search_users.await?; + picker.update(&mut cx, |picker, cx| { + picker.delegate.matching_users = users; + cx.notify(); + })?; + anyhow::Ok(()) + } + .log_err() + .await; + }) + } + } + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if let Some(selected_user) = self.user_at_index(self.selected_index) { + if Some(selected_user.id) == self.user_store.read(cx).current_user().map(|user| user.id) + { + return; + } + match self.mode { + Mode::ManageMembers => self.show_context_menu(self.selected_index, cx), + Mode::InviteMembers => match self.member_status(selected_user.id, cx) { + Some(proto::channel_member::Kind::Invitee) => { + self.remove_member(selected_user.id, cx); + } + Some(proto::channel_member::Kind::Member) => {} + None => self.invite_member(selected_user, cx), + }, + } + } + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + if self.context_menu.is_none() { + self.channel_modal + .update(cx, |_, cx| { + cx.emit(DismissEvent); + }) + .ok(); 
+ } + } + + fn render_match( + &self, + ix: usize, + selected: bool, + cx: &mut ViewContext>, + ) -> Option { + let user = self.user_at_index(ix)?; + let membership = self.member_at_index(ix); + let request_status = self.member_status(user.id, cx); + let is_me = self.user_store.read(cx).current_user().map(|user| user.id) == Some(user.id); + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .start_slot(Avatar::new(user.avatar_uri.clone())) + .child(Label::new(user.github_login.clone())) + .end_slot(h_flex().gap_2().map(|slot| { + match self.mode { + Mode::ManageMembers => slot + .children( + if request_status == Some(proto::channel_member::Kind::Invitee) { + Some(Label::new("Invited")) + } else { + None + }, + ) + .children(match membership.map(|m| m.role) { + Some(ChannelRole::Admin) => Some(Label::new("Admin")), + Some(ChannelRole::Guest) => Some(Label::new("Guest")), + _ => None, + }) + .when(!is_me, |el| { + el.child(IconButton::new("ellipsis", IconName::Ellipsis)) + }) + .when(is_me, |el| el.child(Label::new("You").color(Color::Muted))) + .children( + if let (Some((menu, _)), true) = (&self.context_menu, selected) { + Some( + deferred( + anchored() + .anchor(gpui::AnchorCorner::TopRight) + .child(menu.clone()), + ) + .with_priority(1), + ) + } else { + None + }, + ), + Mode::InviteMembers => match request_status { + Some(proto::channel_member::Kind::Invitee) => { + slot.children(Some(Label::new("Invited"))) + } + Some(proto::channel_member::Kind::Member) => { + slot.children(Some(Label::new("Member"))) + } + _ => slot, + }, + } + })), + ) + } +} + +impl ChannelModalDelegate { + fn member_status( + &self, + user_id: UserId, + cx: &AppContext, + ) -> Option { + self.members + .iter() + .find_map(|membership| (membership.user.id == user_id).then_some(membership.kind)) + .or_else(|| { + self.channel_store + .read(cx) + .has_pending_channel_invite(self.channel_id, user_id) + .then_some(proto::channel_member::Kind::Invitee) + }) + } + + fn member_at_index(&self, ix: usize) -> Option<&ChannelMembership> { + self.matching_member_indices + .get(ix) + .and_then(|ix| self.members.get(*ix)) + } + + fn user_at_index(&self, ix: usize) -> Option> { + match self.mode { + Mode::ManageMembers => self.matching_member_indices.get(ix).and_then(|ix| { + let channel_membership = self.members.get(*ix)?; + Some(channel_membership.user.clone()) + }), + Mode::InviteMembers => self.matching_users.get(ix).cloned(), + } + } + + fn set_user_role( + &mut self, + user_id: UserId, + new_role: ChannelRole, + cx: &mut ViewContext>, + ) -> Option<()> { + let update = self.channel_store.update(cx, |store, cx| { + store.set_member_role(self.channel_id, user_id, new_role, cx) + }); + cx.spawn(|picker, mut cx| async move { + update.await?; + picker.update(&mut cx, |picker, cx| { + let this = &mut picker.delegate; + if let Some(member) = this.members.iter_mut().find(|m| m.user.id == user_id) { + member.role = new_role; + } + cx.focus_self(); + cx.notify(); + }) + }) + .detach_and_prompt_err("Failed to update role", cx, |_, _| None); + Some(()) + } + + fn remove_member(&mut self, user_id: UserId, cx: &mut ViewContext>) -> Option<()> { + let update = self.channel_store.update(cx, |store, cx| { + store.remove_member(self.channel_id, user_id, cx) + }); + cx.spawn(|picker, mut cx| async move { + update.await?; + picker.update(&mut cx, |picker, cx| { + let this = &mut picker.delegate; + if let Some(ix) = this.members.iter_mut().position(|m| m.user.id == user_id) { + 
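+                    // Removing an entry from `members` invalidates the indices stored in
+                    // `matching_member_indices`: the removed index has to be dropped and every
+                    // index past it shifted down by one so the remaining matches still point at
+                    // the right members. The `retain_mut` below does both in a single pass.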
this.members.remove(ix); + this.matching_member_indices.retain_mut(|member_ix| { + if *member_ix == ix { + return false; + } else if *member_ix > ix { + *member_ix -= 1; + } + true + }) + } + + this.selected_index = this + .selected_index + .min(this.matching_member_indices.len().saturating_sub(1)); + + picker.focus(cx); + cx.notify(); + }) + }) + .detach_and_prompt_err("Failed to remove member", cx, |_, _| None); + Some(()) + } + + fn invite_member(&mut self, user: Arc, cx: &mut ViewContext>) { + let invite_member = self.channel_store.update(cx, |store, cx| { + store.invite_member(self.channel_id, user.id, ChannelRole::Member, cx) + }); + + cx.spawn(|this, mut cx| async move { + invite_member.await?; + + this.update(&mut cx, |this, cx| { + let new_member = ChannelMembership { + user, + kind: proto::channel_member::Kind::Invitee, + role: ChannelRole::Member, + }; + let members = &mut this.delegate.members; + match members.binary_search_by_key(&new_member.sort_key(), |k| k.sort_key()) { + Ok(ix) | Err(ix) => members.insert(ix, new_member), + } + + cx.notify(); + }) + }) + .detach_and_prompt_err("Failed to invite member", cx, |_, _| None); + } + + fn show_context_menu(&mut self, ix: usize, cx: &mut ViewContext>) { + let Some(membership) = self.member_at_index(ix) else { + return; + }; + let user_id = membership.user.id; + let picker = cx.view().clone(); + let context_menu = ContextMenu::build(cx, |mut menu, _cx| { + let role = membership.role; + + if role == ChannelRole::Admin || role == ChannelRole::Member { + let picker = picker.clone(); + menu = menu.entry("Demote to Guest", None, move |cx| { + picker.update(cx, |picker, cx| { + picker + .delegate + .set_user_role(user_id, ChannelRole::Guest, cx); + }) + }); + } + + if role == ChannelRole::Admin || role == ChannelRole::Guest { + let picker = picker.clone(); + let label = if role == ChannelRole::Guest { + "Promote to Member" + } else { + "Demote to Member" + }; + + menu = menu.entry(label, None, move |cx| { + picker.update(cx, |picker, cx| { + picker + .delegate + .set_user_role(user_id, ChannelRole::Member, cx); + }) + }); + } + + if role == ChannelRole::Member || role == ChannelRole::Guest { + let picker = picker.clone(); + menu = menu.entry("Promote to Admin", None, move |cx| { + picker.update(cx, |picker, cx| { + picker + .delegate + .set_user_role(user_id, ChannelRole::Admin, cx); + }) + }); + }; + + menu = menu.separator(); + menu = menu.entry("Remove from Channel", None, { + let picker = picker.clone(); + move |cx| { + picker.update(cx, |picker, cx| { + picker.delegate.remove_member(user_id, cx); + }) + } + }); + menu + }); + cx.focus_view(&context_menu); + let subscription = cx.subscribe(&context_menu, |picker, _, _: &DismissEvent, cx| { + picker.delegate.context_menu = None; + picker.focus(cx); + cx.notify(); + }); + self.context_menu = Some((context_menu, subscription)); + } +} diff --git a/crates/collab_ui/src/collab_panel/contact_finder.rs b/crates/collab_ui/src/collab_panel/contact_finder.rs new file mode 100644 index 0000000..ff58c83 --- /dev/null +++ b/crates/collab_ui/src/collab_panel/contact_finder.rs @@ -0,0 +1,161 @@ +use client::{ContactRequestStatus, User, UserStore}; +use gpui::{ + AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model, ParentElement as _, + Render, Styled, Task, View, ViewContext, VisualContext, WeakView, +}; +use picker::{Picker, PickerDelegate}; +use std::sync::Arc; +use theme::ActiveTheme as _; +use ui::{prelude::*, Avatar, ListItem, ListItemSpacing}; +use util::{ResultExt as _, 
TryFutureExt};
+use workspace::ModalView;
+
+pub struct ContactFinder {
+    picker: View<Picker<ContactFinderDelegate>>,
+}
+
+impl ContactFinder {
+    pub fn new(user_store: Model<UserStore>, cx: &mut ViewContext<Self>) -> Self {
+        let delegate = ContactFinderDelegate {
+            parent: cx.view().downgrade(),
+            user_store,
+            potential_contacts: Arc::from([]),
+            selected_index: 0,
+        };
+        let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx).modal(false));
+
+        Self { picker }
+    }
+
+    pub fn set_query(&mut self, query: String, cx: &mut ViewContext<Self>) {
+        self.picker.update(cx, |picker, cx| {
+            picker.set_query(query, cx);
+        });
+    }
+}
+
+impl Render for ContactFinder {
+    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+        v_flex()
+            .elevation_3(cx)
+            .child(
+                v_flex()
+                    .px_2()
+                    .py_1()
+                    .bg(cx.theme().colors().element_background)
+                    // HACK: Prevent the background color from overflowing the parent container.
+                    .rounded_t(px(8.))
+                    .child(Label::new("Contacts"))
+                    .child(h_flex().child(Label::new("Invite new contacts"))),
+            )
+            .child(self.picker.clone())
+            .w(rems(34.))
+    }
+}
+
+pub struct ContactFinderDelegate {
+    parent: WeakView<ContactFinder>,
+    potential_contacts: Arc<[Arc<User>]>,
+    user_store: Model<UserStore>,
+    selected_index: usize,
+}
+
+impl EventEmitter<DismissEvent> for ContactFinder {}
+impl ModalView for ContactFinder {}
+
+impl FocusableView for ContactFinder {
+    fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
+        self.picker.focus_handle(cx)
+    }
+}
+
+impl PickerDelegate for ContactFinderDelegate {
+    type ListItem = ListItem;
+
+    fn match_count(&self) -> usize {
+        self.potential_contacts.len()
+    }
+
+    fn selected_index(&self) -> usize {
+        self.selected_index
+    }
+
+    fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext<Picker<Self>>) {
+        self.selected_index = ix;
+    }
+
+    fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
+        "Search collaborator by username...".into()
+    }
+
+    fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()> {
+        let search_users = self
+            .user_store
+            .update(cx, |store, cx| store.fuzzy_search_users(query, cx));
+
+        cx.spawn(|picker, mut cx| async move {
+            async {
+                let potential_contacts = search_users.await?;
+                picker.update(&mut cx, |picker, cx| {
+                    picker.delegate.potential_contacts = potential_contacts.into();
+                    cx.notify();
+                })?;
+                anyhow::Ok(())
+            }
+            .log_err()
+            .await;
+        })
+    }
+
+    fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
+        if let Some(user) = self.potential_contacts.get(self.selected_index) {
+            let user_store = self.user_store.read(cx);
+            match user_store.contact_request_status(user) {
+                ContactRequestStatus::None | ContactRequestStatus::RequestReceived => {
+                    self.user_store
+                        .update(cx, |store, cx| store.request_contact(user.id, cx))
+                        .detach();
+                }
+                ContactRequestStatus::RequestSent => {
+                    self.user_store
+                        .update(cx, |store, cx| store.remove_contact(user.id, cx))
+                        .detach();
+                }
+                _ => {}
+            }
+        }
+    }
+
+    fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
+        self.parent
+            .update(cx, |_, cx| cx.emit(DismissEvent))
+            .log_err();
+    }
+
+    fn render_match(
+        &self,
+        ix: usize,
+        selected: bool,
+        cx: &mut ViewContext<Picker<Self>>,
+    ) -> Option<Self::ListItem> {
+        let user = &self.potential_contacts[ix];
+        let request_status = self.user_store.read(cx).contact_request_status(user);
+
+        let icon_path = match request_status {
+            ContactRequestStatus::None | ContactRequestStatus::RequestReceived => {
+                Some("icons/check.svg")
+            }
+            ContactRequestStatus::RequestSent => Some("icons/x.svg"),
+            ContactRequestStatus::RequestAccepted => None,
+        };
+        Some(
+            ListItem::new(ix)
+                .inset(true)
.spacing(ListItemSpacing::Sparse) + .selected(selected) + .start_slot(Avatar::new(user.avatar_uri.clone())) + .child(Label::new(user.github_login.clone())) + .end_slot::(icon_path.map(|icon_path| Icon::from_path(icon_path))), + ) + } +} diff --git a/crates/collab_ui/src/collab_titlebar_item.rs b/crates/collab_ui/src/collab_titlebar_item.rs new file mode 100644 index 0000000..314c6e5 --- /dev/null +++ b/crates/collab_ui/src/collab_titlebar_item.rs @@ -0,0 +1,774 @@ +use crate::face_pile::FacePile; +use auto_update::AutoUpdateStatus; +use call::{ActiveCall, ParticipantLocation, Room}; +use client::{proto::PeerId, Client, User, UserStore}; +use gpui::{ + actions, canvas, div, point, px, Action, AnyElement, AppContext, Element, Hsla, + InteractiveElement, IntoElement, Model, ParentElement, Path, Render, + StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, +}; +use project::{Project, RepositoryEntry}; +use recent_projects::RecentProjects; +use rpc::proto::{self, DevServerStatus}; +use std::sync::Arc; +use theme::ActiveTheme; +use ui::{ + h_flex, popover_menu, prelude::*, Avatar, AvatarAudioStatusIndicator, Button, ButtonLike, + ButtonStyle, ContextMenu, Icon, IconButton, IconName, Indicator, TintColor, TitleBar, Tooltip, +}; +use util::ResultExt; +use vcs_menu::{build_branch_list, BranchList, OpenRecent as ToggleVcsMenu}; +use workspace::{notifications::NotifyResultExt, Workspace}; + +const MAX_PROJECT_NAME_LENGTH: usize = 40; +const MAX_BRANCH_NAME_LENGTH: usize = 40; + +actions!( + collab, + [ + ShareProject, + UnshareProject, + ToggleUserMenu, + ToggleProjectMenu, + SwitchBranch + ] +); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(|workspace: &mut Workspace, cx| { + let titlebar_item = cx.new_view(|cx| CollabTitlebarItem::new(workspace, cx)); + workspace.set_titlebar_item(titlebar_item.into(), cx) + }) + .detach(); +} + +pub struct CollabTitlebarItem { + project: Model, + user_store: Model, + client: Arc, + workspace: WeakView, + _subscriptions: Vec, +} + +impl Render for CollabTitlebarItem { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let room = ActiveCall::global(cx).read(cx).room().cloned(); + let current_user = self.user_store.read(cx).current_user(); + let client = self.client.clone(); + let project_id = self.project.read(cx).remote_id(); + let workspace = self.workspace.upgrade(); + + TitleBar::new("collab-titlebar", Box::new(workspace::CloseWindow)) + // note: on windows titlebar behaviour is handled by the platform implementation + .when(cfg!(not(windows)), |this| { + this.on_click(|event, cx| { + if event.up.click_count == 2 { + cx.zoom_window(); + } + }) + }) + // left side + .child( + h_flex() + .gap_1() + .children(self.render_project_host(cx)) + .child(self.render_project_name(cx)) + .children(self.render_project_branch(cx)) + .on_mouse_move(|_, cx| cx.stop_propagation()), + ) + .child( + h_flex() + .id("collaborator-list") + .w_full() + .gap_1() + .overflow_x_scroll() + .when_some( + current_user.clone().zip(client.peer_id()).zip(room.clone()), + |this, ((current_user, peer_id), room)| { + let player_colors = cx.theme().players(); + let room = room.read(cx); + let mut remote_participants = + room.remote_participants().values().collect::>(); + remote_participants.sort_by_key(|p| p.participant_index.0); + + let current_user_face_pile = self.render_collaborator( + ¤t_user, + peer_id, + true, + room.is_speaking(), + room.is_muted(), + None, + &room, + project_id, + ¤t_user, + cx, + ); + + 
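+                            // The local user's face pile is rendered first with the local player
+                            // color ribbon; the remote participants (sorted above by participant
+                            // index) follow, each wrapped in a clickable element that starts
+                            // following that participant and shows a "Follow {login}" tooltip.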
this.children(current_user_face_pile.map(|face_pile| { + v_flex() + .on_mouse_move(|_, cx| cx.stop_propagation()) + .child(face_pile) + .child(render_color_ribbon(player_colors.local().cursor)) + })) + .children( + remote_participants.iter().filter_map(|collaborator| { + let player_color = player_colors + .color_for_participant(collaborator.participant_index.0); + let is_following = workspace + .as_ref()? + .read(cx) + .is_being_followed(collaborator.peer_id); + let is_present = project_id.map_or(false, |project_id| { + collaborator.location + == ParticipantLocation::SharedProject { project_id } + }); + + let face_pile = self.render_collaborator( + &collaborator.user, + collaborator.peer_id, + is_present, + collaborator.speaking, + collaborator.muted, + is_following.then_some(player_color.selection), + &room, + project_id, + ¤t_user, + cx, + )?; + + Some( + v_flex() + .id(("collaborator", collaborator.user.id)) + .child(face_pile) + .child(render_color_ribbon(player_color.cursor)) + .cursor_pointer() + .on_click({ + let peer_id = collaborator.peer_id; + cx.listener(move |this, _, cx| { + this.workspace + .update(cx, |workspace, cx| { + workspace.follow(peer_id, cx); + }) + .ok(); + }) + }) + .tooltip({ + let login = collaborator.user.github_login.clone(); + move |cx| { + Tooltip::text(format!("Follow {login}"), cx) + } + }), + ) + }), + ) + }, + ), + ) + // right side + .child( + h_flex() + .gap_1() + .pr_1() + .on_mouse_move(|_, cx| cx.stop_propagation()) + .when_some(room, |this, room| { + let room = room.read(cx); + let project = self.project.read(cx); + let is_local = project.is_local(); + let is_dev_server_project = project.dev_server_project_id().is_some(); + let is_shared = (is_local || is_dev_server_project) && project.is_shared(); + let is_muted = room.is_muted(); + let is_deafened = room.is_deafened().unwrap_or(false); + let is_screen_sharing = room.is_screen_sharing(); + let can_use_microphone = room.can_use_microphone(); + let can_share_projects = room.can_share_projects(); + + this.when( + (is_local || is_dev_server_project) && can_share_projects, + |this| { + this.child( + Button::new( + "toggle_sharing", + if is_shared { "Unshare" } else { "Share" }, + ) + .tooltip(move |cx| { + Tooltip::text( + if is_shared { + "Stop sharing project with call participants" + } else { + "Share project with call participants" + }, + cx, + ) + }) + .style(ButtonStyle::Subtle) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .selected(is_shared) + .label_size(LabelSize::Small) + .on_click(cx.listener( + move |this, _, cx| { + if is_shared { + this.unshare_project(&Default::default(), cx); + } else { + this.share_project(&Default::default(), cx); + } + }, + )), + ) + }, + ) + .child( + div() + .child( + IconButton::new("leave-call", ui::IconName::Exit) + .style(ButtonStyle::Subtle) + .tooltip(|cx| Tooltip::text("Leave call", cx)) + .icon_size(IconSize::Small) + .on_click(move |_, cx| { + ActiveCall::global(cx) + .update(cx, |call, cx| call.hang_up(cx)) + .detach_and_log_err(cx); + }), + ) + .pr_2(), + ) + .when(can_use_microphone, |this| { + this.child( + IconButton::new( + "mute-microphone", + if is_muted { + ui::IconName::MicMute + } else { + ui::IconName::Mic + }, + ) + .tooltip(move |cx| { + Tooltip::text( + if is_muted { + "Unmute microphone" + } else { + "Mute microphone" + }, + cx, + ) + }) + .style(ButtonStyle::Subtle) + .icon_size(IconSize::Small) + .selected(is_muted) + .selected_style(ButtonStyle::Tinted(TintColor::Negative)) + .on_click(move |_, cx| 
crate::toggle_mute(&Default::default(), cx)), + ) + }) + .child( + IconButton::new( + "mute-sound", + if is_deafened { + ui::IconName::AudioOff + } else { + ui::IconName::AudioOn + }, + ) + .style(ButtonStyle::Subtle) + .selected_style(ButtonStyle::Tinted(TintColor::Negative)) + .icon_size(IconSize::Small) + .selected(is_deafened) + .tooltip(move |cx| { + if can_use_microphone { + Tooltip::with_meta( + "Deafen Audio", + None, + "Mic will be muted", + cx, + ) + } else { + Tooltip::text("Deafen Audio", cx) + } + }) + .on_click(move |_, cx| crate::toggle_deafen(&Default::default(), cx)), + ) + .when(can_share_projects, |this| { + this.child( + IconButton::new("screen-share", ui::IconName::Screen) + .style(ButtonStyle::Subtle) + .icon_size(IconSize::Small) + .selected(is_screen_sharing) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .tooltip(move |cx| { + Tooltip::text( + if is_screen_sharing { + "Stop Sharing Screen" + } else { + "Share Screen" + }, + cx, + ) + }) + .on_click(move |_, cx| { + crate::toggle_screen_sharing(&Default::default(), cx) + }), + ) + }) + .child(div().pr_2()) + }) + .map(|el| { + let status = self.client.status(); + let status = &*status.borrow(); + if matches!(status, client::Status::Connected { .. }) { + el.child(self.render_user_menu_button(cx)) + } else { + el.children(self.render_connection_status(status, cx)) + .child(self.render_sign_in_button(cx)) + .child(self.render_user_menu_button(cx)) + } + }), + ) + } +} + +fn render_color_ribbon(color: Hsla) -> impl Element { + canvas( + move |_, _| {}, + move |bounds, _, cx| { + let height = bounds.size.height; + let horizontal_offset = height; + let vertical_offset = px(height.0 / 2.0); + let mut path = Path::new(bounds.lower_left()); + path.curve_to( + bounds.origin + point(horizontal_offset, vertical_offset), + bounds.origin + point(px(0.0), vertical_offset), + ); + path.line_to(bounds.upper_right() + point(-horizontal_offset, vertical_offset)); + path.curve_to( + bounds.lower_right(), + bounds.upper_right() + point(px(0.0), vertical_offset), + ); + path.line_to(bounds.lower_left()); + cx.paint_path(path, color); + }, + ) + .h_1() + .w_full() +} + +impl CollabTitlebarItem { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let project = workspace.project().clone(); + let user_store = workspace.app_state().user_store.clone(); + let client = workspace.app_state().client.clone(); + let active_call = ActiveCall::global(cx); + let mut subscriptions = Vec::new(); + subscriptions.push( + cx.observe(&workspace.weak_handle().upgrade().unwrap(), |_, _, cx| { + cx.notify() + }), + ); + subscriptions.push(cx.observe(&project, |_, _, cx| cx.notify())); + subscriptions.push(cx.observe(&active_call, |this, _, cx| this.active_call_changed(cx))); + subscriptions.push(cx.observe_window_activation(Self::window_activation_changed)); + subscriptions.push(cx.observe(&user_store, |_, _, cx| cx.notify())); + + Self { + workspace: workspace.weak_handle(), + project, + user_store, + client, + _subscriptions: subscriptions, + } + } + + // resolve if you are in a room -> render_project_owner + // render_project_owner -> resolve if you are in a room -> Option + + pub fn render_project_host(&self, cx: &mut ViewContext) -> Option { + if let Some(dev_server) = + self.project + .read(cx) + .dev_server_project_id() + .and_then(|dev_server_project_id| { + dev_server_projects::Store::global(cx) + .read(cx) + .dev_server_for_project(dev_server_project_id) + }) + { + return Some( + ButtonLike::new("dev_server_trigger") + 
.child(Indicator::dot().color( + if dev_server.status == DevServerStatus::Online { + Color::Created + } else { + Color::Disabled + }, + )) + .child( + Label::new(dev_server.name.clone()) + .size(LabelSize::Small) + .line_height_style(LineHeightStyle::UiLabel), + ) + .tooltip(move |cx| Tooltip::text("Project is hosted on a dev server", cx)) + .on_click(cx.listener(|this, _, cx| { + if let Some(workspace) = this.workspace.upgrade() { + recent_projects::DevServerProjects::open(workspace, cx) + } + })) + .into_any_element(), + ); + } + + let host = self.project.read(cx).host()?; + let host_user = self.user_store.read(cx).get_cached_user(host.user_id)?; + let participant_index = self + .user_store + .read(cx) + .participant_indices() + .get(&host_user.id)?; + Some( + Button::new("project_owner_trigger", host_user.github_login.clone()) + .color(Color::Player(participant_index.0)) + .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small) + .tooltip(move |cx| { + Tooltip::text( + format!( + "{} is sharing this project. Click to follow.", + host_user.github_login.clone() + ), + cx, + ) + }) + .on_click({ + let host_peer_id = host.peer_id; + cx.listener(move |this, _, cx| { + this.workspace + .update(cx, |workspace, cx| { + workspace.follow(host_peer_id, cx); + }) + .log_err(); + }) + }) + .into_any_element(), + ) + } + + pub fn render_project_name(&self, cx: &mut ViewContext) -> impl IntoElement { + let name = { + let mut names = self.project.read(cx).visible_worktrees(cx).map(|worktree| { + let worktree = worktree.read(cx); + worktree.root_name() + }); + + names.next() + }; + let is_project_selected = name.is_some(); + let name = if let Some(name) = name { + util::truncate_and_trailoff(name, MAX_PROJECT_NAME_LENGTH) + } else { + "Open recent project".to_string() + }; + + let workspace = self.workspace.clone(); + Button::new("project_name_trigger", name) + .when(!is_project_selected, |b| b.color(Color::Muted)) + .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small) + .tooltip(move |cx| { + Tooltip::for_action( + "Recent Projects", + &recent_projects::OpenRecent { + create_new_window: false, + }, + cx, + ) + }) + .on_click(cx.listener(move |_, _, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + RecentProjects::open(workspace, false, cx); + }) + } + })) + } + + pub fn render_project_branch(&self, cx: &mut ViewContext) -> Option { + let entry = { + let mut names_and_branches = + self.project.read(cx).visible_worktrees(cx).map(|worktree| { + let worktree = worktree.read(cx); + worktree.root_git_entry() + }); + + names_and_branches.next().flatten() + }; + let workspace = self.workspace.upgrade()?; + let branch_name = entry + .as_ref() + .and_then(RepositoryEntry::branch) + .map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?; + Some( + popover_menu("project_branch_trigger") + .trigger( + Button::new("project_branch_trigger", branch_name) + .color(Color::Muted) + .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small) + .tooltip(move |cx| { + Tooltip::with_meta( + "Recent Branches", + Some(&ToggleVcsMenu), + "Local branches only", + cx, + ) + }), + ) + .menu(move |cx| Self::render_vcs_popover(workspace.clone(), cx)), + ) + } + + #[allow(clippy::too_many_arguments)] + fn render_collaborator( + &self, + user: &Arc, + peer_id: PeerId, + is_present: bool, + is_speaking: bool, + is_muted: bool, + leader_selection_color: Option, + room: &Room, + project_id: Option, + current_user: &Arc, + cx: &ViewContext, + ) -> Option
{ + if room.role_for_user(user.id) == Some(proto::ChannelRole::Guest) { + return None; + } + + const FACEPILE_LIMIT: usize = 3; + let followers = project_id.map_or(&[] as &[_], |id| room.followers_for(peer_id, id)); + let extra_count = followers.len().saturating_sub(FACEPILE_LIMIT); + + Some( + div() + .m_0p5() + .p_0p5() + // When the collaborator is not followed, still draw this wrapper div, but leave + // it transparent, so that it does not shift the layout when following. + .when_some(leader_selection_color, |div, color| { + div.rounded_md().bg(color) + }) + .child( + FacePile::empty() + .child( + Avatar::new(user.avatar_uri.clone()) + .grayscale(!is_present) + .border_color(if is_speaking { + cx.theme().status().info + } else { + // We draw the border in a transparent color rather to avoid + // the layout shift that would come with adding/removing the border. + gpui::transparent_black() + }) + .when(is_muted, |avatar| { + avatar.indicator( + AvatarAudioStatusIndicator::new(ui::AudioStatus::Muted) + .tooltip({ + let github_login = user.github_login.clone(); + move |cx| { + Tooltip::text( + format!("{} is muted", github_login), + cx, + ) + } + }), + ) + }), + ) + .children(followers.iter().take(FACEPILE_LIMIT).filter_map( + |follower_peer_id| { + let follower = room + .remote_participants() + .values() + .find_map(|p| { + (p.peer_id == *follower_peer_id).then_some(&p.user) + }) + .or_else(|| { + (self.client.peer_id() == Some(*follower_peer_id)) + .then_some(current_user) + })? + .clone(); + + Some(div().mt(-px(4.)).child( + Avatar::new(follower.avatar_uri.clone()).size(rems(0.75)), + )) + }, + )) + .children(if extra_count > 0 { + Some( + div() + .ml_1() + .child(Label::new(format!("+{extra_count}"))) + .into_any_element(), + ) + } else { + None + }), + ), + ) + } + + fn window_activation_changed(&mut self, cx: &mut ViewContext) { + if cx.is_window_active() { + ActiveCall::global(cx) + .update(cx, |call, cx| call.set_location(Some(&self.project), cx)) + .detach_and_log_err(cx); + } else if cx.active_window().is_none() { + ActiveCall::global(cx) + .update(cx, |call, cx| call.set_location(None, cx)) + .detach_and_log_err(cx); + } + self.workspace + .update(cx, |workspace, cx| { + workspace.update_active_view_for_followers(cx); + }) + .ok(); + } + + fn active_call_changed(&mut self, cx: &mut ViewContext) { + cx.notify(); + } + + fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext) { + let active_call = ActiveCall::global(cx); + let project = self.project.clone(); + active_call + .update(cx, |call, cx| call.share_project(project, cx)) + .detach_and_log_err(cx); + } + + fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext) { + let active_call = ActiveCall::global(cx); + let project = self.project.clone(); + active_call + .update(cx, |call, cx| call.unshare_project(project, cx)) + .log_err(); + } + + pub fn render_vcs_popover( + workspace: View, + cx: &mut WindowContext<'_>, + ) -> Option> { + let view = build_branch_list(workspace, cx).log_err()?; + let focus_handle = view.focus_handle(cx); + cx.focus(&focus_handle); + Some(view) + } + + fn render_connection_status( + &self, + status: &client::Status, + cx: &mut ViewContext, + ) -> Option { + match status { + client::Status::ConnectionError + | client::Status::ConnectionLost + | client::Status::Reauthenticating { .. } + | client::Status::Reconnecting { .. } + | client::Status::ReconnectionError { .. 
} => Some( + div() + .id("disconnected") + .child(Icon::new(IconName::Disconnected).size(IconSize::Small)) + .tooltip(|cx| Tooltip::text("Disconnected", cx)) + .into_any_element(), + ), + client::Status::UpgradeRequired => { + let auto_updater = auto_update::AutoUpdater::get(cx); + let label = match auto_updater.map(|auto_update| auto_update.read(cx).status()) { + Some(AutoUpdateStatus::Updated { .. }) => "Please restart Zed to Collaborate", + Some(AutoUpdateStatus::Installing) + | Some(AutoUpdateStatus::Downloading) + | Some(AutoUpdateStatus::Checking) => "Updating...", + Some(AutoUpdateStatus::Idle) | Some(AutoUpdateStatus::Errored) | None => { + "Please update Zed to Collaborate" + } + }; + + Some( + Button::new("connection-status", label) + .label_size(LabelSize::Small) + .on_click(|_, cx| { + if let Some(auto_updater) = auto_update::AutoUpdater::get(cx) { + if auto_updater.read(cx).status().is_updated() { + workspace::restart(&Default::default(), cx); + return; + } + } + auto_update::check(&Default::default(), cx); + }) + .into_any_element(), + ) + } + _ => None, + } + } + + pub fn render_sign_in_button(&mut self, _: &mut ViewContext) -> Button { + let client = self.client.clone(); + Button::new("sign_in", "Sign in") + .label_size(LabelSize::Small) + .on_click(move |_, cx| { + let client = client.clone(); + cx.spawn(move |mut cx| async move { + client + .authenticate_and_connect(true, &cx) + .await + .notify_async_err(&mut cx); + }) + .detach(); + }) + } + + pub fn render_user_menu_button(&mut self, cx: &mut ViewContext) -> impl Element { + if let Some(user) = self.user_store.read(cx).current_user() { + popover_menu("user-menu") + .menu(|cx| { + ContextMenu::build(cx, |menu, _| { + menu.action("Settings", zed_actions::OpenSettings.boxed_clone()) + .action("Extensions", extensions_ui::Extensions.boxed_clone()) + .action("Themes...", theme_selector::Toggle::default().boxed_clone()) + .separator() + .action("Sign Out", client::SignOut.boxed_clone()) + }) + .into() + }) + .trigger( + ButtonLike::new("user-menu") + .child( + h_flex() + .gap_0p5() + .child(Avatar::new(user.avatar_uri.clone())) + .child(Icon::new(IconName::ChevronDown).color(Color::Muted)), + ) + .style(ButtonStyle::Subtle) + .tooltip(move |cx| Tooltip::text("Toggle User Menu", cx)), + ) + .anchor(gpui::AnchorCorner::TopRight) + } else { + popover_menu("user-menu") + .menu(|cx| { + ContextMenu::build(cx, |menu, _| { + menu.action("Settings", zed_actions::OpenSettings.boxed_clone()) + .action("Extensions", extensions_ui::Extensions.boxed_clone()) + .action("Themes...", theme_selector::Toggle::default().boxed_clone()) + }) + .into() + }) + .trigger( + ButtonLike::new("user-menu") + .child( + h_flex() + .gap_0p5() + .child(Icon::new(IconName::ChevronDown).color(Color::Muted)), + ) + .style(ButtonStyle::Subtle) + .tooltip(move |cx| Tooltip::text("Toggle User Menu", cx)), + ) + } + } +} diff --git a/crates/collab_ui/src/collab_ui.rs b/crates/collab_ui/src/collab_ui.rs new file mode 100644 index 0000000..94f7c7b --- /dev/null +++ b/crates/collab_ui/src/collab_ui.rs @@ -0,0 +1,130 @@ +pub mod channel_view; +pub mod chat_panel; +pub mod collab_panel; +mod collab_titlebar_item; +mod face_pile; +pub mod notification_panel; +pub mod notifications; +mod panel_settings; + +use std::{rc::Rc, sync::Arc}; + +use call::{report_call_event_for_room, ActiveCall}; +pub use collab_panel::CollabPanel; +pub use collab_titlebar_item::CollabTitlebarItem; +use gpui::{ + actions, point, AppContext, DevicePixels, Pixels, PlatformDisplay, Size, Task, + 
WindowBackgroundAppearance, WindowBounds, WindowContext, WindowKind, WindowOptions,
+};
+use panel_settings::MessageEditorSettings;
+pub use panel_settings::{
+    ChatPanelSettings, CollaborationPanelSettings, NotificationPanelSettings,
+};
+use release_channel::ReleaseChannel;
+use settings::Settings;
+use workspace::{notifications::DetachAndPromptErr, AppState};
+
+actions!(
+    collab,
+    [ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
+);
+
+pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
+    CollaborationPanelSettings::register(cx);
+    ChatPanelSettings::register(cx);
+    NotificationPanelSettings::register(cx);
+    MessageEditorSettings::register(cx);
+
+    vcs_menu::init(cx);
+    collab_titlebar_item::init(cx);
+    collab_panel::init(cx);
+    channel_view::init(cx);
+    chat_panel::init(cx);
+    notification_panel::init(cx);
+    notifications::init(&app_state, cx);
+}
+
+pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut WindowContext) {
+    let call = ActiveCall::global(cx).read(cx);
+    if let Some(room) = call.room().cloned() {
+        let client = call.client();
+        let toggle_screen_sharing = room.update(cx, |room, cx| {
+            if room.is_screen_sharing() {
+                report_call_event_for_room(
+                    "disable screen share",
+                    room.id(),
+                    room.channel_id(),
+                    &client,
+                );
+                Task::ready(room.unshare_screen(cx))
+            } else {
+                report_call_event_for_room(
+                    "enable screen share",
+                    room.id(),
+                    room.channel_id(),
+                    &client,
+                );
+                room.share_screen(cx)
+            }
+        });
+        toggle_screen_sharing.detach_and_prompt_err("Sharing Screen Failed", cx, |e, _| Some(format!("{:?}\n\nPlease check that you have given Zed permissions to record your screen in Settings.", e)));
+    }
+}
+
+pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
+    let call = ActiveCall::global(cx).read(cx);
+    if let Some(room) = call.room().cloned() {
+        let client = call.client();
+        room.update(cx, |room, cx| {
+            let operation = if room.is_muted() {
+                "enable microphone"
+            } else {
+                "disable microphone"
+            };
+            report_call_event_for_room(operation, room.id(), room.channel_id(), &client);
+
+            room.toggle_mute(cx)
+        });
+    }
+}
+
+pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
+    if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
+        room.update(cx, |room, cx| room.toggle_deafen(cx));
+    }
+}
+
+fn notification_window_options(
+    screen: Rc<dyn PlatformDisplay>,
+    window_size: Size<Pixels>,
+    cx: &AppContext,
+) -> WindowOptions {
+    let notification_margin_width = DevicePixels::from(16);
+    let notification_margin_height = DevicePixels::from(-0) - DevicePixels::from(48);
+
+    let screen_bounds = screen.bounds();
+    let size: Size<DevicePixels> = window_size.into();
+
+    let bounds = gpui::Bounds::<DevicePixels> {
+        origin: screen_bounds.upper_right()
+            - point(
+                size.width + notification_margin_width,
+                notification_margin_height,
+            ),
+        size: window_size.into(),
+    };
+
+    let app_id = ReleaseChannel::global(cx).app_id();
+
+    WindowOptions {
+        window_bounds: Some(WindowBounds::Windowed(bounds)),
+        titlebar: None,
+        focus: false,
+        show: true,
+        kind: WindowKind::PopUp,
+        is_movable: false,
+        display_id: Some(screen.id()),
+        window_background: WindowBackgroundAppearance::default(),
+        app_id: Some(app_id.to_owned()),
+    }
+}
diff --git a/crates/collab_ui/src/face_pile.rs b/crates/collab_ui/src/face_pile.rs
new file mode 100644
index 0000000..bce52f0
--- /dev/null
+++ b/crates/collab_ui/src/face_pile.rs
@@ -0,0 +1,49 @@
+use gpui::AnyElement;
+use smallvec::SmallVec;
+use ui::prelude::*;
+
+#[derive(IntoElement)]
+pub struct FacePile {
+    base: Div,
+    faces: SmallVec<[AnyElement; 2]>,
+}
+
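+// `FacePile` stacks a row of avatar elements so they overlap slightly. Children are added
+// through `ParentElement::extend` and laid out in the `RenderOnce` impl below with
+// `flex_row_reverse` plus negative margins, which keeps the first face visually in front.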
+impl FacePile { + pub fn empty() -> Self { + Self::new(SmallVec::new()) + } + + pub fn new(faces: SmallVec<[AnyElement; 2]>) -> Self { + Self { base: div(), faces } + } +} + +impl RenderOnce for FacePile { + fn render(self, _cx: &mut WindowContext) -> impl IntoElement { + // Lay the faces out in reverse so they overlap in the desired order (left to right, front to back) + self.base + .flex() + .flex_row_reverse() + .items_center() + .justify_start() + .children( + self.faces + .into_iter() + .enumerate() + .rev() + .map(|(ix, player)| div().when(ix > 0, |div| div.ml_neg_1()).child(player)), + ) + } +} + +impl ParentElement for FacePile { + fn extend(&mut self, elements: impl IntoIterator) { + self.faces.extend(elements); + } +} + +impl Styled for FacePile { + fn style(&mut self) -> &mut gpui::StyleRefinement { + self.base.style() + } +} diff --git a/crates/collab_ui/src/notification_panel.rs b/crates/collab_ui/src/notification_panel.rs new file mode 100644 index 0000000..c8e58b1 --- /dev/null +++ b/crates/collab_ui/src/notification_panel.rs @@ -0,0 +1,783 @@ +use crate::{chat_panel::ChatPanel, NotificationPanelSettings}; +use anyhow::Result; +use channel::ChannelStore; +use client::{ChannelId, Client, Notification, User, UserStore}; +use collections::HashMap; +use db::kvp::KEY_VALUE_STORE; +use futures::StreamExt; +use gpui::{ + actions, div, img, list, px, AnyElement, AppContext, AsyncWindowContext, CursorStyle, + DismissEvent, Element, EventEmitter, FocusHandle, FocusableView, InteractiveElement, + IntoElement, ListAlignment, ListScrollEvent, ListState, Model, ParentElement, Render, + StatefulInteractiveElement, Styled, Task, View, ViewContext, VisualContext, WeakView, + WindowContext, +}; +use notifications::{NotificationEntry, NotificationEvent, NotificationStore}; +use project::Fs; +use rpc::proto; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsStore}; +use std::{sync::Arc, time::Duration}; +use time::{OffsetDateTime, UtcOffset}; +use ui::{h_flex, prelude::*, v_flex, Avatar, Button, Icon, IconButton, IconName, Label, Tooltip}; +use util::{ResultExt, TryFutureExt}; +use workspace::notifications::NotificationId; +use workspace::{ + dock::{DockPosition, Panel, PanelEvent}, + Workspace, +}; + +const LOADING_THRESHOLD: usize = 30; +const MARK_AS_READ_DELAY: Duration = Duration::from_secs(1); +const TOAST_DURATION: Duration = Duration::from_secs(5); +const NOTIFICATION_PANEL_KEY: &str = "NotificationPanel"; + +pub struct NotificationPanel { + client: Arc, + user_store: Model, + channel_store: Model, + notification_store: Model, + fs: Arc, + width: Option, + active: bool, + notification_list: ListState, + pending_serialization: Task>, + subscriptions: Vec, + workspace: WeakView, + current_notification_toast: Option<(u64, Task<()>)>, + local_timezone: UtcOffset, + focus_handle: FocusHandle, + mark_as_read_tasks: HashMap>>, + unseen_notifications: Vec, +} + +#[derive(Serialize, Deserialize)] +struct SerializedNotificationPanel { + width: Option, +} + +#[derive(Debug)] +pub enum Event { + DockPositionChanged, + Focus, + Dismissed, +} + +pub struct NotificationPresenter { + pub actor: Option>, + pub text: String, + pub icon: &'static str, + pub needs_response: bool, + pub can_navigate: bool, +} + +actions!(notification_panel, [ToggleFocus]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(|workspace: &mut Workspace, _| { + workspace.register_action(|workspace, _: &ToggleFocus, cx| { + workspace.toggle_panel_focus::(cx); + }); + }) + .detach(); +} + +impl 
NotificationPanel { + pub fn new(workspace: &mut Workspace, cx: &mut ViewContext) -> View { + let fs = workspace.app_state().fs.clone(); + let client = workspace.app_state().client.clone(); + let user_store = workspace.app_state().user_store.clone(); + let workspace_handle = workspace.weak_handle(); + + cx.new_view(|cx: &mut ViewContext| { + let mut status = client.status(); + cx.spawn(|this, mut cx| async move { + while let Some(_) = status.next().await { + if this + .update(&mut cx, |_, cx| { + cx.notify(); + }) + .is_err() + { + break; + } + } + }) + .detach(); + + let view = cx.view().downgrade(); + let notification_list = + ListState::new(0, ListAlignment::Top, px(1000.), move |ix, cx| { + view.upgrade() + .and_then(|view| { + view.update(cx, |this, cx| this.render_notification(ix, cx)) + }) + .unwrap_or_else(|| div().into_any()) + }); + notification_list.set_scroll_handler(cx.listener( + |this, event: &ListScrollEvent, cx| { + if event.count.saturating_sub(event.visible_range.end) < LOADING_THRESHOLD { + if let Some(task) = this + .notification_store + .update(cx, |store, cx| store.load_more_notifications(false, cx)) + { + task.detach(); + } + } + }, + )); + + let mut this = Self { + fs, + client, + user_store, + local_timezone: cx.local_timezone(), + channel_store: ChannelStore::global(cx), + notification_store: NotificationStore::global(cx), + notification_list, + pending_serialization: Task::ready(None), + workspace: workspace_handle, + focus_handle: cx.focus_handle(), + current_notification_toast: None, + subscriptions: Vec::new(), + active: false, + mark_as_read_tasks: HashMap::default(), + width: None, + unseen_notifications: Vec::new(), + }; + + let mut old_dock_position = this.position(cx); + this.subscriptions.extend([ + cx.observe(&this.notification_store, |_, _, cx| cx.notify()), + cx.subscribe(&this.notification_store, Self::on_notification_event), + cx.observe_global::(move |this: &mut Self, cx| { + let new_dock_position = this.position(cx); + if new_dock_position != old_dock_position { + old_dock_position = new_dock_position; + cx.emit(Event::DockPositionChanged); + } + cx.notify(); + }), + ]); + this + }) + } + + pub fn load( + workspace: WeakView, + cx: AsyncWindowContext, + ) -> Task>> { + cx.spawn(|mut cx| async move { + let serialized_panel = if let Some(panel) = cx + .background_executor() + .spawn(async move { KEY_VALUE_STORE.read_kvp(NOTIFICATION_PANEL_KEY) }) + .await + .log_err() + .flatten() + { + Some(serde_json::from_str::(&panel)?) + } else { + None + }; + + workspace.update(&mut cx, |workspace, cx| { + let panel = Self::new(workspace, cx); + if let Some(serialized_panel) = serialized_panel { + panel.update(cx, |panel, cx| { + panel.width = serialized_panel.width.map(|w| w.round()); + cx.notify(); + }); + } + panel + }) + }) + } + + fn serialize(&mut self, cx: &mut ViewContext) { + let width = self.width; + self.pending_serialization = cx.background_executor().spawn( + async move { + KEY_VALUE_STORE + .write_kvp( + NOTIFICATION_PANEL_KEY.into(), + serde_json::to_string(&SerializedNotificationPanel { width })?, + ) + .await?; + anyhow::Ok(()) + } + .log_err(), + ); + } + + fn render_notification(&mut self, ix: usize, cx: &mut ViewContext) -> Option { + let entry = self.notification_store.read(cx).notification_at(ix)?; + let notification_id = entry.id; + let now = OffsetDateTime::now_utc(); + let timestamp = entry.timestamp; + let NotificationPresenter { + actor, + text, + needs_response, + can_navigate, + .. 
+ } = self.present_notification(entry, cx)?; + + let response = entry.response; + let notification = entry.notification.clone(); + + if self.active && !entry.is_read { + self.did_render_notification(notification_id, ¬ification, cx); + } + + let relative_timestamp = time_format::format_localized_timestamp( + timestamp, + now, + self.local_timezone, + time_format::TimestampFormat::Relative, + ); + + let absolute_timestamp = time_format::format_localized_timestamp( + timestamp, + now, + self.local_timezone, + time_format::TimestampFormat::Absolute, + ); + + Some( + div() + .id(ix) + .flex() + .flex_row() + .size_full() + .px_2() + .py_1() + .gap_2() + .hover(|style| style.bg(cx.theme().colors().element_hover)) + .when(can_navigate, |el| { + el.cursor(CursorStyle::PointingHand).on_click({ + let notification = notification.clone(); + cx.listener(move |this, _, cx| { + this.did_click_notification(¬ification, cx) + }) + }) + }) + .children(actor.map(|actor| { + img(actor.avatar_uri.clone()) + .flex_none() + .w_8() + .h_8() + .rounded_full() + })) + .child( + v_flex() + .gap_1() + .size_full() + .overflow_hidden() + .child(Label::new(text.clone())) + .child( + h_flex() + .child( + div() + .id("notification_timestamp") + .hover(|style| { + style + .bg(cx.theme().colors().element_selected) + .rounded_md() + }) + .child(Label::new(relative_timestamp).color(Color::Muted)) + .tooltip(move |cx| { + Tooltip::text(absolute_timestamp.clone(), cx) + }), + ) + .children(if let Some(is_accepted) = response { + Some(div().flex().flex_grow().justify_end().child(Label::new( + if is_accepted { + "You accepted" + } else { + "You declined" + }, + ))) + } else if needs_response { + Some( + h_flex() + .flex_grow() + .justify_end() + .child(Button::new("decline", "Decline").on_click({ + let notification = notification.clone(); + let view = cx.view().clone(); + move |_, cx| { + view.update(cx, |this, cx| { + this.respond_to_notification( + notification.clone(), + false, + cx, + ) + }); + } + })) + .child(Button::new("accept", "Accept").on_click({ + let notification = notification.clone(); + let view = cx.view().clone(); + move |_, cx| { + view.update(cx, |this, cx| { + this.respond_to_notification( + notification.clone(), + true, + cx, + ) + }); + } + })), + ) + } else { + None + }), + ), + ) + .into_any(), + ) + } + + fn present_notification( + &self, + entry: &NotificationEntry, + cx: &AppContext, + ) -> Option { + let user_store = self.user_store.read(cx); + let channel_store = self.channel_store.read(cx); + match entry.notification { + Notification::ContactRequest { sender_id } => { + let requester = user_store.get_cached_user(sender_id)?; + Some(NotificationPresenter { + icon: "icons/plus.svg", + text: format!("{} wants to add you as a contact", requester.github_login), + needs_response: user_store.has_incoming_contact_request(requester.id), + actor: Some(requester), + can_navigate: false, + }) + } + Notification::ContactRequestAccepted { responder_id } => { + let responder = user_store.get_cached_user(responder_id)?; + Some(NotificationPresenter { + icon: "icons/plus.svg", + text: format!("{} accepted your contact invite", responder.github_login), + needs_response: false, + actor: Some(responder), + can_navigate: false, + }) + } + Notification::ChannelInvitation { + ref channel_name, + channel_id, + inviter_id, + } => { + let inviter = user_store.get_cached_user(inviter_id)?; + Some(NotificationPresenter { + icon: "icons/hash.svg", + text: format!( + "{} invited you to join the #{channel_name} channel", + 
inviter.github_login + ), + needs_response: channel_store.has_channel_invitation(ChannelId(channel_id)), + actor: Some(inviter), + can_navigate: false, + }) + } + Notification::ChannelMessageMention { + sender_id, + channel_id, + message_id, + } => { + let sender = user_store.get_cached_user(sender_id)?; + let channel = channel_store.channel_for_id(ChannelId(channel_id))?; + let message = self + .notification_store + .read(cx) + .channel_message_for_id(message_id)?; + Some(NotificationPresenter { + icon: "icons/conversations.svg", + text: format!( + "{} mentioned you in #{}:\n{}", + sender.github_login, channel.name, message.body, + ), + needs_response: false, + actor: Some(sender), + can_navigate: true, + }) + } + } + } + + fn did_render_notification( + &mut self, + notification_id: u64, + notification: &Notification, + cx: &mut ViewContext, + ) { + let should_mark_as_read = match notification { + Notification::ContactRequestAccepted { .. } => true, + Notification::ContactRequest { .. } + | Notification::ChannelInvitation { .. } + | Notification::ChannelMessageMention { .. } => false, + }; + + if should_mark_as_read { + self.mark_as_read_tasks + .entry(notification_id) + .or_insert_with(|| { + let client = self.client.clone(); + cx.spawn(|this, mut cx| async move { + cx.background_executor().timer(MARK_AS_READ_DELAY).await; + client + .request(proto::MarkNotificationRead { notification_id }) + .await?; + this.update(&mut cx, |this, _| { + this.mark_as_read_tasks.remove(¬ification_id); + })?; + Ok(()) + }) + }); + } + } + + fn did_click_notification(&mut self, notification: &Notification, cx: &mut ViewContext) { + if let Notification::ChannelMessageMention { + message_id, + channel_id, + .. + } = notification.clone() + { + if let Some(workspace) = self.workspace.upgrade() { + cx.window_context().defer(move |cx| { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = workspace.focus_panel::(cx) { + panel.update(cx, |panel, cx| { + panel + .select_channel(ChannelId(channel_id), Some(message_id), cx) + .detach_and_log_err(cx); + }); + } + }); + }); + } + } + } + + fn is_showing_notification(&self, notification: &Notification, cx: &ViewContext) -> bool { + if !self.active { + return false; + } + + if let Notification::ChannelMessageMention { channel_id, .. } = ¬ification { + if let Some(workspace) = self.workspace.upgrade() { + return if let Some(panel) = workspace.read(cx).panel::(cx) { + let panel = panel.read(cx); + panel.is_scrolled_to_bottom() + && panel + .active_chat() + .map_or(false, |chat| chat.read(cx).channel_id.0 == *channel_id) + } else { + false + }; + } + } + + false + } + + fn on_notification_event( + &mut self, + _: Model, + event: &NotificationEvent, + cx: &mut ViewContext, + ) { + match event { + NotificationEvent::NewNotification { entry } => { + if !self.is_showing_notification(&entry.notification, cx) { + self.unseen_notifications.push(entry.clone()); + } + self.add_toast(entry, cx); + } + NotificationEvent::NotificationRemoved { entry } + | NotificationEvent::NotificationRead { entry } => { + self.unseen_notifications.retain(|n| n.id != entry.id); + self.remove_toast(entry.id, cx); + } + NotificationEvent::NotificationsUpdated { + old_range, + new_count, + } => { + self.notification_list.splice(old_range.clone(), *new_count); + cx.notify(); + } + } + } + + fn add_toast(&mut self, entry: &NotificationEntry, cx: &mut ViewContext) { + if self.is_showing_notification(&entry.notification, cx) { + return; + } + + let Some(NotificationPresenter { actor, text, .. 
}) = self.present_notification(entry, cx) + else { + return; + }; + + let notification_id = entry.id; + self.current_notification_toast = Some(( + notification_id, + cx.spawn(|this, mut cx| async move { + cx.background_executor().timer(TOAST_DURATION).await; + this.update(&mut cx, |this, cx| this.remove_toast(notification_id, cx)) + .ok(); + }), + )); + + self.workspace + .update(cx, |workspace, cx| { + let id = NotificationId::unique::(); + + workspace.dismiss_notification(&id, cx); + workspace.show_notification(id, cx, |cx| { + let workspace = cx.view().downgrade(); + cx.new_view(|_| NotificationToast { + notification_id, + actor, + text, + workspace, + }) + }) + }) + .ok(); + } + + fn remove_toast(&mut self, notification_id: u64, cx: &mut ViewContext) { + if let Some((current_id, _)) = &self.current_notification_toast { + if *current_id == notification_id { + self.current_notification_toast.take(); + self.workspace + .update(cx, |workspace, cx| { + let id = NotificationId::unique::(); + workspace.dismiss_notification(&id, cx) + }) + .ok(); + } + } + } + + fn respond_to_notification( + &mut self, + notification: Notification, + response: bool, + cx: &mut ViewContext, + ) { + self.notification_store.update(cx, |store, cx| { + store.respond_to_notification(notification, response, cx); + }); + } +} + +impl Render for NotificationPanel { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + v_flex() + .size_full() + .child( + h_flex() + .justify_between() + .px_2() + .py_1() + // Match the height of the tab bar so they line up. + .h(rems(ui::Tab::CONTAINER_HEIGHT_IN_REMS)) + .border_b_1() + .border_color(cx.theme().colors().border) + .child(Label::new("Notifications")) + .child(Icon::new(IconName::Envelope)), + ) + .map(|this| { + if self.client.user_id().is_none() { + this.child( + v_flex() + .gap_2() + .p_4() + .child( + Button::new("sign_in_prompt_button", "Sign in") + .icon_color(Color::Muted) + .icon(IconName::Github) + .icon_position(IconPosition::Start) + .style(ButtonStyle::Filled) + .full_width() + .on_click({ + let client = self.client.clone(); + move |_, cx| { + let client = client.clone(); + cx.spawn(move |cx| async move { + client + .authenticate_and_connect(true, &cx) + .log_err() + .await; + }) + .detach() + } + }), + ) + .child( + div().flex().w_full().items_center().child( + Label::new("Sign in to view notifications.") + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ) + } else if self.notification_list.item_count() == 0 { + this.child( + v_flex().p_4().child( + div().flex().w_full().items_center().child( + Label::new("You have no notifications.") + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ) + } else { + this.child(list(self.notification_list.clone()).size_full()) + } + }) + } +} + +impl FocusableView for NotificationPanel { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl EventEmitter for NotificationPanel {} +impl EventEmitter for NotificationPanel {} + +impl Panel for NotificationPanel { + fn persistent_name() -> &'static str { + "NotificationPanel" + } + + fn position(&self, cx: &gpui::WindowContext) -> DockPosition { + NotificationPanelSettings::get_global(cx).dock + } + + fn position_is_valid(&self, position: DockPosition) -> bool { + matches!(position, DockPosition::Left | DockPosition::Right) + } + + fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext) { + settings::update_settings_file::( + self.fs.clone(), + cx, + move |settings| settings.dock = 
Some(position), + ); + } + + fn size(&self, cx: &gpui::WindowContext) -> Pixels { + self.width + .unwrap_or_else(|| NotificationPanelSettings::get_global(cx).default_width) + } + + fn set_size(&mut self, size: Option, cx: &mut ViewContext) { + self.width = size; + self.serialize(cx); + cx.notify(); + } + + fn set_active(&mut self, active: bool, cx: &mut ViewContext) { + self.active = active; + + if self.active { + self.unseen_notifications = Vec::new(); + cx.notify(); + } + + if self.notification_store.read(cx).notification_count() == 0 { + cx.emit(Event::Dismissed); + } + } + + fn icon(&self, cx: &gpui::WindowContext) -> Option { + let show_button = NotificationPanelSettings::get_global(cx).button; + if !show_button { + return None; + } + + if self.unseen_notifications.is_empty() { + return Some(IconName::Bell); + } + + Some(IconName::BellDot) + } + + fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { + Some("Notification Panel") + } + + fn icon_label(&self, cx: &WindowContext) -> Option { + let count = self.notification_store.read(cx).unread_notification_count(); + if count == 0 { + None + } else { + Some(count.to_string()) + } + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } +} + +pub struct NotificationToast { + notification_id: u64, + actor: Option>, + text: String, + workspace: WeakView, +} + +impl NotificationToast { + fn focus_notification_panel(&self, cx: &mut ViewContext) { + let workspace = self.workspace.clone(); + let notification_id = self.notification_id; + cx.window_context().defer(move |cx| { + workspace + .update(cx, |workspace, cx| { + if let Some(panel) = workspace.focus_panel::(cx) { + panel.update(cx, |panel, cx| { + let store = panel.notification_store.read(cx); + if let Some(entry) = store.notification_for_id(notification_id) { + panel.did_click_notification(&entry.clone().notification, cx); + } + }); + } + }) + .ok(); + }) + } +} + +impl Render for NotificationToast { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let user = self.actor.clone(); + + h_flex() + .id("notification_panel_toast") + .elevation_3(cx) + .p_2() + .gap_2() + .children(user.map(|user| Avatar::new(user.avatar_uri.clone()))) + .child(Label::new(self.text.clone())) + .child( + IconButton::new("close", IconName::Close) + .on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))), + ) + .on_click(cx.listener(|this, _, cx| { + this.focus_notification_panel(cx); + cx.emit(DismissEvent); + })) + } +} + +impl EventEmitter for NotificationToast {} diff --git a/crates/collab_ui/src/notifications.rs b/crates/collab_ui/src/notifications.rs new file mode 100644 index 0000000..7759fef --- /dev/null +++ b/crates/collab_ui/src/notifications.rs @@ -0,0 +1,18 @@ +mod collab_notification; +pub mod incoming_call_notification; +pub mod project_shared_notification; + +#[cfg(feature = "stories")] +mod stories; + +use gpui::AppContext; +use std::sync::Arc; +use workspace::AppState; + +#[cfg(feature = "stories")] +pub use stories::*; + +pub fn init(app_state: &Arc, cx: &mut AppContext) { + incoming_call_notification::init(app_state, cx); + project_shared_notification::init(app_state, cx); +} diff --git a/crates/collab_ui/src/notifications/collab_notification.rs b/crates/collab_ui/src/notifications/collab_notification.rs new file mode 100644 index 0000000..14dae9c --- /dev/null +++ b/crates/collab_ui/src/notifications/collab_notification.rs @@ -0,0 +1,52 @@ +use gpui::{img, prelude::*, AnyElement, SharedUri}; +use smallvec::SmallVec; +use ui::prelude::*; + 
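+// `CollabNotification` is the shared shell for the popup windows in this module: an avatar
+// on the left, caller-provided content in the middle, and a column with the accept and
+// dismiss buttons on the right (see the `RenderOnce` impl below). Both the incoming-call
+// and project-shared notifications build on it.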
+#[derive(IntoElement)] +pub struct CollabNotification { + avatar_uri: SharedUri, + accept_button: Button, + dismiss_button: Button, + children: SmallVec<[AnyElement; 2]>, +} + +impl CollabNotification { + pub fn new( + avatar_uri: impl Into, + accept_button: Button, + dismiss_button: Button, + ) -> Self { + Self { + avatar_uri: avatar_uri.into(), + accept_button, + dismiss_button, + children: SmallVec::new(), + } + } +} + +impl ParentElement for CollabNotification { + fn extend(&mut self, elements: impl IntoIterator) { + self.children.extend(elements) + } +} + +impl RenderOnce for CollabNotification { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + h_flex() + .text_ui(cx) + .justify_between() + .size_full() + .overflow_hidden() + .elevation_3(cx) + .p_2() + .gap_2() + .child(img(self.avatar_uri).w_12().h_12().rounded_full()) + .child(v_flex().overflow_hidden().children(self.children)) + .child( + v_flex() + .child(self.accept_button) + .child(self.dismiss_button), + ) + } +} diff --git a/crates/collab_ui/src/notifications/incoming_call_notification.rs b/crates/collab_ui/src/notifications/incoming_call_notification.rs new file mode 100644 index 0000000..4279841 --- /dev/null +++ b/crates/collab_ui/src/notifications/incoming_call_notification.rs @@ -0,0 +1,145 @@ +use crate::notification_window_options; +use crate::notifications::collab_notification::CollabNotification; +use call::{ActiveCall, IncomingCall}; +use futures::StreamExt; +use gpui::{prelude::*, AppContext, WindowHandle}; +use settings::Settings; +use std::sync::{Arc, Weak}; +use theme::ThemeSettings; +use ui::{prelude::*, Button, Label}; +use util::ResultExt; +use workspace::AppState; + +pub fn init(app_state: &Arc, cx: &mut AppContext) { + let app_state = Arc::downgrade(app_state); + let mut incoming_call = ActiveCall::global(cx).read(cx).incoming(); + cx.spawn(|mut cx| async move { + let mut notification_windows: Vec> = Vec::new(); + while let Some(incoming_call) = incoming_call.next().await { + for window in notification_windows.drain(..) 
{ + window + .update(&mut cx, |_, cx| { + cx.remove_window(); + }) + .log_err(); + } + + if let Some(incoming_call) = incoming_call { + let unique_screens = cx.update(|cx| cx.displays()).unwrap(); + let window_size = gpui::Size { + width: px(400.), + height: px(72.), + }; + + for screen in unique_screens { + if let Some(options) = cx + .update(|cx| notification_window_options(screen, window_size, cx)) + .log_err() + { + let window = cx + .open_window(options, |cx| { + cx.new_view(|_| { + IncomingCallNotification::new( + incoming_call.clone(), + app_state.clone(), + ) + }) + }) + .unwrap(); + notification_windows.push(window); + } + } + } + } + }) + .detach(); +} + +struct IncomingCallNotificationState { + call: IncomingCall, + app_state: Weak, +} + +pub struct IncomingCallNotification { + state: Arc, +} +impl IncomingCallNotificationState { + pub fn new(call: IncomingCall, app_state: Weak) -> Self { + Self { call, app_state } + } + + fn respond(&self, accept: bool, cx: &mut AppContext) { + let active_call = ActiveCall::global(cx); + if accept { + let join = active_call.update(cx, |active_call, cx| active_call.accept_incoming(cx)); + let caller_user_id = self.call.calling_user.id; + let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id); + let app_state = self.app_state.clone(); + let cx: &mut AppContext = cx; + cx.spawn(|cx| async move { + join.await?; + if let Some(project_id) = initial_project_id { + cx.update(|cx| { + if let Some(app_state) = app_state.upgrade() { + workspace::join_in_room_project( + project_id, + caller_user_id, + app_state, + cx, + ) + .detach_and_log_err(cx); + } + }) + .log_err(); + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } else { + active_call.update(cx, |active_call, cx| { + active_call.decline_incoming(cx).log_err(); + }); + } + } +} + +impl IncomingCallNotification { + pub fn new(call: IncomingCall, app_state: Weak) -> Self { + Self { + state: Arc::new(IncomingCallNotificationState::new(call, app_state)), + } + } +} + +impl Render for IncomingCallNotification { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + // TODO: Is there a better place for us to initialize the font? 
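+        // Note: this view renders in its own notification window (opened in `init`
+        // above), so the UI font family and rem size are read from `ThemeSettings`
+        // and applied explicitly below.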
+ let (ui_font, ui_font_size) = { + let theme_settings = ThemeSettings::get_global(cx); + ( + theme_settings.ui_font.family.clone(), + theme_settings.ui_font_size, + ) + }; + + cx.set_rem_size(ui_font_size); + + div().size_full().font_family(ui_font).child( + CollabNotification::new( + self.state.call.calling_user.avatar_uri.clone(), + Button::new("accept", "Accept").on_click({ + let state = self.state.clone(); + move |_, cx| state.respond(true, cx) + }), + Button::new("decline", "Decline").on_click({ + let state = self.state.clone(); + move |_, cx| state.respond(false, cx) + }), + ) + .child(v_flex().overflow_hidden().child(Label::new(format!( + "{} is sharing a project in Zed", + self.state.call.calling_user.github_login + )))), + ) + } +} diff --git a/crates/collab_ui/src/notifications/project_shared_notification.rs b/crates/collab_ui/src/notifications/project_shared_notification.rs new file mode 100644 index 0000000..87f53a5 --- /dev/null +++ b/crates/collab_ui/src/notifications/project_shared_notification.rs @@ -0,0 +1,158 @@ +use crate::notification_window_options; +use crate::notifications::collab_notification::CollabNotification; +use call::{room, ActiveCall}; +use client::User; +use collections::HashMap; +use gpui::{AppContext, Size}; +use settings::Settings; +use std::sync::{Arc, Weak}; +use theme::ThemeSettings; +use ui::{prelude::*, Button, Label}; +use workspace::AppState; + +pub fn init(app_state: &Arc, cx: &mut AppContext) { + let app_state = Arc::downgrade(app_state); + let active_call = ActiveCall::global(cx); + let mut notification_windows = HashMap::default(); + cx.subscribe(&active_call, move |_, event, cx| match event { + room::Event::RemoteProjectShared { + owner, + project_id, + worktree_root_names, + } => { + let window_size = Size { + width: px(400.), + height: px(72.), + }; + + for screen in cx.displays() { + let options = notification_window_options(screen, window_size, cx); + let window = cx.open_window(options, |cx| { + cx.new_view(|_| { + ProjectSharedNotification::new( + owner.clone(), + *project_id, + worktree_root_names.clone(), + app_state.clone(), + ) + }) + }); + notification_windows + .entry(*project_id) + .or_insert(Vec::new()) + .push(window); + } + } + + room::Event::RemoteProjectUnshared { project_id } + | room::Event::RemoteProjectJoined { project_id } + | room::Event::RemoteProjectInvitationDiscarded { project_id } => { + if let Some(windows) = notification_windows.remove(&project_id) { + for window in windows { + window + .update(cx, |_, cx| { + cx.remove_window(); + }) + .ok(); + } + } + } + + room::Event::RoomLeft { .. 
} => { + for (_, windows) in notification_windows.drain() { + for window in windows { + window + .update(cx, |_, cx| { + cx.remove_window(); + }) + .ok(); + } + } + } + _ => {} + }) + .detach(); +} + +pub struct ProjectSharedNotification { + project_id: u64, + worktree_root_names: Vec, + owner: Arc, + app_state: Weak, +} + +impl ProjectSharedNotification { + fn new( + owner: Arc, + project_id: u64, + worktree_root_names: Vec, + app_state: Weak, + ) -> Self { + Self { + project_id, + worktree_root_names, + owner, + app_state, + } + } + + fn join(&mut self, cx: &mut ViewContext) { + if let Some(app_state) = self.app_state.upgrade() { + workspace::join_in_room_project(self.project_id, self.owner.id, app_state, cx) + .detach_and_log_err(cx); + } + } + + fn dismiss(&mut self, cx: &mut ViewContext) { + if let Some(active_room) = + ActiveCall::global(cx).read_with(cx, |call, _| call.room().cloned()) + { + active_room.update(cx, |_, cx| { + cx.emit(room::Event::RemoteProjectInvitationDiscarded { + project_id: self.project_id, + }); + }); + } + } +} + +impl Render for ProjectSharedNotification { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + // TODO: Is there a better place for us to initialize the font? + let (ui_font, ui_font_size) = { + let theme_settings = ThemeSettings::get_global(cx); + ( + theme_settings.ui_font.family.clone(), + theme_settings.ui_font_size, + ) + }; + + cx.set_rem_size(ui_font_size); + + div().size_full().font_family(ui_font).child( + CollabNotification::new( + self.owner.avatar_uri.clone(), + Button::new("open", "Open").on_click(cx.listener(move |this, _event, cx| { + this.join(cx); + })), + Button::new("dismiss", "Dismiss").on_click(cx.listener(move |this, _event, cx| { + this.dismiss(cx); + })), + ) + .child(Label::new(self.owner.github_login.clone())) + .child(Label::new(format!( + "is sharing a project in Zed{}", + if self.worktree_root_names.is_empty() { + "" + } else { + ":" + } + ))) + .children(if self.worktree_root_names.is_empty() { + None + } else { + Some(Label::new(self.worktree_root_names.join(", "))) + }), + ) + } +} diff --git a/crates/collab_ui/src/notifications/stories.rs b/crates/collab_ui/src/notifications/stories.rs new file mode 100644 index 0000000..3651867 --- /dev/null +++ b/crates/collab_ui/src/notifications/stories.rs @@ -0,0 +1,3 @@ +mod collab_notification; + +pub use collab_notification::*; diff --git a/crates/collab_ui/src/notifications/stories/collab_notification.rs b/crates/collab_ui/src/notifications/stories/collab_notification.rs new file mode 100644 index 0000000..e67ce81 --- /dev/null +++ b/crates/collab_ui/src/notifications/stories/collab_notification.rs @@ -0,0 +1,50 @@ +use gpui::prelude::*; +use story::{StoryContainer, StoryItem, StorySection}; +use ui::prelude::*; + +use crate::notifications::collab_notification::CollabNotification; + +pub struct CollabNotificationStory; + +impl Render for CollabNotificationStory { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + let window_container = |width, height| div().w(px(width)).h(px(height)); + + StoryContainer::new( + "CollabNotification Story", + "crates/collab_ui/src/notifications/stories/collab_notification.rs", + ) + .child( + StorySection::new().child(StoryItem::new( + "Incoming Call Notification", + window_container(400., 72.).child( + CollabNotification::new( + "https://avatars.githubusercontent.com/u/1486634?v=4", + Button::new("accept", "Accept"), + Button::new("decline", "Decline"), + ) + .child( + v_flex() + .overflow_hidden() + 
.child(Label::new("maxdeviant is sharing a project in Zed")), + ), + ), + )), + ) + .child( + StorySection::new().child(StoryItem::new( + "Project Shared Notification", + window_container(400., 72.).child( + CollabNotification::new( + "https://avatars.githubusercontent.com/u/1714999?v=4", + Button::new("open", "Open"), + Button::new("dismiss", "Dismiss"), + ) + .child(Label::new("iamnbutler")) + .child(Label::new("is sharing a project in Zed:")) + .child(Label::new("zed")), + ), + )), + ) + } +} diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs new file mode 100644 index 0000000..f82cba9 --- /dev/null +++ b/crates/collab_ui/src/panel_settings.rs @@ -0,0 +1,104 @@ +use anyhow; +use gpui::Pixels; +use schemars::JsonSchema; +use serde_derive::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; +use workspace::dock::DockPosition; + +#[derive(Deserialize, Debug)] +pub struct CollaborationPanelSettings { + pub button: bool, + pub dock: DockPosition, + pub default_width: Pixels, +} + +#[derive(Deserialize, Debug)] +pub struct ChatPanelSettings { + pub button: bool, + pub dock: DockPosition, + pub default_width: Pixels, +} + +#[derive(Deserialize, Debug)] +pub struct NotificationPanelSettings { + pub button: bool, + pub dock: DockPosition, + pub default_width: Pixels, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct PanelSettingsContent { + /// Whether to show the panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Where to dock the panel. + /// + /// Default: left + pub dock: Option, + /// Default width of the panel in pixels. + /// + /// Default: 240 + pub default_width: Option, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct MessageEditorSettings { + /// Whether to automatically replace emoji shortcodes with emoji characters. + /// For example: typing `:wave:` gets replaced with `👋`. 
+ /// + /// Default: false + pub auto_replace_emoji_shortcode: Option, +} + +impl Settings for CollaborationPanelSettings { + const KEY: Option<&'static str> = Some("collaboration_panel"); + + type FileContent = PanelSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} + +impl Settings for ChatPanelSettings { + const KEY: Option<&'static str> = Some("chat_panel"); + + type FileContent = PanelSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} + +impl Settings for NotificationPanelSettings { + const KEY: Option<&'static str> = Some("notification_panel"); + + type FileContent = PanelSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} + +impl Settings for MessageEditorSettings { + const KEY: Option<&'static str> = Some("message_editor"); + + type FileContent = MessageEditorSettings; + + fn load( + sources: SettingsSources, + _: &mut gpui::AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} diff --git a/crates/collections/Cargo.toml b/crates/collections/Cargo.toml new file mode 100644 index 0000000..b16b4c1 --- /dev/null +++ b/crates/collections/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "collections" +version = "0.1.0" +edition = "2021" +publish = false +license = "Apache-2.0" + +[lints] +workspace = true + +[lib] +path = "src/collections.rs" +doctest = false + +[features] +test-support = [] + +[dependencies] +rustc-hash = "1.1" diff --git a/crates/collections/LICENSE-APACHE b/crates/collections/LICENSE-APACHE new file mode 100644 index 0000000..1cd601d --- /dev/null +++ b/crates/collections/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/collections/src/collections.rs b/crates/collections/src/collections.rs new file mode 100644 index 0000000..25f6135 --- /dev/null +++ b/crates/collections/src/collections.rs @@ -0,0 +1,15 @@ +#[cfg(feature = "test-support")] +pub type HashMap = FxHashMap; + +#[cfg(feature = "test-support")] +pub type HashSet = FxHashSet; + +#[cfg(not(feature = "test-support"))] +pub type HashMap = std::collections::HashMap; + +#[cfg(not(feature = "test-support"))] +pub type HashSet = std::collections::HashSet; + +pub use rustc_hash::FxHasher; +pub use rustc_hash::{FxHashMap, FxHashSet}; +pub use std::collections::*; diff --git a/crates/color/Cargo.toml b/crates/color/Cargo.toml new file mode 100644 index 0000000..a68a5fb --- /dev/null +++ b/crates/color/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "color" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[features] +default = [] + +[lib] +path = "src/color.rs" +doctest = true + +[dependencies] +palette.workspace = true diff --git a/crates/color/LICENSE-GPL b/crates/color/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/color/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/color/src/color.rs b/crates/color/src/color.rs new file mode 100644 index 0000000..49bdc53 --- /dev/null +++ b/crates/color/src/color.rs @@ -0,0 +1,227 @@ +//! # Color +//! +//! The `color` crate provides a set utilities for working with colors. It is a wrapper around the [`palette`](https://docs.rs/palette) crate with some additional functionality. +//! +//! 
It is used to create a manipulate colors when building themes. +//! +//! === In development note === +//! +//! This crate is meant to sit between gpui and the theme/ui for all the color related stuff. +//! +//! It could be folded into gpui, ui or theme potentially but for now we'll continue +//! to develop it in isolation. +//! +//! Once we have a good idea of the needs of the theme system and color in gpui in general I see 3 paths: +//! 1. Use `palette` (or another color library) directly in gpui and everywhere else, rather than rolling our own color system. +//! 2. Keep this crate as a thin wrapper around `palette` and use it everywhere except gpui, and convert to gpui's color system when needed. +//! 3. Build the needed functionality into gpui and keep using its color system everywhere. +//! +//! I'm leaning towards 2 in the short term and 1 in the long term, but we'll need to discuss it more. +//! +//! === End development note === +use palette::{ + blend::Blend, convert::FromColorUnclamped, encoding, rgb::Rgb, Clamp, Mix, Srgb, WithAlpha, +}; + +/// The types of blend modes supported +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum BlendMode { + /// Multiplies the colors, resulting in a darker color. This mode is useful for creating shadows. + Multiply, + /// Lightens the color by adding the source and destination colors. It results in a lighter color. + Screen, + /// Combines Multiply and Screen blend modes. Parts of the image that are lighter than 50% gray are lightened, and parts that are darker are darkened. + Overlay, + /// Selects the darker of the base or blend color as the resulting color. Useful for darkening images without affecting the overall contrast. + Darken, + /// Selects the lighter of the base or blend color as the resulting color. Useful for lightening images without affecting the overall contrast. + Lighten, + /// Brightens the base color to reflect the blend color. The result is a lightened image. + Dodge, + /// Darkens the base color to reflect the blend color. The result is a darkened image. + Burn, + /// Similar to Overlay, but with a stronger effect. Hard Light can either multiply or screen colors, depending on the blend color. + HardLight, + /// A softer version of Hard Light. Soft Light either darkens or lightens colors, depending on the blend color. + SoftLight, + /// Subtracts the darker of the two constituent colors from the lighter color. Difference mode is useful for creating more vivid colors. + Difference, + /// Similar to Difference, but with a lower contrast. Exclusion mode produces an effect similar to Difference but with less intensity. + Exclusion, +} + +/// Converts a hexadecimal color string to a `palette::Hsla` color. +/// +/// This function supports the following hex formats: +/// `#RGB`, `#RGBA`, `#RRGGBB`, `#RRGGBBAA`. 
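+/// A minimal usage sketch (illustrative only, not run as a doctest):
+///
+/// ```ignore
+/// // Shorthand "#08f" expands to "#0088ffff", i.e. fully opaque.
+/// let color = hex_to_hsla("#08f").expect("valid hex color");
+/// // Inputs of any other length (e.g. five digits) are rejected with an error string.
+/// assert!(hex_to_hsla("#12345").is_err());
+/// ```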
+pub fn hex_to_hsla(s: &str) -> Result { + let hex = s.trim_start_matches('#'); + + // Expand shorthand formats #RGB and #RGBA to #RRGGBB and #RRGGBBAA + let h = hex.as_bytes(); + let arr: [u8; 8] = match h.len() { + // #RGB => #RRGGBBAA + 3 => [h[0], h[0], h[1], h[1], h[2], h[2], b'f', b'f'], + // #RGBA => #RRGGBBAA + 4 => [h[0], h[0], h[1], h[1], h[2], h[2], h[3], h[3]], + // #RRGGBB => #RRGGBBAA + 6 => [h[0], h[1], h[2], h[3], h[4], h[5], b'f', b'f'], + // Already in #RRGGBBAA + 8 => h.try_into().unwrap(), + _ => return Err("Invalid hexadecimal string length".to_string()), + }; + + let hex = + std::str::from_utf8(&arr).map_err(|_| format!("Invalid hexadecimal string: {}", s))?; + let hex_val = + u32::from_str_radix(hex, 16).map_err(|_| format!("Invalid hexadecimal string: {}", s))?; + + Ok(RGBAColor { + r: ((hex_val >> 24) & 0xFF) as f32 / 255.0, + g: ((hex_val >> 16) & 0xFF) as f32 / 255.0, + b: ((hex_val >> 8) & 0xFF) as f32 / 255.0, + a: (hex_val & 0xFF) as f32 / 255.0, + }) +} + +// These derives implement to and from palette's color types. +#[derive(FromColorUnclamped, WithAlpha, Debug, Clone)] +#[palette(skip_derives(Rgb), rgb_standard = "encoding::Srgb")] +pub struct RGBAColor { + r: f32, + g: f32, + b: f32, + // Let Palette know this is our alpha channel. + #[palette(alpha)] + a: f32, +} + +impl FromColorUnclamped for RGBAColor { + fn from_color_unclamped(color: RGBAColor) -> RGBAColor { + color + } +} + +impl FromColorUnclamped> for RGBAColor +where + Srgb: FromColorUnclamped>, +{ + fn from_color_unclamped(color: Rgb) -> RGBAColor { + let srgb = Srgb::from_color_unclamped(color); + RGBAColor { + r: srgb.red, + g: srgb.green, + b: srgb.blue, + a: 1.0, + } + } +} + +impl FromColorUnclamped for Rgb +where + Rgb: FromColorUnclamped, +{ + fn from_color_unclamped(color: RGBAColor) -> Self { + Self::from_color_unclamped(Srgb::new(color.r, color.g, color.b)) + } +} + +impl Clamp for RGBAColor { + fn clamp(self) -> Self { + RGBAColor { + r: self.r.min(1.0).max(0.0), + g: self.g.min(1.0).max(0.0), + b: self.b.min(1.0).max(0.0), + a: self.a.min(1.0).max(0.0), + } + } +} + +impl RGBAColor { + /// Creates a new color from the given RGBA values. + /// + /// This color can be used to convert to any [`palette::Color`] type. + pub fn new(r: f32, g: f32, b: f32, a: f32) -> Self { + RGBAColor { r, g, b, a } + } + + /// Returns a set of states for this color. + pub fn states(self, is_light: bool) -> ColorStates { + states_for_color(self, is_light) + } + + /// Mixes this color with another [`palette::Hsl`] color at the given `mix_ratio`. + pub fn mixed(&self, other: RGBAColor, mix_ratio: f32) -> Self { + let srgb_self = Srgb::new(self.r, self.g, self.b); + let srgb_other = Srgb::new(other.r, other.g, other.b); + + // Directly mix the colors as sRGB values + let mixed = srgb_self.mix(srgb_other, mix_ratio); + RGBAColor::from_color_unclamped(mixed) + } + + pub fn blend(&self, other: RGBAColor, blend_mode: BlendMode) -> Self { + let srgb_self = Srgb::new(self.r, self.g, self.b); + let srgb_other = Srgb::new(other.r, other.g, other.b); + + let blended = match blend_mode { + // replace hsl methods with the respective sRGB methods + BlendMode::Multiply => srgb_self.multiply(srgb_other), + _ => unimplemented!(), + }; + + Self { + r: blended.red, + g: blended.green, + b: blended.blue, + a: self.a, + } + } +} + +/// A set of colors for different states of an element. +#[derive(Debug, Clone)] +pub struct ColorStates { + /// The default color. 
+ pub default: RGBAColor, + /// The color when the mouse is hovering over the element. + pub hover: RGBAColor, + /// The color when the mouse button is held down on the element. + pub active: RGBAColor, + /// The color when the element is focused with the keyboard. + pub focused: RGBAColor, + /// The color when the element is disabled. + pub disabled: RGBAColor, +} + +/// Returns a set of colors for different states of an element. +/// +/// todo("This should take a theme and use appropriate colors from it") +pub fn states_for_color(color: RGBAColor, is_light: bool) -> ColorStates { + let adjustment_factor = if is_light { 0.1 } else { -0.1 }; + let hover_adjustment = 1.0 - adjustment_factor; + let active_adjustment = 1.0 - 2.0 * adjustment_factor; + let focused_adjustment = 1.0 - 3.0 * adjustment_factor; + let disabled_adjustment = 1.0 - 4.0 * adjustment_factor; + + let make_adjustment = |color: RGBAColor, adjustment: f32| -> RGBAColor { + // Adjust lightness for each state + // Note: Adjustment logic may differ; simplify as needed for sRGB + RGBAColor::new( + color.r * adjustment, + color.g * adjustment, + color.b * adjustment, + color.a, + ) + }; + + let color = color.clamp(); + + ColorStates { + default: color.clone(), + hover: make_adjustment(color.clone(), hover_adjustment), + active: make_adjustment(color.clone(), active_adjustment), + focused: make_adjustment(color.clone(), focused_adjustment), + disabled: make_adjustment(color.clone(), disabled_adjustment), + } +} diff --git a/crates/command_palette/Cargo.toml b/crates/command_palette/Cargo.toml new file mode 100644 index 0000000..eecd80b --- /dev/null +++ b/crates/command_palette/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "command_palette" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/command_palette.rs" +doctest = false + +[dependencies] +client.workspace = true +collections.workspace = true +command_palette_hooks.workspace = true +fuzzy.workspace = true +gpui.workspace = true +picker.workspace = true +postage.workspace = true +project.workspace = true +serde.workspace = true +settings.workspace = true +theme.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true +zed_actions.workspace = true + +[dev-dependencies] +ctor.workspace = true +editor = { workspace = true, features = ["test-support"] } +env_logger.workspace = true +go_to_line.workspace = true +gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +menu.workspace = true +project = { workspace = true, features = ["test-support"] } +serde_json.workspace = true +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/command_palette/LICENSE-GPL b/crates/command_palette/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/command_palette/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs new file mode 100644 index 0000000..cf62b75 --- /dev/null +++ b/crates/command_palette/src/command_palette.rs @@ -0,0 +1,598 @@ +use std::{ + cmp::{self, Reverse}, + sync::Arc, + time::Duration, +}; + +use client::{parse_zed_link, telemetry::Telemetry}; +use collections::HashMap; +use command_palette_hooks::{ + CommandInterceptResult, CommandPaletteFilter, CommandPaletteInterceptor, +}; +use fuzzy::{StringMatch, 
StringMatchCandidate}; +use gpui::{ + actions, Action, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Global, + ParentElement, Render, Styled, Task, UpdateGlobal, View, ViewContext, VisualContext, WeakView, +}; +use picker::{Picker, PickerDelegate}; + +use postage::{sink::Sink, stream::Stream}; +use ui::{h_flex, prelude::*, v_flex, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing}; +use util::ResultExt; +use workspace::{ModalView, Workspace}; +use zed_actions::OpenZedUrl; + +actions!(command_palette, [Toggle]); + +pub fn init(cx: &mut AppContext) { + client::init_settings(cx); + cx.set_global(HitCounts::default()); + command_palette_hooks::init(cx); + cx.observe_new_views(CommandPalette::register).detach(); +} + +impl ModalView for CommandPalette {} + +pub struct CommandPalette { + picker: View>, +} + +fn trim_consecutive_whitespaces(input: &str) -> String { + let mut result = String::with_capacity(input.len()); + let mut last_char_was_whitespace = false; + + for char in input.trim().chars() { + if char.is_whitespace() { + if !last_char_was_whitespace { + result.push(char); + } + last_char_was_whitespace = true; + } else { + result.push(char); + last_char_was_whitespace = false; + } + } + result +} + +impl CommandPalette { + fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(|workspace, _: &Toggle, cx| { + let Some(previous_focus_handle) = cx.focused() else { + return; + }; + let telemetry = workspace.client().telemetry().clone(); + workspace.toggle_modal(cx, move |cx| { + CommandPalette::new(previous_focus_handle, telemetry, cx) + }); + }); + } + + fn new( + previous_focus_handle: FocusHandle, + telemetry: Arc, + cx: &mut ViewContext, + ) -> Self { + let filter = CommandPaletteFilter::try_global(cx); + + let commands = cx + .available_actions() + .into_iter() + .filter_map(|action| { + if filter.is_some_and(|filter| filter.is_hidden(&*action)) { + return None; + } + + Some(Command { + name: humanize_action_name(action.name()), + action, + }) + }) + .collect(); + + let delegate = CommandPaletteDelegate::new( + cx.view().downgrade(), + commands, + telemetry, + previous_focus_handle, + ); + + let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); + Self { picker } + } +} + +impl EventEmitter for CommandPalette {} + +impl FocusableView for CommandPalette { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for CommandPalette { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +pub struct CommandPaletteDelegate { + command_palette: WeakView, + all_commands: Vec, + commands: Vec, + matches: Vec, + selected_ix: usize, + telemetry: Arc, + previous_focus_handle: FocusHandle, + updating_matches: Option<( + Task<()>, + postage::dispatch::Receiver<(Vec, Vec)>, + )>, +} + +struct Command { + name: String, + action: Box, +} + +impl Clone for Command { + fn clone(&self) -> Self { + Self { + name: self.name.clone(), + action: self.action.boxed_clone(), + } + } +} + +/// Hit count for each command in the palette. +/// We only account for commands triggered directly via command palette and not by e.g. keystrokes because +/// if a user already knows a keystroke for a command, they are unlikely to use a command palette to look for it. 
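+/// Counts are keyed by the humanized command name: `confirm` bumps the entry for the
+/// executed command, and `update_matches` sorts commands by descending hit count
+/// (then by name) before fuzzy matching.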
+#[derive(Default, Clone)] +struct HitCounts(HashMap); + +impl Global for HitCounts {} + +impl CommandPaletteDelegate { + fn new( + command_palette: WeakView, + commands: Vec, + telemetry: Arc, + previous_focus_handle: FocusHandle, + ) -> Self { + Self { + command_palette, + all_commands: commands.clone(), + matches: vec![], + commands, + selected_ix: 0, + telemetry, + previous_focus_handle, + updating_matches: None, + } + } + + fn matches_updated( + &mut self, + query: String, + mut commands: Vec, + mut matches: Vec, + cx: &mut ViewContext>, + ) { + self.updating_matches.take(); + + let mut intercept_result = CommandPaletteInterceptor::try_global(cx) + .and_then(|interceptor| interceptor.intercept(&query, cx)); + + if parse_zed_link(&query, cx).is_some() { + intercept_result = Some(CommandInterceptResult { + action: OpenZedUrl { url: query.clone() }.boxed_clone(), + string: query.clone(), + positions: vec![], + }) + } + + if let Some(CommandInterceptResult { + action, + string, + positions, + }) = intercept_result + { + if let Some(idx) = matches + .iter() + .position(|m| commands[m.candidate_id].action.type_id() == action.type_id()) + { + matches.remove(idx); + } + commands.push(Command { + name: string.clone(), + action, + }); + matches.insert( + 0, + StringMatch { + candidate_id: commands.len() - 1, + string, + positions, + score: 0.0, + }, + ) + } + self.commands = commands; + self.matches = matches; + if self.matches.is_empty() { + self.selected_ix = 0; + } else { + self.selected_ix = cmp::min(self.selected_ix, self.matches.len() - 1); + } + } +} + +impl PickerDelegate for CommandPaletteDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Execute a command...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn selected_index(&self) -> usize { + self.selected_ix + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_ix = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let (mut tx, mut rx) = postage::dispatch::channel(1); + let task = cx.background_executor().spawn({ + let mut commands = self.all_commands.clone(); + let hit_counts = cx.global::().clone(); + let executor = cx.background_executor().clone(); + let query = trim_consecutive_whitespaces(&query.as_str()); + async move { + commands.sort_by_key(|action| { + ( + Reverse(hit_counts.0.get(&action.name).cloned()), + action.name.clone(), + ) + }); + + let candidates = commands + .iter() + .enumerate() + .map(|(ix, command)| StringMatchCandidate { + id: ix, + string: command.name.to_string(), + char_bag: command.name.chars().collect(), + }) + .collect::>(); + let matches = if query.is_empty() { + candidates + .into_iter() + .enumerate() + .map(|(index, candidate)| StringMatch { + candidate_id: index, + string: candidate.string, + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + let ret = fuzzy::match_strings( + &candidates, + &query, + true, + 10000, + &Default::default(), + executor, + ) + .await; + ret + }; + + tx.send((commands, matches)).await.log_err(); + } + }); + self.updating_matches = Some((task, rx.clone())); + + cx.spawn(move |picker, mut cx| async move { + let Some((commands, matches)) = rx.recv().await else { + return; + }; + + picker + .update(&mut cx, |picker, cx| { + picker + .delegate + .matches_updated(query, commands, matches, cx) + }) + .log_err(); + }) + } + + fn finalize_update_matches( + &mut self, + query: 
String, + duration: Duration, + cx: &mut ViewContext>, + ) -> bool { + let Some((task, rx)) = self.updating_matches.take() else { + return true; + }; + + match cx + .background_executor() + .block_with_timeout(duration, rx.clone().recv()) + { + Ok(Some((commands, matches))) => { + self.matches_updated(query, commands, matches, cx); + true + } + _ => { + self.updating_matches = Some((task, rx)); + false + } + } + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.command_palette + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if self.matches.is_empty() { + self.dismissed(cx); + return; + } + let action_ix = self.matches[self.selected_ix].candidate_id; + let command = self.commands.swap_remove(action_ix); + + self.telemetry + .report_action_event("command palette", command.name.clone()); + + self.matches.clear(); + self.commands.clear(); + HitCounts::update_global(cx, |hit_counts, _cx| { + *hit_counts.0.entry(command.name).or_default() += 1; + }); + let action = command.action; + cx.focus(&self.previous_focus_handle); + self.dismissed(cx); + cx.dispatch_action(action); + } + + fn render_match( + &self, + ix: usize, + selected: bool, + cx: &mut ViewContext>, + ) -> Option { + let r#match = self.matches.get(ix)?; + let command = self.commands.get(r#match.candidate_id)?; + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child( + h_flex() + .w_full() + .py_px() + .justify_between() + .child(HighlightedLabel::new( + command.name.clone(), + r#match.positions.clone(), + )) + .children(KeyBinding::for_action_in( + &*command.action, + &self.previous_focus_handle, + cx, + )), + ), + ) + } +} + +fn humanize_action_name(name: &str) -> String { + let capacity = name.len() + name.chars().filter(|c| c.is_uppercase()).count(); + let mut result = String::with_capacity(capacity); + for char in name.chars() { + if char == ':' { + if result.ends_with(':') { + result.push(' '); + } else { + result.push(':'); + } + } else if char == '_' { + result.push(' '); + } else if char.is_uppercase() { + if !result.ends_with(' ') { + result.push(' '); + } + result.extend(char.to_lowercase()); + } else { + result.push(char); + } + } + result +} + +impl std::fmt::Debug for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Command") + .field("name", &self.name) + .finish_non_exhaustive() + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::*; + use editor::Editor; + use go_to_line::GoToLine; + use gpui::TestAppContext; + use language::Point; + use project::Project; + use settings::KeymapFile; + use workspace::{AppState, Workspace}; + + #[test] + fn test_humanize_action_name() { + assert_eq!( + humanize_action_name("editor::GoToDefinition"), + "editor: go to definition" + ); + assert_eq!( + humanize_action_name("editor::Backspace"), + "editor: backspace" + ); + assert_eq!( + humanize_action_name("go_to_line::Deploy"), + "go to line: deploy" + ); + } + + #[gpui::test] + async fn test_command_palette(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); + + let editor = cx.new_view(|cx| { + let mut editor = Editor::single_line(cx); + editor.set_text("abc", cx); + editor + }); + + workspace.update(cx, |workspace, cx| { + 
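+            // Add the single-line editor to the active pane and focus it: the palette
+            // captures this focus handle when it opens and restores it before dispatching
+            // the chosen action, so "editor: backspace" below edits this editor.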
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx); + editor.update(cx, |editor, cx| editor.focus(cx)) + }); + + cx.simulate_keystrokes("cmd-shift-p"); + + let palette = workspace.update(cx, |workspace, cx| { + workspace + .active_modal::(cx) + .unwrap() + .read(cx) + .picker + .clone() + }); + + palette.update(cx, |palette, _| { + assert!(palette.delegate.commands.len() > 5); + let is_sorted = + |actions: &[Command]| actions.windows(2).all(|pair| pair[0].name <= pair[1].name); + assert!(is_sorted(&palette.delegate.commands)); + }); + + cx.simulate_input("bcksp"); + + palette.update(cx, |palette, _| { + assert_eq!(palette.delegate.matches[0].string, "editor: backspace"); + }); + + cx.simulate_keystrokes("enter"); + + workspace.update(cx, |workspace, cx| { + assert!(workspace.active_modal::(cx).is_none()); + assert_eq!(editor.read(cx).text(cx), "ab") + }); + + // Add namespace filter, and redeploy the palette + cx.update(|cx| { + CommandPaletteFilter::update_global(cx, |filter, _| { + filter.hide_namespace("editor"); + }); + }); + + cx.simulate_keystrokes("cmd-shift-p"); + cx.simulate_input("bcksp"); + + let palette = workspace.update(cx, |workspace, cx| { + workspace + .active_modal::(cx) + .unwrap() + .read(cx) + .picker + .clone() + }); + palette.update(cx, |palette, _| { + assert!(palette.delegate.matches.is_empty()) + }); + } + + #[gpui::test] + async fn test_go_to_line(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); + + cx.simulate_keystrokes("cmd-n"); + + let editor = workspace.update(cx, |workspace, cx| { + workspace.active_item_as::(cx).unwrap() + }); + editor.update(cx, |editor, cx| editor.set_text("1\n2\n3\n4\n5\n6\n", cx)); + + cx.simulate_keystrokes("cmd-shift-p"); + cx.simulate_input("go to line: Toggle"); + cx.simulate_keystrokes("enter"); + + workspace.update(cx, |workspace, cx| { + assert!(workspace.active_modal::(cx).is_some()) + }); + + cx.simulate_keystrokes("3 enter"); + + editor.update(cx, |editor, cx| { + assert!(editor.focus_handle(cx).is_focused(cx)); + assert_eq!( + editor.selections.last::(cx).range().start, + Point::new(2, 0) + ); + }); + } + + fn init_test(cx: &mut TestAppContext) -> Arc { + cx.update(|cx| { + let app_state = AppState::test(cx); + theme::init(theme::LoadThemes::JustBase, cx); + language::init(cx); + editor::init(cx); + menu::init(); + go_to_line::init(cx); + workspace::init(app_state.clone(), cx); + init(cx); + Project::init_settings(cx); + KeymapFile::parse( + r#"[ + { + "bindings": { + "cmd-n": "workspace::NewFile", + "enter": "menu::Confirm", + "cmd-shift-p": "command_palette::Toggle" + } + } + ]"#, + ) + .unwrap() + .add_to_cx(cx) + .unwrap(); + app_state + }) + } +} diff --git a/crates/command_palette_hooks/Cargo.toml b/crates/command_palette_hooks/Cargo.toml new file mode 100644 index 0000000..941233a --- /dev/null +++ b/crates/command_palette_hooks/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "command_palette_hooks" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/command_palette_hooks.rs" +doctest = false + +[dependencies] +collections.workspace = true +derive_more.workspace = true +gpui.workspace = true diff --git a/crates/command_palette_hooks/LICENSE-GPL b/crates/command_palette_hooks/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ 
b/crates/command_palette_hooks/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/command_palette_hooks/src/command_palette_hooks.rs b/crates/command_palette_hooks/src/command_palette_hooks.rs new file mode 100644 index 0000000..5c9b797 --- /dev/null +++ b/crates/command_palette_hooks/src/command_palette_hooks.rs @@ -0,0 +1,142 @@ +//! Provides hooks for customizing the behavior of the command palette. + +#![deny(missing_docs)] + +use std::any::TypeId; + +use collections::HashSet; +use derive_more::{Deref, DerefMut}; +use gpui::{Action, AppContext, BorrowAppContext, Global}; + +/// Initializes the command palette hooks. +pub fn init(cx: &mut AppContext) { + cx.set_global(GlobalCommandPaletteFilter::default()); + cx.set_global(GlobalCommandPaletteInterceptor::default()); +} + +/// A filter for the command palette. +#[derive(Default)] +pub struct CommandPaletteFilter { + hidden_namespaces: HashSet<&'static str>, + hidden_action_types: HashSet, +} + +#[derive(Deref, DerefMut, Default)] +struct GlobalCommandPaletteFilter(CommandPaletteFilter); + +impl Global for GlobalCommandPaletteFilter {} + +impl CommandPaletteFilter { + /// Returns the global [`CommandPaletteFilter`], if one is set. + pub fn try_global(cx: &AppContext) -> Option<&CommandPaletteFilter> { + cx.try_global::() + .map(|filter| &filter.0) + } + + /// Returns a mutable reference to the global [`CommandPaletteFilter`]. + pub fn global_mut(cx: &mut AppContext) -> &mut Self { + cx.global_mut::() + } + + /// Updates the global [`CommandPaletteFilter`] using the given closure. + pub fn update_global(cx: &mut AppContext, update: F) -> R + where + F: FnOnce(&mut Self, &mut AppContext) -> R, + { + cx.update_global(|this: &mut GlobalCommandPaletteFilter, cx| update(&mut this.0, cx)) + } + + /// Returns whether the given [`Action`] is hidden by the filter. + pub fn is_hidden(&self, action: &dyn Action) -> bool { + let name = action.name(); + let namespace = name.split("::").next().unwrap_or("malformed action name"); + + self.hidden_namespaces.contains(namespace) + || self.hidden_action_types.contains(&action.type_id()) + } + + /// Hides all actions in the given namespace. + pub fn hide_namespace(&mut self, namespace: &'static str) { + self.hidden_namespaces.insert(namespace); + } + + /// Shows all actions in the given namespace. + pub fn show_namespace(&mut self, namespace: &'static str) { + self.hidden_namespaces.remove(namespace); + } + + /// Hides all actions with the given types. + pub fn hide_action_types(&mut self, action_types: &[TypeId]) { + self.hidden_action_types.extend(action_types); + } + + /// Shows all actions with the given types. + pub fn show_action_types<'a>(&mut self, action_types: impl Iterator) { + for action_type in action_types { + self.hidden_action_types.remove(action_type); + } + } +} + +/// The result of intercepting a command palette command. +pub struct CommandInterceptResult { + /// The action produced as a result of the interception. + pub action: Box, + // TODO: Document this field. + #[allow(missing_docs)] + pub string: String, + // TODO: Document this field. + #[allow(missing_docs)] + pub positions: Vec, +} + +/// An interceptor for the command palette. 
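+/// A hedged sketch of how a consumer might install one. `MyLineAction` and the `:`
+/// query syntax are hypothetical; only the hook API itself comes from this crate:
+///
+/// ```ignore
+/// CommandPaletteInterceptor::update_global(cx, |interceptor, _cx| {
+///     interceptor.set(Box::new(|query, _cx| {
+///         // Turn a query like ":42" into an action plus the label to show for it.
+///         let line: u32 = query.strip_prefix(':')?.parse().ok()?;
+///         Some(CommandInterceptResult {
+///             action: MyLineAction { line }.boxed_clone(), // hypothetical Action
+///             string: format!("go to line {line}"),
+///             positions: Vec::new(),
+///         })
+///     }));
+/// });
+/// ```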
+#[derive(Default)] +pub struct CommandPaletteInterceptor( + Option Option>>, +); + +#[derive(Default)] +struct GlobalCommandPaletteInterceptor(CommandPaletteInterceptor); + +impl Global for GlobalCommandPaletteInterceptor {} + +impl CommandPaletteInterceptor { + /// Returns the global [`CommandPaletteInterceptor`], if one is set. + pub fn try_global(cx: &AppContext) -> Option<&CommandPaletteInterceptor> { + cx.try_global::() + .map(|interceptor| &interceptor.0) + } + + /// Updates the global [`CommandPaletteInterceptor`] using the given closure. + pub fn update_global(cx: &mut AppContext, update: F) -> R + where + F: FnOnce(&mut Self, &mut AppContext) -> R, + { + cx.update_global(|this: &mut GlobalCommandPaletteInterceptor, cx| update(&mut this.0, cx)) + } + + /// Intercepts the given query from the command palette. + pub fn intercept(&self, query: &str, cx: &AppContext) -> Option { + let Some(handler) = self.0.as_ref() else { + return None; + }; + + (handler)(query, cx) + } + + /// Clears the global interceptor. + pub fn clear(&mut self) { + self.0 = None; + } + + /// Sets the global interceptor. + /// + /// This will override the previous interceptor, if it exists. + pub fn set( + &mut self, + handler: Box Option>, + ) { + self.0 = Some(handler); + } +} diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml new file mode 100644 index 0000000..f7b9988 --- /dev/null +++ b/crates/copilot/Cargo.toml @@ -0,0 +1,67 @@ +[package] +name = "copilot" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/copilot.rs" +doctest = false + +[features] +test-support = [ + "collections/test-support", + "gpui/test-support", + "language/test-support", + "lsp/test-support", + "settings/test-support", + "util/test-support", +] + +[dependencies] +anyhow.workspace = true +async-compression.workspace = true +async-tar.workspace = true +collections.workspace = true +client.workspace = true +command_palette_hooks.workspace = true +editor.workspace = true +futures.workspace = true +gpui.workspace = true +http.workspace = true +language.workspace = true +lsp.workspace = true +menu.workspace = true +node_runtime.workspace = true +parking_lot.workspace = true +project.workspace = true +serde.workspace = true +settings.workspace = true +smol.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[target.'cfg(windows)'.dependencies] +async-std = { version = "1.12.0", features = ["unstable"] } + +[dev-dependencies] +clock.workspace = true +indoc.workspace = true +serde_json.workspace = true +collections = { workspace = true, features = ["test-support"] } +editor = { workspace = true, features = ["test-support"] } +fs = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +lsp = { workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } +rpc = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +theme = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } diff --git a/crates/copilot/LICENSE-GPL b/crates/copilot/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/copilot/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline 
at end of file diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs new file mode 100644 index 0000000..9a836eb --- /dev/null +++ b/crates/copilot/src/copilot.rs @@ -0,0 +1,1273 @@ +mod copilot_completion_provider; +pub mod request; +mod sign_in; + +use anyhow::{anyhow, Context as _, Result}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use collections::{HashMap, HashSet}; +use command_palette_hooks::CommandPaletteFilter; +use futures::{channel::oneshot, future::Shared, Future, FutureExt, TryFutureExt}; +use gpui::{ + actions, AppContext, AsyncAppContext, Context, Entity, EntityId, EventEmitter, Global, Model, + ModelContext, Task, WeakModel, +}; +use http::github::latest_github_release; +use http::HttpClient; +use language::{ + language_settings::{all_language_settings, language_settings, InlineCompletionProvider}, + point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16, + ToPointUtf16, +}; +use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId}; +use node_runtime::NodeRuntime; +use parking_lot::Mutex; +use request::StatusNotification; +use settings::SettingsStore; +use smol::{fs, io::BufReader, stream::StreamExt}; +use std::{ + any::TypeId, + ffi::OsString, + mem, + ops::Range, + path::{Path, PathBuf}, + sync::Arc, +}; +use util::{fs::remove_matching, maybe, paths, ResultExt}; + +pub use copilot_completion_provider::CopilotCompletionProvider; +pub use sign_in::CopilotCodeVerification; + +actions!( + copilot, + [ + Suggest, + NextSuggestion, + PreviousSuggestion, + Reinstall, + SignIn, + SignOut + ] +); + +pub fn init( + new_server_id: LanguageServerId, + http: Arc, + node_runtime: Arc, + cx: &mut AppContext, +) { + let copilot = cx.new_model({ + let node_runtime = node_runtime.clone(); + move |cx| Copilot::start(new_server_id, http, node_runtime, cx) + }); + Copilot::set_global(copilot.clone(), cx); + cx.observe(&copilot, |handle, cx| { + let copilot_action_types = [ + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + ]; + let copilot_auth_action_types = [TypeId::of::()]; + let copilot_no_auth_action_types = [TypeId::of::()]; + let status = handle.read(cx).status(); + let filter = CommandPaletteFilter::global_mut(cx); + + match status { + Status::Disabled => { + filter.hide_action_types(&copilot_action_types); + filter.hide_action_types(&copilot_auth_action_types); + filter.hide_action_types(&copilot_no_auth_action_types); + } + Status::Authorized => { + filter.hide_action_types(&copilot_no_auth_action_types); + filter.show_action_types( + copilot_action_types + .iter() + .chain(&copilot_auth_action_types), + ); + } + _ => { + filter.hide_action_types(&copilot_action_types); + filter.hide_action_types(&copilot_auth_action_types); + filter.show_action_types(copilot_no_auth_action_types.iter()); + } + } + }) + .detach(); + + cx.on_action(|_: &SignIn, cx| { + if let Some(copilot) = Copilot::global(cx) { + copilot + .update(cx, |copilot, cx| copilot.sign_in(cx)) + .detach_and_log_err(cx); + } + }); + cx.on_action(|_: &SignOut, cx| { + if let Some(copilot) = Copilot::global(cx) { + copilot + .update(cx, |copilot, cx| copilot.sign_out(cx)) + .detach_and_log_err(cx); + } + }); + cx.on_action(|_: &Reinstall, cx| { + if let Some(copilot) = Copilot::global(cx) { + copilot + .update(cx, |copilot, cx| copilot.reinstall(cx)) + .detach(); + } + }); +} + +enum CopilotServer { + Disabled, + Starting { task: Shared> }, + Error(Arc), + Running(RunningCopilotServer), +} + +impl 
CopilotServer { + fn as_authenticated(&mut self) -> Result<&mut RunningCopilotServer> { + let server = self.as_running()?; + if matches!(server.sign_in_status, SignInStatus::Authorized { .. }) { + Ok(server) + } else { + Err(anyhow!("must sign in before using copilot")) + } + } + + fn as_running(&mut self) -> Result<&mut RunningCopilotServer> { + match self { + CopilotServer::Starting { .. } => Err(anyhow!("copilot is still starting")), + CopilotServer::Disabled => Err(anyhow!("copilot is disabled")), + CopilotServer::Error(error) => Err(anyhow!( + "copilot was not started because of an error: {}", + error + )), + CopilotServer::Running(server) => Ok(server), + } + } +} + +struct RunningCopilotServer { + lsp: Arc, + sign_in_status: SignInStatus, + registered_buffers: HashMap, +} + +#[derive(Clone, Debug)] +enum SignInStatus { + Authorized, + Unauthorized, + SigningIn { + prompt: Option, + task: Shared>>>, + }, + SignedOut, +} + +#[derive(Debug, Clone)] +pub enum Status { + Starting { + task: Shared>, + }, + Error(Arc), + Disabled, + SignedOut, + SigningIn { + prompt: Option, + }, + Unauthorized, + Authorized, +} + +impl Status { + pub fn is_authorized(&self) -> bool { + matches!(self, Status::Authorized) + } +} + +struct RegisteredBuffer { + uri: lsp::Url, + language_id: String, + snapshot: BufferSnapshot, + snapshot_version: i32, + _subscriptions: [gpui::Subscription; 2], + pending_buffer_change: Task>, +} + +impl RegisteredBuffer { + fn report_changes( + &mut self, + buffer: &Model, + cx: &mut ModelContext, + ) -> oneshot::Receiver<(i32, BufferSnapshot)> { + let (done_tx, done_rx) = oneshot::channel(); + + if buffer.read(cx).version() == self.snapshot.version { + let _ = done_tx.send((self.snapshot_version, self.snapshot.clone())); + } else { + let buffer = buffer.downgrade(); + let id = buffer.entity_id(); + let prev_pending_change = + mem::replace(&mut self.pending_buffer_change, Task::ready(None)); + self.pending_buffer_change = cx.spawn(move |copilot, mut cx| async move { + prev_pending_change.await; + + let old_version = copilot + .update(&mut cx, |copilot, _| { + let server = copilot.server.as_authenticated().log_err()?; + let buffer = server.registered_buffers.get_mut(&id)?; + Some(buffer.snapshot.version.clone()) + }) + .ok()??; + let new_snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot()).ok()?; + + let content_changes = cx + .background_executor() + .spawn({ + let new_snapshot = new_snapshot.clone(); + async move { + new_snapshot + .edits_since::<(PointUtf16, usize)>(&old_version) + .map(|edit| { + let edit_start = edit.new.start.0; + let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0); + let new_text = new_snapshot + .text_for_range(edit.new.start.1..edit.new.end.1) + .collect(); + lsp::TextDocumentContentChangeEvent { + range: Some(lsp::Range::new( + point_to_lsp(edit_start), + point_to_lsp(edit_end), + )), + range_length: None, + text: new_text, + } + }) + .collect::>() + } + }) + .await; + + copilot + .update(&mut cx, |copilot, _| { + let server = copilot.server.as_authenticated().log_err()?; + let buffer = server.registered_buffers.get_mut(&id)?; + if !content_changes.is_empty() { + buffer.snapshot_version += 1; + buffer.snapshot = new_snapshot; + server + .lsp + .notify::( + lsp::DidChangeTextDocumentParams { + text_document: lsp::VersionedTextDocumentIdentifier::new( + buffer.uri.clone(), + buffer.snapshot_version, + ), + content_changes, + }, + ) + .log_err(); + } + let _ = done_tx.send((buffer.snapshot_version, buffer.snapshot.clone())); + 
Some(()) + }) + .ok()?; + + Some(()) + }); + } + + done_rx + } +} + +#[derive(Debug)] +pub struct Completion { + pub uuid: String, + pub range: Range, + pub text: String, +} + +pub struct Copilot { + http: Arc, + node_runtime: Arc, + server: CopilotServer, + buffers: HashSet>, + server_id: LanguageServerId, + _subscription: gpui::Subscription, +} + +pub enum Event { + CopilotLanguageServerStarted, +} + +impl EventEmitter for Copilot {} + +struct GlobalCopilot(Model); + +impl Global for GlobalCopilot {} + +impl Copilot { + pub fn global(cx: &AppContext) -> Option> { + cx.try_global::() + .map(|model| model.0.clone()) + } + + pub fn set_global(copilot: Model, cx: &mut AppContext) { + cx.set_global(GlobalCopilot(copilot)); + } + + fn start( + new_server_id: LanguageServerId, + http: Arc, + node_runtime: Arc, + cx: &mut ModelContext, + ) -> Self { + let mut this = Self { + server_id: new_server_id, + http, + node_runtime, + server: CopilotServer::Disabled, + buffers: Default::default(), + _subscription: cx.on_app_quit(Self::shutdown_language_server), + }; + this.enable_or_disable_copilot(cx); + cx.observe_global::(move |this, cx| this.enable_or_disable_copilot(cx)) + .detach(); + this + } + + fn shutdown_language_server( + &mut self, + _cx: &mut ModelContext, + ) -> impl Future { + let shutdown = match mem::replace(&mut self.server, CopilotServer::Disabled) { + CopilotServer::Running(server) => Some(Box::pin(async move { server.lsp.shutdown() })), + _ => None, + }; + + async move { + if let Some(shutdown) = shutdown { + shutdown.await; + } + } + } + + fn enable_or_disable_copilot(&mut self, cx: &mut ModelContext) { + let server_id = self.server_id; + let http = self.http.clone(); + let node_runtime = self.node_runtime.clone(); + if all_language_settings(None, cx).inline_completions.provider + == InlineCompletionProvider::Copilot + { + if matches!(self.server, CopilotServer::Disabled) { + let start_task = cx + .spawn(move |this, cx| { + Self::start_language_server(server_id, http, node_runtime, this, cx) + }) + .shared(); + self.server = CopilotServer::Starting { task: start_task }; + cx.notify(); + } + } else { + self.server = CopilotServer::Disabled; + cx.notify(); + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn fake(cx: &mut gpui::TestAppContext) -> (Model, lsp::FakeLanguageServer) { + use lsp::FakeLanguageServer; + use node_runtime::FakeNodeRuntime; + + let (server, fake_server) = FakeLanguageServer::new( + LanguageServerId(0), + LanguageServerBinary { + path: "path/to/copilot".into(), + arguments: vec![], + env: None, + }, + "copilot".into(), + Default::default(), + cx.to_async(), + ); + let http = http::FakeHttpClient::create(|_| async { unreachable!() }); + let node_runtime = FakeNodeRuntime::new(); + let this = cx.new_model(|cx| Self { + server_id: LanguageServerId(0), + http: http.clone(), + node_runtime, + server: CopilotServer::Running(RunningCopilotServer { + lsp: Arc::new(server), + sign_in_status: SignInStatus::Authorized, + registered_buffers: Default::default(), + }), + _subscription: cx.on_app_quit(Self::shutdown_language_server), + buffers: Default::default(), + }); + (this, fake_server) + } + + fn start_language_server( + new_server_id: LanguageServerId, + http: Arc, + node_runtime: Arc, + this: WeakModel, + mut cx: AsyncAppContext, + ) -> impl Future { + async move { + let start_language_server = async { + let server_path = get_copilot_lsp(http).await?; + let node_path = node_runtime.binary_path().await?; + let arguments: Vec = vec![server_path.into(), 
"--stdio".into()]; + let binary = LanguageServerBinary { + path: node_path, + arguments, + // TODO: We could set HTTP_PROXY etc here and fix the copilot issue. + env: None, + }; + + let root_path = if cfg!(target_os = "windows") { + Path::new("C:/") + } else { + Path::new("/") + }; + + let server = LanguageServer::new( + Arc::new(Mutex::new(None)), + new_server_id, + binary, + root_path, + None, + cx.clone(), + )?; + + server + .on_notification::( + |_, _| { /* Silence the notification */ }, + ) + .detach(); + let server = cx.update(|cx| server.initialize(None, cx))?.await?; + + let status = server + .request::(request::CheckStatusParams { + local_checks_only: false, + }) + .await?; + + server + .request::(request::SetEditorInfoParams { + editor_info: request::EditorInfo { + name: "zed".into(), + version: env!("CARGO_PKG_VERSION").into(), + }, + editor_plugin_info: request::EditorPluginInfo { + name: "zed-copilot".into(), + version: "0.0.1".into(), + }, + }) + .await?; + + anyhow::Ok((server, status)) + }; + + let server = start_language_server.await; + this.update(&mut cx, |this, cx| { + cx.notify(); + match server { + Ok((server, status)) => { + this.server = CopilotServer::Running(RunningCopilotServer { + lsp: server, + sign_in_status: SignInStatus::SignedOut, + registered_buffers: Default::default(), + }); + cx.emit(Event::CopilotLanguageServerStarted); + this.update_sign_in_status(status, cx); + } + Err(error) => { + this.server = CopilotServer::Error(error.to_string().into()); + cx.notify() + } + } + }) + .ok(); + } + } + + pub fn sign_in(&mut self, cx: &mut ModelContext) -> Task> { + if let CopilotServer::Running(server) = &mut self.server { + let task = match &server.sign_in_status { + SignInStatus::Authorized { .. } => Task::ready(Ok(())).shared(), + SignInStatus::SigningIn { task, .. } => { + cx.notify(); + task.clone() + } + SignInStatus::SignedOut | SignInStatus::Unauthorized { .. } => { + let lsp = server.lsp.clone(); + let task = cx + .spawn(|this, mut cx| async move { + let sign_in = async { + let sign_in = lsp + .request::( + request::SignInInitiateParams {}, + ) + .await?; + match sign_in { + request::SignInInitiateResult::AlreadySignedIn { user } => { + Ok(request::SignInStatus::Ok { user: Some(user) }) + } + request::SignInInitiateResult::PromptUserDeviceFlow(flow) => { + this.update(&mut cx, |this, cx| { + if let CopilotServer::Running(RunningCopilotServer { + sign_in_status: status, + .. + }) = &mut this.server + { + if let SignInStatus::SigningIn { + prompt: prompt_flow, + .. + } = status + { + *prompt_flow = Some(flow.clone()); + cx.notify(); + } + } + })?; + let response = lsp + .request::( + request::SignInConfirmParams { + user_code: flow.user_code, + }, + ) + .await?; + Ok(response) + } + } + }; + + let sign_in = sign_in.await; + this.update(&mut cx, |this, cx| match sign_in { + Ok(status) => { + this.update_sign_in_status(status, cx); + Ok(()) + } + Err(error) => { + this.update_sign_in_status( + request::SignInStatus::NotSignedIn, + cx, + ); + Err(Arc::new(error)) + } + })? 
+ }) + .shared(); + server.sign_in_status = SignInStatus::SigningIn { + prompt: None, + task: task.clone(), + }; + cx.notify(); + task + } + }; + + cx.background_executor() + .spawn(task.map_err(|err| anyhow!("{:?}", err))) + } else { + // If we're downloading, wait until download is finished + // If we're in a stuck state, display to the user + Task::ready(Err(anyhow!("copilot hasn't started yet"))) + } + } + + fn sign_out(&mut self, cx: &mut ModelContext) -> Task> { + self.update_sign_in_status(request::SignInStatus::NotSignedIn, cx); + if let CopilotServer::Running(RunningCopilotServer { lsp: server, .. }) = &self.server { + let server = server.clone(); + cx.background_executor().spawn(async move { + server + .request::(request::SignOutParams {}) + .await?; + anyhow::Ok(()) + }) + } else { + Task::ready(Err(anyhow!("copilot hasn't started yet"))) + } + } + + pub fn reinstall(&mut self, cx: &mut ModelContext) -> Task<()> { + let start_task = cx + .spawn({ + let http = self.http.clone(); + let node_runtime = self.node_runtime.clone(); + let server_id = self.server_id; + move |this, cx| async move { + clear_copilot_dir().await; + Self::start_language_server(server_id, http, node_runtime, this, cx).await + } + }) + .shared(); + + self.server = CopilotServer::Starting { + task: start_task.clone(), + }; + + cx.notify(); + + cx.background_executor().spawn(start_task) + } + + pub fn language_server(&self) -> Option<&Arc> { + if let CopilotServer::Running(server) = &self.server { + Some(&server.lsp) + } else { + None + } + } + + pub fn register_buffer(&mut self, buffer: &Model, cx: &mut ModelContext) { + let weak_buffer = buffer.downgrade(); + self.buffers.insert(weak_buffer.clone()); + + if let CopilotServer::Running(RunningCopilotServer { + lsp: server, + sign_in_status: status, + registered_buffers, + .. + }) = &mut self.server + { + if !matches!(status, SignInStatus::Authorized { .. 
}) { + return; + } + + registered_buffers + .entry(buffer.entity_id()) + .or_insert_with(|| { + let uri: lsp::Url = uri_for_buffer(buffer, cx); + let language_id = id_for_language(buffer.read(cx).language()); + let snapshot = buffer.read(cx).snapshot(); + server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem { + uri: uri.clone(), + language_id: language_id.clone(), + version: 0, + text: snapshot.text(), + }, + }, + ) + .log_err(); + + RegisteredBuffer { + uri, + language_id, + snapshot, + snapshot_version: 0, + pending_buffer_change: Task::ready(Some(())), + _subscriptions: [ + cx.subscribe(buffer, |this, buffer, event, cx| { + this.handle_buffer_event(buffer, event, cx).log_err(); + }), + cx.observe_release(buffer, move |this, _buffer, _cx| { + this.buffers.remove(&weak_buffer); + this.unregister_buffer(&weak_buffer); + }), + ], + } + }); + } + } + + fn handle_buffer_event( + &mut self, + buffer: Model, + event: &language::Event, + cx: &mut ModelContext, + ) -> Result<()> { + if let Ok(server) = self.server.as_running() { + if let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) + { + match event { + language::Event::Edited => { + let _ = registered_buffer.report_changes(&buffer, cx); + } + language::Event::Saved => { + server + .lsp + .notify::( + lsp::DidSaveTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new( + registered_buffer.uri.clone(), + ), + text: None, + }, + )?; + } + language::Event::FileHandleChanged | language::Event::LanguageChanged => { + let new_language_id = id_for_language(buffer.read(cx).language()); + let new_uri = uri_for_buffer(&buffer, cx); + if new_uri != registered_buffer.uri + || new_language_id != registered_buffer.language_id + { + let old_uri = mem::replace(&mut registered_buffer.uri, new_uri); + registered_buffer.language_id = new_language_id; + server + .lsp + .notify::( + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(old_uri), + }, + )?; + server + .lsp + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + registered_buffer.uri.clone(), + registered_buffer.language_id.clone(), + registered_buffer.snapshot_version, + registered_buffer.snapshot.text(), + ), + }, + )?; + } + } + _ => {} + } + } + } + + Ok(()) + } + + fn unregister_buffer(&mut self, buffer: &WeakModel) { + if let Ok(server) = self.server.as_running() { + if let Some(buffer) = server.registered_buffers.remove(&buffer.entity_id()) { + server + .lsp + .notify::( + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(buffer.uri), + }, + ) + .log_err(); + } + } + } + + pub fn completions( + &mut self, + buffer: &Model, + position: T, + cx: &mut ModelContext, + ) -> Task>> + where + T: ToPointUtf16, + { + self.request_completions::(buffer, position, cx) + } + + pub fn completions_cycling( + &mut self, + buffer: &Model, + position: T, + cx: &mut ModelContext, + ) -> Task>> + where + T: ToPointUtf16, + { + self.request_completions::(buffer, position, cx) + } + + pub fn accept_completion( + &mut self, + completion: &Completion, + cx: &mut ModelContext, + ) -> Task> { + let server = match self.server.as_authenticated() { + Ok(server) => server, + Err(error) => return Task::ready(Err(error)), + }; + let request = + server + .lsp + .request::(request::NotifyAcceptedParams { + uuid: completion.uuid.clone(), + }); + cx.background_executor().spawn(async move { + request.await?; + Ok(()) + }) + } + + pub fn 
discard_completions( + &mut self, + completions: &[Completion], + cx: &mut ModelContext, + ) -> Task> { + let server = match self.server.as_authenticated() { + Ok(server) => server, + Err(_) => return Task::ready(Ok(())), + }; + let request = + server + .lsp + .request::(request::NotifyRejectedParams { + uuids: completions + .iter() + .map(|completion| completion.uuid.clone()) + .collect(), + }); + cx.background_executor().spawn(async move { + request.await?; + Ok(()) + }) + } + + fn request_completions( + &mut self, + buffer: &Model, + position: T, + cx: &mut ModelContext, + ) -> Task>> + where + R: 'static + + lsp::request::Request< + Params = request::GetCompletionsParams, + Result = request::GetCompletionsResult, + >, + T: ToPointUtf16, + { + self.register_buffer(buffer, cx); + + let server = match self.server.as_authenticated() { + Ok(server) => server, + Err(error) => return Task::ready(Err(error)), + }; + let lsp = server.lsp.clone(); + let registered_buffer = server + .registered_buffers + .get_mut(&buffer.entity_id()) + .unwrap(); + let snapshot = registered_buffer.report_changes(buffer, cx); + let buffer = buffer.read(cx); + let uri = registered_buffer.uri.clone(); + let position = position.to_point_utf16(buffer); + let settings = language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx); + let tab_size = settings.tab_size; + let hard_tabs = settings.hard_tabs; + let relative_path = buffer + .file() + .map(|file| file.path().to_path_buf()) + .unwrap_or_default(); + + cx.background_executor().spawn(async move { + let (version, snapshot) = snapshot.await?; + let result = lsp + .request::(request::GetCompletionsParams { + doc: request::GetCompletionsDocument { + uri, + tab_size: tab_size.into(), + indent_size: 1, + insert_spaces: !hard_tabs, + relative_path: relative_path.to_string_lossy().into(), + position: point_to_lsp(position), + version: version.try_into().unwrap(), + }, + }) + .await?; + let completions = result + .completions + .into_iter() + .map(|completion| { + let start = snapshot + .clip_point_utf16(point_from_lsp(completion.range.start), Bias::Left); + let end = + snapshot.clip_point_utf16(point_from_lsp(completion.range.end), Bias::Left); + Completion { + uuid: completion.uuid, + range: snapshot.anchor_before(start)..snapshot.anchor_after(end), + text: completion.text, + } + }) + .collect(); + anyhow::Ok(completions) + }) + } + + pub fn status(&self) -> Status { + match &self.server { + CopilotServer::Starting { task } => Status::Starting { task: task.clone() }, + CopilotServer::Disabled => Status::Disabled, + CopilotServer::Error(error) => Status::Error(error.clone()), + CopilotServer::Running(RunningCopilotServer { sign_in_status, .. }) => { + match sign_in_status { + SignInStatus::Authorized { .. } => Status::Authorized, + SignInStatus::Unauthorized { .. } => Status::Unauthorized, + SignInStatus::SigningIn { prompt, .. } => Status::SigningIn { + prompt: prompt.clone(), + }, + SignInStatus::SignedOut => Status::SignedOut, + } + } + } + } + + fn update_sign_in_status( + &mut self, + lsp_status: request::SignInStatus, + cx: &mut ModelContext, + ) { + self.buffers.retain(|buffer| buffer.is_upgradable()); + + if let Ok(server) = self.server.as_running() { + match lsp_status { + request::SignInStatus::Ok { user: Some(_) } + | request::SignInStatus::MaybeOk { .. } + | request::SignInStatus::AlreadySignedIn { .. 
} => { + server.sign_in_status = SignInStatus::Authorized; + for buffer in self.buffers.iter().cloned().collect::>() { + if let Some(buffer) = buffer.upgrade() { + self.register_buffer(&buffer, cx); + } + } + } + request::SignInStatus::NotAuthorized { .. } => { + server.sign_in_status = SignInStatus::Unauthorized; + for buffer in self.buffers.iter().cloned().collect::>() { + self.unregister_buffer(&buffer); + } + } + request::SignInStatus::Ok { user: None } | request::SignInStatus::NotSignedIn => { + server.sign_in_status = SignInStatus::SignedOut; + for buffer in self.buffers.iter().cloned().collect::>() { + self.unregister_buffer(&buffer); + } + } + } + + cx.notify(); + } + } +} + +fn id_for_language(language: Option<&Arc>) -> String { + language + .map(|language| language.lsp_id()) + .unwrap_or_else(|| "plaintext".to_string()) +} + +fn uri_for_buffer(buffer: &Model, cx: &AppContext) -> lsp::Url { + if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) { + lsp::Url::from_file_path(file.abs_path(cx)).unwrap() + } else { + format!("buffer://{}", buffer.entity_id()).parse().unwrap() + } +} + +async fn clear_copilot_dir() { + remove_matching(&paths::COPILOT_DIR, |_| true).await +} + +async fn get_copilot_lsp(http: Arc) -> anyhow::Result { + const SERVER_PATH: &str = "dist/agent.js"; + + ///Check for the latest copilot language server and download it if we haven't already + async fn fetch_latest(http: Arc) -> anyhow::Result { + let release = + latest_github_release("zed-industries/copilot", true, false, http.clone()).await?; + + let version_dir = &*paths::COPILOT_DIR.join(format!("copilot-{}", release.tag_name)); + + fs::create_dir_all(version_dir).await?; + let server_path = version_dir.join(SERVER_PATH); + + if fs::metadata(&server_path).await.is_err() { + // Copilot LSP looks for this dist dir specifically, so lets add it in. + let dist_dir = version_dir.join("dist"); + fs::create_dir_all(dist_dir.as_path()).await?; + + let url = &release + .assets + .get(0) + .context("Github release for copilot contained no assets")? + .browser_download_url; + + let mut response = http + .get(url, Default::default(), true) + .await + .context("error downloading copilot release")?; + let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut())); + let archive = Archive::new(decompressed_bytes); + archive.unpack(dist_dir).await?; + + remove_matching(&paths::COPILOT_DIR, |entry| entry != version_dir).await; + } + + Ok(server_path) + } + + match fetch_latest(http).await { + ok @ Result::Ok(..) => ok, + e @ Err(..) 
=> { + e.log_err(); + // Fetch a cached binary, if it exists + maybe!(async { + let mut last_version_dir = None; + let mut entries = fs::read_dir(paths::COPILOT_DIR.as_path()).await?; + while let Some(entry) = entries.next().await { + let entry = entry?; + if entry.file_type().await?.is_dir() { + last_version_dir = Some(entry.path()); + } + } + let last_version_dir = + last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?; + let server_path = last_version_dir.join(SERVER_PATH); + if server_path.exists() { + Ok(server_path) + } else { + Err(anyhow!( + "missing executable in directory {:?}", + last_version_dir + )) + } + }) + .await + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use language::BufferId; + + #[gpui::test(iterations = 10)] + async fn test_buffer_management(cx: &mut TestAppContext) { + let (copilot, mut lsp) = Copilot::fake(cx); + + let buffer_1 = cx.new_model(|cx| Buffer::local("Hello", cx)); + let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64()) + .parse() + .unwrap(); + copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx)); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + buffer_1_uri.clone(), + "plaintext".into(), + 0, + "Hello".into() + ), + } + ); + + let buffer_2 = cx.new_model(|cx| Buffer::local("Goodbye", cx)); + let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64()) + .parse() + .unwrap(); + copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx)); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + buffer_2_uri.clone(), + "plaintext".into(), + 0, + "Goodbye".into() + ), + } + ); + + buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx)); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidChangeTextDocumentParams { + text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1), + content_changes: vec![lsp::TextDocumentContentChangeEvent { + range: Some(lsp::Range::new( + lsp::Position::new(0, 5), + lsp::Position::new(0, 5) + )), + range_length: None, + text: " world".into(), + }], + } + ); + + // Ensure updates to the file are reflected in the LSP. + buffer_1.update(cx, |buffer, cx| { + buffer.file_updated( + Arc::new(File { + abs_path: "/root/child/buffer-1".into(), + path: Path::new("child/buffer-1").into(), + }), + cx, + ) + }); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri), + } + ); + let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap(); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + buffer_1_uri.clone(), + "plaintext".into(), + 1, + "Hello world".into() + ), + } + ); + + // Ensure all previously-registered buffers are closed when signing out. 
+ lsp.handle_request::(|_, _| async { + Ok(request::SignOutResult {}) + }); + copilot + .update(cx, |copilot, cx| copilot.sign_out(cx)) + .await + .unwrap(); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()), + } + ); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()), + } + ); + + // Ensure all previously-registered buffers are re-opened when signing in. + lsp.handle_request::(|_, _| async { + Ok(request::SignInInitiateResult::AlreadySignedIn { + user: "user-1".into(), + }) + }); + copilot + .update(cx, |copilot, cx| copilot.sign_in(cx)) + .await + .unwrap(); + + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + buffer_1_uri.clone(), + "plaintext".into(), + 0, + "Hello world".into() + ), + } + ); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + buffer_2_uri.clone(), + "plaintext".into(), + 0, + "Goodbye".into() + ), + } + ); + // Dropping a buffer causes it to be closed on the LSP side as well. + cx.update(|_| drop(buffer_2)); + assert_eq!( + lsp.receive_notification::() + .await, + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri), + } + ); + } + + struct File { + abs_path: PathBuf, + path: Arc, + } + + impl language::File for File { + fn as_local(&self) -> Option<&dyn language::LocalFile> { + Some(self) + } + + fn mtime(&self) -> Option { + unimplemented!() + } + + fn path(&self) -> &Arc { + &self.path + } + + fn full_path(&self, _: &AppContext) -> PathBuf { + unimplemented!() + } + + fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr { + unimplemented!() + } + + fn is_deleted(&self) -> bool { + unimplemented!() + } + + fn as_any(&self) -> &dyn std::any::Any { + unimplemented!() + } + + fn to_proto(&self) -> rpc::proto::File { + unimplemented!() + } + + fn worktree_id(&self) -> usize { + 0 + } + + fn is_private(&self) -> bool { + false + } + } + + impl language::LocalFile for File { + fn abs_path(&self, _: &AppContext) -> PathBuf { + self.abs_path.clone() + } + + fn load(&self, _: &AppContext) -> Task> { + unimplemented!() + } + + fn buffer_reloaded( + &self, + _: BufferId, + _: &clock::Global, + _: language::LineEnding, + _: Option, + _: &mut AppContext, + ) { + unimplemented!() + } + } +} diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs new file mode 100644 index 0000000..e5f2a46 --- /dev/null +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -0,0 +1,1165 @@ +use crate::{Completion, Copilot}; +use anyhow::Result; +use client::telemetry::Telemetry; +use editor::{Direction, InlineCompletionProvider}; +use gpui::{AppContext, EntityId, Model, ModelContext, Task}; +use language::{ + language_settings::{all_language_settings, AllLanguageSettings}, + Buffer, OffsetRangeExt, ToOffset, +}; +use settings::Settings; +use std::{path::Path, sync::Arc, time::Duration}; + +pub const COPILOT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); + +pub struct CopilotCompletionProvider { + cycled: bool, + buffer_id: Option, + completions: Vec, + active_completion_index: usize, + file_extension: Option, + pending_refresh: Task>, + pending_cycling_refresh: Task>, + copilot: 
Model, + telemetry: Option>, +} + +impl CopilotCompletionProvider { + pub fn new(copilot: Model) -> Self { + Self { + cycled: false, + buffer_id: None, + completions: Vec::new(), + active_completion_index: 0, + file_extension: None, + pending_refresh: Task::ready(Ok(())), + pending_cycling_refresh: Task::ready(Ok(())), + copilot, + telemetry: None, + } + } + + pub fn with_telemetry(mut self, telemetry: Arc) -> Self { + self.telemetry = Some(telemetry); + self + } + + fn active_completion(&self) -> Option<&Completion> { + self.completions.get(self.active_completion_index) + } + + fn push_completion(&mut self, new_completion: Completion) { + for completion in &self.completions { + if completion.text == new_completion.text && completion.range == new_completion.range { + return; + } + } + self.completions.push(new_completion); + } +} + +impl InlineCompletionProvider for CopilotCompletionProvider { + fn name() -> &'static str { + "copilot" + } + + fn is_enabled( + &self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &AppContext, + ) -> bool { + if !self.copilot.read(cx).status().is_authorized() { + return false; + } + + let buffer = buffer.read(cx); + let file = buffer.file(); + let language = buffer.language_at(cursor_position); + let settings = all_language_settings(file, cx); + settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref())) + } + + fn refresh( + &mut self, + buffer: Model, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut ModelContext, + ) { + let copilot = self.copilot.clone(); + self.pending_refresh = cx.spawn(|this, mut cx| async move { + if debounce { + cx.background_executor() + .timer(COPILOT_DEBOUNCE_TIMEOUT) + .await; + } + + let completions = copilot + .update(&mut cx, |copilot, cx| { + copilot.completions(&buffer, cursor_position, cx) + })? + .await?; + + this.update(&mut cx, |this, cx| { + if !completions.is_empty() { + this.cycled = false; + this.pending_cycling_refresh = Task::ready(Ok(())); + this.completions.clear(); + this.active_completion_index = 0; + this.buffer_id = Some(buffer.entity_id()); + this.file_extension = buffer.read(cx).file().and_then(|file| { + Some( + Path::new(file.file_name(cx)) + .extension()? + .to_str()? + .to_string(), + ) + }); + + for completion in completions { + this.push_completion(completion); + } + cx.notify(); + } + })?; + + Ok(()) + }); + } + + fn cycle( + &mut self, + buffer: Model, + cursor_position: language::Anchor, + direction: Direction, + cx: &mut ModelContext, + ) { + if self.cycled { + match direction { + Direction::Prev => { + self.active_completion_index = if self.active_completion_index == 0 { + self.completions.len().saturating_sub(1) + } else { + self.active_completion_index - 1 + }; + } + Direction::Next => { + if self.completions.len() == 0 { + self.active_completion_index = 0 + } else { + self.active_completion_index = + (self.active_completion_index + 1) % self.completions.len(); + } + } + } + + cx.notify(); + } else { + let copilot = self.copilot.clone(); + self.pending_cycling_refresh = cx.spawn(|this, mut cx| async move { + let completions = copilot + .update(&mut cx, |copilot, cx| { + copilot.completions_cycling(&buffer, cursor_position, cx) + })? + .await?; + + this.update(&mut cx, |this, cx| { + this.cycled = true; + this.file_extension = buffer.read(cx).file().and_then(|file| { + Some( + Path::new(file.file_name(cx)) + .extension()? + .to_str()? 
+ .to_string(), + ) + }); + for completion in completions { + this.push_completion(completion); + } + this.cycle(buffer, cursor_position, direction, cx); + })?; + + Ok(()) + }); + } + } + + fn accept(&mut self, cx: &mut ModelContext) { + if let Some(completion) = self.active_completion() { + self.copilot + .update(cx, |copilot, cx| copilot.accept_completion(completion, cx)) + .detach_and_log_err(cx); + if self.active_completion().is_some() { + if let Some(telemetry) = self.telemetry.as_ref() { + telemetry.report_inline_completion_event( + Self::name().to_string(), + true, + self.file_extension.clone(), + ); + } + } + } + } + + fn discard( + &mut self, + should_report_inline_completion_event: bool, + cx: &mut ModelContext, + ) { + let settings = AllLanguageSettings::get_global(cx); + + let copilot_enabled = settings.inline_completions_enabled(None, None); + + if !copilot_enabled { + return; + } + + self.copilot + .update(cx, |copilot, cx| { + copilot.discard_completions(&self.completions, cx) + }) + .detach_and_log_err(cx); + + if should_report_inline_completion_event { + if self.active_completion().is_some() { + if let Some(telemetry) = self.telemetry.as_ref() { + telemetry.report_inline_completion_event( + Self::name().to_string(), + false, + self.file_extension.clone(), + ); + } + } + } + } + + fn active_completion_text<'a>( + &'a self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &'a AppContext, + ) -> Option<&'a str> { + let buffer_id = buffer.entity_id(); + let buffer = buffer.read(cx); + let completion = self.active_completion()?; + if Some(buffer_id) != self.buffer_id + || !completion.range.start.is_valid(buffer) + || !completion.range.end.is_valid(buffer) + { + return None; + } + + let mut completion_range = completion.range.to_offset(buffer); + let prefix_len = common_prefix( + buffer.chars_for_range(completion_range.clone()), + completion.text.chars(), + ); + completion_range.start += prefix_len; + let suffix_len = common_prefix( + buffer.reversed_chars_for_range(completion_range.clone()), + completion.text[prefix_len..].chars().rev(), + ); + completion_range.end = completion_range.end.saturating_sub(suffix_len); + + if completion_range.is_empty() + && completion_range.start == cursor_position.to_offset(buffer) + { + let completion_text = &completion.text[prefix_len..completion.text.len() - suffix_len]; + if completion_text.trim().is_empty() { + None + } else { + Some(completion_text) + } + } else { + None + } + } +} + +fn common_prefix, T2: Iterator>(a: T1, b: T2) -> usize { + a.zip(b) + .take_while(|(a, b)| a == b) + .map(|(a, _)| a.len_utf8()) + .sum() +} + +#[cfg(test)] +mod tests { + use super::*; + use editor::{ + test::editor_lsp_test_context::EditorLspTestContext, Editor, ExcerptRange, MultiBuffer, + }; + use fs::FakeFs; + use futures::StreamExt; + use gpui::{BackgroundExecutor, Context, TestAppContext, UpdateGlobal}; + use indoc::indoc; + use language::{ + language_settings::{AllLanguageSettings, AllLanguageSettingsContent}, + Point, + }; + use project::Project; + use serde_json::json; + use settings::SettingsStore; + use std::future::Future; + use util::test::{marked_text_ranges_by, TextRangeMarker}; + + #[gpui::test(iterations = 10)] + async fn test_copilot(executor: BackgroundExecutor, cx: &mut TestAppContext) { + // flaky + init_test(cx, |_| {}); + + let (copilot, copilot_lsp) = Copilot::fake(cx); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: 
Some(vec![".".to_string(), ":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + let copilot_provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); + cx.update_editor(|editor, cx| { + editor.set_inline_completion_provider(Some(copilot_provider), cx) + }); + + // When inserting, ensure autocompletion is favored over Copilot suggestions. + cx.set_state(indoc! {" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + let _ = handle_completion_request( + &mut cx, + indoc! {" + one.|<> + two + three + "}, + vec!["completion_a", "completion_b"], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "one.copilot1".into(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(editor.context_menu_visible()); + assert!(!editor.has_active_inline_completion(cx)); + + // Confirming a completion inserts it and hides the context menu, without showing + // the copilot suggestion afterwards. + editor + .confirm_completion(&Default::default(), cx) + .unwrap() + .detach(); + assert!(!editor.context_menu_visible()); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "one.completion_a\ntwo\nthree\n"); + assert_eq!(editor.display_text(cx), "one.completion_a\ntwo\nthree\n"); + }); + + // Ensure Copilot suggestions are shown right away if no autocompletion is available. + cx.set_state(indoc! {" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + let _ = handle_completion_request( + &mut cx, + indoc! {" + one.|<> + two + three + "}, + vec![], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "one.copilot1".into(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(!editor.context_menu_visible()); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.\ntwo\nthree\n"); + }); + + // Reset editor, and ensure autocompletion is still favored over Copilot suggestions. + cx.set_state(indoc! {" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + let _ = handle_completion_request( + &mut cx, + indoc! {" + one.|<> + two + three + "}, + vec!["completion_a", "completion_b"], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "one.copilot1".into(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(editor.context_menu_visible()); + assert!(!editor.has_active_inline_completion(cx)); + + // When hiding the context menu, the Copilot suggestion becomes visible. + editor.cancel(&Default::default(), cx); + assert!(!editor.context_menu_visible()); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.\ntwo\nthree\n"); + }); + + // Ensure existing completion is interpolated when inserting again. 
+ cx.simulate_keystroke("c"); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + assert!(!editor.context_menu_visible()); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.c\ntwo\nthree\n"); + }); + + // After debouncing, new Copilot completions should be requested. + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "one.copilot2".into(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 5)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(!editor.context_menu_visible()); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.c\ntwo\nthree\n"); + + // Canceling should remove the active Copilot suggestion. + editor.cancel(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.c\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.c\ntwo\nthree\n"); + + // After canceling, tabbing shouldn't insert the previously shown suggestion. + editor.tab(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.c \ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.c \ntwo\nthree\n"); + + // When undoing the previously active suggestion is shown again. + editor.undo(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.c\ntwo\nthree\n"); + }); + + // If an edit occurs outside of this editor, the suggestion is still correctly interpolated. + cx.update_buffer(|buffer, cx| buffer.edit([(5..5, "o")], None, cx)); + cx.update_editor(|editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.co\ntwo\nthree\n"); + + // Tabbing when there is an active suggestion inserts it. + editor.tab(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.copilot2\ntwo\nthree\n"); + + // When undoing the previously active suggestion is shown again. + editor.undo(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.co\ntwo\nthree\n"); + + // Hide suggestion. + editor.cancel(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.co\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.co\ntwo\nthree\n"); + }); + + // If an edit occurs outside of this editor but no suggestion is being shown, + // we won't make it visible. + cx.update_buffer(|buffer, cx| buffer.edit([(6..6, "p")], None, cx)); + cx.update_editor(|editor, cx| { + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one.cop\ntwo\nthree\n"); + assert_eq!(editor.text(cx), "one.cop\ntwo\nthree\n"); + }); + + // Reset the editor to verify how suggestions behave when tabbing on leading indentation. 
+ cx.update_editor(|editor, cx| { + editor.set_text("fn foo() {\n \n}", cx); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(1, 2)..Point::new(1, 2)]) + }); + }); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: " let x = 4;".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 2)), + ..Default::default() + }], + vec![], + ); + + cx.update_editor(|editor, cx| editor.next_inline_completion(&Default::default(), cx)); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); + assert_eq!(editor.text(cx), "fn foo() {\n \n}"); + + // Tabbing inside of leading whitespace inserts indentation without accepting the suggestion. + editor.tab(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "fn foo() {\n \n}"); + assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); + + // Tabbing again accepts the suggestion. + editor.tab(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "fn foo() {\n let x = 4;\n}"); + assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); + }); + } + + #[gpui::test(iterations = 10)] + async fn test_accept_partial_copilot_suggestion( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + // flaky + init_test(cx, |_| {}); + + let (copilot, copilot_lsp) = Copilot::fake(cx); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string(), ":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + let copilot_provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); + cx.update_editor(|editor, cx| { + editor.set_inline_completion_provider(Some(copilot_provider), cx) + }); + + // Setup the editor with a completion request. + cx.set_state(indoc! {" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + let _ = handle_completion_request( + &mut cx, + indoc! {" + one.|<> + two + three + "}, + vec![], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "one.copilot1".into(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + + // Accepting the first word of the suggestion should only accept the first word and still show the rest. + editor.accept_partial_inline_completion(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "one.copilot\ntwo\nthree\n"); + assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); + + // Accepting next word should accept the non-word and copilot suggestion should be gone + editor.accept_partial_inline_completion(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "one.copilot1\ntwo\nthree\n"); + assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); + }); + + // Reset the editor and check non-word and whitespace completion + cx.set_state(indoc! 
{" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + let _ = handle_completion_request( + &mut cx, + indoc! {" + one.|<> + two + three + "}, + vec![], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "one.123. copilot\n 456".into(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + + // Accepting the first word (non-word) of the suggestion should only accept the first word and still show the rest. + editor.accept_partial_inline_completion(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "one.123. \ntwo\nthree\n"); + assert_eq!( + editor.display_text(cx), + "one.123. copilot\n 456\ntwo\nthree\n" + ); + + // Accepting next word should accept the next word and copilot suggestion should still exist + editor.accept_partial_inline_completion(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "one.123. copilot\ntwo\nthree\n"); + assert_eq!( + editor.display_text(cx), + "one.123. copilot\n 456\ntwo\nthree\n" + ); + + // Accepting the whitespace should accept the non-word/whitespaces with newline and copilot suggestion should be gone + editor.accept_partial_inline_completion(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.text(cx), "one.123. copilot\n 456\ntwo\nthree\n"); + assert_eq!( + editor.display_text(cx), + "one.123. copilot\n 456\ntwo\nthree\n" + ); + }); + } + + #[gpui::test] + async fn test_copilot_completion_invalidation( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + init_test(cx, |_| {}); + + let (copilot, copilot_lsp) = Copilot::fake(cx); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string(), ":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + let copilot_provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); + cx.update_editor(|editor, cx| { + editor.set_inline_completion_provider(Some(copilot_provider), cx) + }); + + cx.set_state(indoc! {" + one + twˇ + three + "}); + + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "two.foo()".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 2)), + ..Default::default() + }], + vec![], + ); + cx.update_editor(|editor, cx| editor.next_inline_completion(&Default::default(), cx)); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); + assert_eq!(editor.text(cx), "one\ntw\nthree\n"); + + editor.backspace(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); + assert_eq!(editor.text(cx), "one\nt\nthree\n"); + + editor.backspace(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); + assert_eq!(editor.text(cx), "one\n\nthree\n"); + + // Deleting across the original suggestion range invalidates it. 
+ editor.backspace(&Default::default(), cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\nthree\n"); + assert_eq!(editor.text(cx), "one\nthree\n"); + + // Undoing the deletion restores the suggestion. + editor.undo(&Default::default(), cx); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); + assert_eq!(editor.text(cx), "one\n\nthree\n"); + }); + } + + #[gpui::test] + async fn test_copilot_multibuffer(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let (copilot, copilot_lsp) = Copilot::fake(cx); + + let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx)); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }], + cx, + ); + multibuffer + }); + let editor = cx.add_window(|cx| Editor::for_multibuffer(multibuffer, None, cx)); + editor.update(cx, |editor, cx| editor.focus(cx)).unwrap(); + let copilot_provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); + editor + .update(cx, |editor, cx| { + editor.set_inline_completion_provider(Some(copilot_provider), cx) + }) + .unwrap(); + + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "b = 2 + a".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 5)), + ..Default::default() + }], + vec![], + ); + _ = editor.update(cx, |editor, cx| { + // Ensure copilot suggestions are shown for the first excerpt. + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(1, 5)..Point::new(1, 5)]) + }); + editor.next_inline_completion(&Default::default(), cx); + }); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + _ = editor.update(cx, |editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + assert_eq!( + editor.display_text(cx), + "\n\na = 1\nb = 2 + a\n\n\n\nc = 3\nd = 4\n" + ); + assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n"); + }); + + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "d = 4 + c".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 6)), + ..Default::default() + }], + vec![], + ); + _ = editor.update(cx, |editor, cx| { + // Move to another excerpt, ensuring the suggestion gets cleared. + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(4, 5)..Point::new(4, 5)]) + }); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!( + editor.display_text(cx), + "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4\n" + ); + assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n"); + + // Type a character, ensuring we don't even try to interpolate the previous suggestion. + editor.handle_input(" ", cx); + assert!(!editor.has_active_inline_completion(cx)); + assert_eq!( + editor.display_text(cx), + "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 \n" + ); + assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n"); + }); + + // Ensure the new suggestion is displayed when the debounce timeout expires. 
+ executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + _ = editor.update(cx, |editor, cx| { + assert!(editor.has_active_inline_completion(cx)); + assert_eq!( + editor.display_text(cx), + "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 + c\n" + ); + assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n"); + }); + } + + #[gpui::test] + async fn test_copilot_does_not_prevent_completion_triggers( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + init_test(cx, |_| {}); + + let (copilot, copilot_lsp) = Copilot::fake(cx); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string(), ":".to_string()]), + ..lsp::CompletionOptions::default() + }), + ..lsp::ServerCapabilities::default() + }, + cx, + ) + .await; + let copilot_provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); + cx.update_editor(|editor, cx| { + editor.set_inline_completion_provider(Some(copilot_provider), cx) + }); + + cx.set_state(indoc! {" + one + twˇ + three + "}); + + let _ = handle_completion_request( + &mut cx, + indoc! {" + one + tw|<> + three + "}, + vec!["completion_a", "completion_b"], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "two.foo()".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 2)), + ..Default::default() + }], + vec![], + ); + cx.update_editor(|editor, cx| editor.next_inline_completion(&Default::default(), cx)); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(!editor.context_menu_visible(), "Even there are some completions available, those are not triggered when active copilot suggestion is present"); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); + assert_eq!(editor.text(cx), "one\ntw\nthree\n"); + }); + + cx.simulate_keystroke("o"); + let _ = handle_completion_request( + &mut cx, + indoc! {" + one + two|<> + three + "}, + vec!["completion_a_2", "completion_b_2"], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "two.foo()".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 3)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!(!editor.context_menu_visible()); + assert!(editor.has_active_inline_completion(cx)); + assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); + assert_eq!(editor.text(cx), "one\ntwo\nthree\n"); + }); + + cx.simulate_keystroke("."); + let _ = handle_completion_request( + &mut cx, + indoc! 
{" + one + two.|<> + three + "}, + vec!["something_else()"], + ); + handle_copilot_completion_request( + &copilot_lsp, + vec![crate::request::Completion { + text: "two.foo()".into(), + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 4)), + ..Default::default() + }], + vec![], + ); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.update_editor(|editor, cx| { + assert!( + editor.context_menu_visible(), + "On completion trigger input, the completions should be fetched and visible" + ); + assert!( + !editor.has_active_inline_completion(cx), + "On completion trigger input, copilot suggestion should be dismissed" + ); + assert_eq!(editor.display_text(cx), "one\ntwo.\nthree\n"); + assert_eq!(editor.text(cx), "one\ntwo.\nthree\n"); + }); + } + + #[gpui::test] + async fn test_copilot_disabled_globs(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings + .inline_completions + .get_or_insert(Default::default()) + .disabled_globs = Some(vec![".env*".to_string()]); + }); + + let (copilot, copilot_lsp) = Copilot::fake(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + ".env": "SECRET=something\n", + "README.md": "hello\n" + }), + ) + .await; + let project = Project::test(fs, ["/test".as_ref()], cx).await; + + let private_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/test/.env", cx) + }) + .await + .unwrap(); + let public_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/test/README.md", cx) + }) + .await + .unwrap(); + + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + multibuffer.push_excerpts( + private_buffer.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 0), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + public_buffer.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 0), + primary: None, + }], + cx, + ); + multibuffer + }); + let editor = cx.add_window(|cx| Editor::for_multibuffer(multibuffer, None, cx)); + let copilot_provider = cx.new_model(|_| CopilotCompletionProvider::new(copilot)); + editor + .update(cx, |editor, cx| { + editor.set_inline_completion_provider(Some(copilot_provider), cx) + }) + .unwrap(); + + let mut copilot_requests = copilot_lsp + .handle_request::( + move |_params, _cx| async move { + Ok(crate::request::GetCompletionsResult { + completions: vec![crate::request::Completion { + text: "next line".into(), + range: lsp::Range::new( + lsp::Position::new(1, 0), + lsp::Position::new(1, 0), + ), + ..Default::default() + }], + }) + }, + ); + + _ = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |selections| { + selections.select_ranges([Point::new(0, 0)..Point::new(0, 0)]) + }); + editor.next_inline_completion(&Default::default(), cx); + }); + + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + assert!(copilot_requests.try_next().is_err()); + + _ = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(2, 0)..Point::new(2, 0)]) + }); + editor.next_inline_completion(&Default::default(), cx); + }); + + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + assert!(copilot_requests.try_next().is_ok()); + } + + fn handle_copilot_completion_request( + lsp: &lsp::FakeLanguageServer, + completions: Vec, + completions_cycling: Vec, + ) { + lsp.handle_request::(move |_params, _cx| { + let completions = completions.clone(); + async 
move { + Ok(crate::request::GetCompletionsResult { + completions: completions.clone(), + }) + } + }); + lsp.handle_request::(move |_params, _cx| { + let completions_cycling = completions_cycling.clone(); + async move { + Ok(crate::request::GetCompletionsResult { + completions: completions_cycling.clone(), + }) + } + }); + } + + fn handle_completion_request( + cx: &mut EditorLspTestContext, + marked_string: &str, + completions: Vec<&'static str>, + ) -> impl Future { + let complete_from_marker: TextRangeMarker = '|'.into(); + let replace_range_marker: TextRangeMarker = ('<', '>').into(); + let (_, mut marked_ranges) = marked_text_ranges_by( + marked_string, + vec![complete_from_marker.clone(), replace_range_marker.clone()], + ); + + let complete_from_position = + cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start); + let replace_range = + cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone()); + + let mut request = + cx.handle_request::(move |url, params, _| { + let completions = completions.clone(); + async move { + assert_eq!(params.text_document_position.text_document.uri, url.clone()); + assert_eq!( + params.text_document_position.position, + complete_from_position + ); + Ok(Some(lsp::CompletionResponse::Array( + completions + .iter() + .map(|completion_text| lsp::CompletionItem { + label: completion_text.to_string(), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: replace_range, + new_text: completion_text.to_string(), + })), + ..Default::default() + }) + .collect(), + ))) + } + }); + + async move { + request.next().await; + } + } + + fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) { + _ = cx.update(|cx| { + let store = SettingsStore::test(cx); + cx.set_global(store); + theme::init(theme::LoadThemes::JustBase, cx); + client::init_settings(cx); + language::init(cx); + editor::init_settings(cx); + Project::init_settings(cx); + workspace::init_settings(cx); + SettingsStore::update_global(cx, |store: &mut SettingsStore, cx| { + store.update_user_settings::(cx, f); + }); + }); + } +} diff --git a/crates/copilot/src/request.rs b/crates/copilot/src/request.rs new file mode 100644 index 0000000..0deabe1 --- /dev/null +++ b/crates/copilot/src/request.rs @@ -0,0 +1,225 @@ +use serde::{Deserialize, Serialize}; + +pub enum CheckStatus {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CheckStatusParams { + pub local_checks_only: bool, +} + +impl lsp::request::Request for CheckStatus { + type Params = CheckStatusParams; + type Result = SignInStatus; + const METHOD: &'static str = "checkStatus"; +} + +pub enum SignInInitiate {} + +#[derive(Debug, Serialize, Deserialize)] +pub struct SignInInitiateParams {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "status")] +pub enum SignInInitiateResult { + AlreadySignedIn { user: String }, + PromptUserDeviceFlow(PromptUserDeviceFlow), +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PromptUserDeviceFlow { + pub user_code: String, + pub verification_uri: String, +} + +impl lsp::request::Request for SignInInitiate { + type Params = SignInInitiateParams; + type Result = SignInInitiateResult; + const METHOD: &'static str = "signInInitiate"; +} + +pub enum SignInConfirm {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SignInConfirmParams { + pub user_code: String, +} + +#[derive(Debug, Serialize, 
Deserialize)] +#[serde(tag = "status")] +pub enum SignInStatus { + #[serde(rename = "OK")] + Ok { + user: Option, + }, + MaybeOk { + user: String, + }, + AlreadySignedIn { + user: String, + }, + NotAuthorized { + user: String, + }, + NotSignedIn, +} + +impl lsp::request::Request for SignInConfirm { + type Params = SignInConfirmParams; + type Result = SignInStatus; + const METHOD: &'static str = "signInConfirm"; +} + +pub enum SignOut {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SignOutParams {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SignOutResult {} + +impl lsp::request::Request for SignOut { + type Params = SignOutParams; + type Result = SignOutResult; + const METHOD: &'static str = "signOut"; +} + +pub enum GetCompletions {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GetCompletionsParams { + pub doc: GetCompletionsDocument, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GetCompletionsDocument { + pub tab_size: u32, + pub indent_size: u32, + pub insert_spaces: bool, + pub uri: lsp::Url, + pub relative_path: String, + pub position: lsp::Position, + pub version: usize, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GetCompletionsResult { + pub completions: Vec, +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Completion { + pub text: String, + pub position: lsp::Position, + pub uuid: String, + pub range: lsp::Range, + pub display_text: String, +} + +impl lsp::request::Request for GetCompletions { + type Params = GetCompletionsParams; + type Result = GetCompletionsResult; + const METHOD: &'static str = "getCompletions"; +} + +pub enum GetCompletionsCycling {} + +impl lsp::request::Request for GetCompletionsCycling { + type Params = GetCompletionsParams; + type Result = GetCompletionsResult; + const METHOD: &'static str = "getCompletionsCycling"; +} + +pub enum LogMessage {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LogMessageParams { + pub level: u8, + pub message: String, + pub metadata_str: String, + pub extra: Vec, +} + +impl lsp::notification::Notification for LogMessage { + type Params = LogMessageParams; + const METHOD: &'static str = "LogMessage"; +} + +pub enum StatusNotification {} + +#[derive(Debug, Serialize, Deserialize)] +pub struct StatusNotificationParams { + pub message: String, + pub status: String, // One of Normal/InProgress +} + +impl lsp::notification::Notification for StatusNotification { + type Params = StatusNotificationParams; + const METHOD: &'static str = "statusNotification"; +} + +pub enum SetEditorInfo {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SetEditorInfoParams { + pub editor_info: EditorInfo, + pub editor_plugin_info: EditorPluginInfo, +} + +impl lsp::request::Request for SetEditorInfo { + type Params = SetEditorInfoParams; + type Result = String; + const METHOD: &'static str = "setEditorInfo"; +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EditorInfo { + pub name: String, + pub version: String, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EditorPluginInfo { + pub name: String, + pub version: String, +} + +pub enum NotifyAccepted {} + +#[derive(Debug, 
Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NotifyAcceptedParams { + pub uuid: String, +} + +impl lsp::request::Request for NotifyAccepted { + type Params = NotifyAcceptedParams; + type Result = String; + const METHOD: &'static str = "notifyAccepted"; +} + +pub enum NotifyRejected {} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NotifyRejectedParams { + pub uuids: Vec, +} + +impl lsp::request::Request for NotifyRejected { + type Params = NotifyRejectedParams; + type Result = String; + const METHOD: &'static str = "notifyRejected"; +} diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs new file mode 100644 index 0000000..41c6ee8 --- /dev/null +++ b/crates/copilot/src/sign_in.rs @@ -0,0 +1,210 @@ +use crate::{request::PromptUserDeviceFlow, Copilot, Status}; +use gpui::{ + div, svg, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, + FocusableView, InteractiveElement, IntoElement, Model, MouseDownEvent, ParentElement, Render, + Styled, Subscription, ViewContext, +}; +use ui::{prelude::*, Button, IconName, Label}; +use workspace::ModalView; + +const COPILOT_SIGN_UP_URL: &str = "https://github.com/features/copilot"; + +pub struct CopilotCodeVerification { + status: Status, + connect_clicked: bool, + focus_handle: FocusHandle, + _subscription: Subscription, +} + +impl FocusableView for CopilotCodeVerification { + fn focus_handle(&self, _: &AppContext) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} + +impl EventEmitter for CopilotCodeVerification {} +impl ModalView for CopilotCodeVerification {} + +impl CopilotCodeVerification { + pub fn new(copilot: &Model, cx: &mut ViewContext) -> Self { + let status = copilot.read(cx).status(); + Self { + status, + connect_clicked: false, + focus_handle: cx.focus_handle(), + _subscription: cx.observe(copilot, |this, copilot, cx| { + let status = copilot.read(cx).status(); + match status { + Status::Authorized | Status::Unauthorized | Status::SigningIn { .. } => { + this.set_status(status, cx) + } + _ => cx.emit(DismissEvent), + } + }), + } + } + + pub fn set_status(&mut self, status: Status, cx: &mut ViewContext) { + self.status = status; + cx.notify(); + } + + fn render_device_code( + data: &PromptUserDeviceFlow, + cx: &mut ViewContext, + ) -> impl IntoElement { + let copied = cx + .read_from_clipboard() + .map(|item| item.text() == &data.user_code) + .unwrap_or(false); + h_flex() + .w_full() + .p_1() + .border_1() + .border_muted(cx) + .rounded_md() + .cursor_pointer() + .justify_between() + .on_mouse_down(gpui::MouseButton::Left, { + let user_code = data.user_code.clone(); + move |_, cx| { + cx.write_to_clipboard(ClipboardItem::new(user_code.clone())); + cx.refresh(); + } + }) + .child(div().flex_1().child(Label::new(data.user_code.clone()))) + .child(div().flex_none().px_1().child(Label::new(if copied { + "Copied!" + } else { + "Copy" + }))) + } + + fn render_prompting_modal( + connect_clicked: bool, + data: &PromptUserDeviceFlow, + cx: &mut ViewContext, + ) -> impl Element { + let connect_button_label = if connect_clicked { + "Waiting for connection..." 
+ } else { + "Connect to GitHub" + }; + v_flex() + .flex_1() + .gap_2() + .items_center() + .child(Headline::new("Use GitHub Copilot in Zed.").size(HeadlineSize::Large)) + .child( + Label::new("Using Copilot requires an active subscription on GitHub.") + .color(Color::Muted), + ) + .child(Self::render_device_code(data, cx)) + .child( + Label::new("Paste this code into GitHub after clicking the button below.") + .size(ui::LabelSize::Small), + ) + .child( + Button::new("connect-button", connect_button_label) + .on_click({ + let verification_uri = data.verification_uri.clone(); + cx.listener(move |this, _, cx| { + cx.open_url(&verification_uri); + this.connect_clicked = true; + }) + }) + .full_width() + .style(ButtonStyle::Filled), + ) + .child( + Button::new("copilot-enable-cancel-button", "Cancel") + .full_width() + .on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))), + ) + } + fn render_enabled_modal(cx: &mut ViewContext) -> impl Element { + v_flex() + .gap_2() + .child(Headline::new("Copilot Enabled!").size(HeadlineSize::Large)) + .child(Label::new( + "You can update your settings or sign out from the Copilot menu in the status bar.", + )) + .child( + Button::new("copilot-enabled-done-button", "Done") + .full_width() + .on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))), + ) + } + + fn render_unauthorized_modal(cx: &mut ViewContext) -> impl Element { + v_flex() + .child(Headline::new("You must have an active GitHub Copilot subscription.").size(HeadlineSize::Large)) + + .child(Label::new( + "You can enable Copilot by connecting your existing license once you have subscribed or renewed your subscription.", + ).color(Color::Warning)) + .child( + Button::new("copilot-subscribe-button", "Subscribe on GitHub") + .full_width() + .on_click(|_, cx| cx.open_url(COPILOT_SIGN_UP_URL)), + ) + .child( + Button::new("copilot-subscribe-cancel-button", "Cancel") + .full_width() + .on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))), + ) + } + + fn render_disabled_modal() -> impl Element { + v_flex() + .child(Headline::new("Copilot is disabled").size(HeadlineSize::Large)) + .child(Label::new("You can enable Copilot in your settings.")) + } +} + +impl Render for CopilotCodeVerification { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let prompt = match &self.status { + Status::SigningIn { + prompt: Some(prompt), + } => Self::render_prompting_modal(self.connect_clicked, &prompt, cx).into_any_element(), + Status::Unauthorized => { + self.connect_clicked = false; + Self::render_unauthorized_modal(cx).into_any_element() + } + Status::Authorized => { + self.connect_clicked = false; + Self::render_enabled_modal(cx).into_any_element() + } + Status::Disabled => { + self.connect_clicked = false; + Self::render_disabled_modal().into_any_element() + } + _ => div().into_any_element(), + }; + + v_flex() + .id("copilot code verification") + .track_focus(&self.focus_handle) + .elevation_3(cx) + .w_96() + .items_center() + .p_4() + .gap_2() + .on_action(cx.listener(|_, _: &menu::Cancel, cx| { + cx.emit(DismissEvent); + })) + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, cx| { + cx.focus(&this.focus_handle); + })) + .child( + svg() + .w_32() + .h_16() + .flex_none() + .path(IconName::ZedXCopilot.path()) + .text_color(cx.theme().colors().icon), + ) + .child(prompt) + } +} diff --git a/crates/db/Cargo.toml b/crates/db/Cargo.toml new file mode 100644 index 0000000..f316092 --- /dev/null +++ b/crates/db/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "db" +version = "0.1.0" 
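(Editorial aside, not part of the diff.) The `CopilotCodeVerification` view in crates/copilot/src/sign_in.rs above follows gpui's modal pattern: the view owns a `FocusHandle`, emits `DismissEvent` so the workspace can dismiss it, and builds its UI with the `v_flex`/`Label`/`Button` helpers. A minimal sketch of that shape, assuming the same imports as sign_in.rs; `ExampleModal` is a hypothetical type, not something added by this diff:

struct ExampleModal {
    focus_handle: FocusHandle,
}

impl EventEmitter<DismissEvent> for ExampleModal {}
impl ModalView for ExampleModal {}

impl FocusableView for ExampleModal {
    fn focus_handle(&self, _: &AppContext) -> FocusHandle {
        self.focus_handle.clone()
    }
}

impl Render for ExampleModal {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        v_flex()
            .gap_2()
            .child(Label::new("Example modal"))
            .child(
                // Emitting DismissEvent is what asks the workspace to close the modal.
                Button::new("example-dismiss", "Dismiss")
                    .on_click(cx.listener(|_, _, cx| cx.emit(DismissEvent))),
            )
    }
}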
+edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/db.rs" +doctest = false + +[features] +test-support = [] + +[dependencies] +anyhow.workspace = true +gpui.workspace = true +indoc.workspace = true +lazy_static.workspace = true +log.workspace = true +release_channel.workspace = true +smol.workspace = true +sqlez.workspace = true +sqlez_macros.workspace = true +util.workspace = true + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } +tempfile.workspace = true diff --git a/crates/db/LICENSE-GPL b/crates/db/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/db/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/db/README.md b/crates/db/README.md new file mode 100644 index 0000000..b734a2e --- /dev/null +++ b/crates/db/README.md @@ -0,0 +1,5 @@ +# Building Queries + +First, craft your test data. The examples folder shows a template for building a test-db, and can be ran with `cargo run --example [your-example]`. + +To actually use and test your queries, import the generated DB file into https://sqliteonline.com/ diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs new file mode 100644 index 0000000..577a174 --- /dev/null +++ b/crates/db/src/db.rs @@ -0,0 +1,340 @@ +pub mod kvp; +pub mod query; + +// Re-export +pub use anyhow; +use anyhow::Context; +use gpui::AppContext; +pub use indoc::indoc; +pub use lazy_static; +pub use smol; +pub use sqlez; +pub use sqlez_macros; +pub use util::paths::DB_DIR; + +use release_channel::ReleaseChannel; +pub use release_channel::RELEASE_CHANNEL; +use sqlez::domain::Migrator; +use sqlez::thread_safe_connection::ThreadSafeConnection; +use sqlez_macros::sql; +use std::future::Future; +use std::path::{Path, PathBuf}; +use std::sync::atomic::{AtomicBool, Ordering}; +use util::{maybe, ResultExt}; + +const CONNECTION_INITIALIZE_QUERY: &str = sql!( + PRAGMA foreign_keys=TRUE; +); + +const DB_INITIALIZE_QUERY: &str = sql!( + PRAGMA journal_mode=WAL; + PRAGMA busy_timeout=1; + PRAGMA case_sensitive_like=TRUE; + PRAGMA synchronous=NORMAL; +); + +const FALLBACK_DB_NAME: &str = "FALLBACK_MEMORY_DB"; + +const DB_FILE_NAME: &str = "db.sqlite"; + +lazy_static::lazy_static! { + pub static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty()); + pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false); +} + +/// Open or create a database at the given directory path. +/// This will retry a couple times if there are failures. If opening fails once, the db directory +/// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. +/// In either case, static variables are set so that the user can be notified. 
+pub async fn open_db( + db_dir: &Path, + release_channel: &ReleaseChannel, +) -> ThreadSafeConnection { + if *ZED_STATELESS { + return open_fallback_db().await; + } + + let release_channel_name = release_channel.dev_name(); + let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); + + let connection = maybe!(async { + smol::fs::create_dir_all(&main_db_dir) + .await + .context("Could not create db directory") + .log_err()?; + let db_path = main_db_dir.join(Path::new(DB_FILE_NAME)); + open_main_db(&db_path).await + }) + .await; + + if let Some(connection) = connection { + return connection; + } + + // Set another static ref so that we can escalate the notification + ALL_FILE_DB_FAILED.store(true, Ordering::Release); + + // If still failed, create an in memory db with a known name + open_fallback_db().await +} + +async fn open_main_db(db_path: &PathBuf) -> Option> { + log::info!("Opening main db"); + ThreadSafeConnection::::builder(db_path.to_string_lossy().as_ref(), true) + .with_db_initialization_query(DB_INITIALIZE_QUERY) + .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY) + .build() + .await + .log_err() +} + +async fn open_fallback_db() -> ThreadSafeConnection { + log::info!("Opening fallback db"); + ThreadSafeConnection::::builder(FALLBACK_DB_NAME, false) + .with_db_initialization_query(DB_INITIALIZE_QUERY) + .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY) + .build() + .await + .expect( + "Fallback in memory database failed. Likely initialization queries or migrations have fundamental errors", + ) +} + +#[cfg(any(test, feature = "test-support"))] +pub async fn open_test_db(db_name: &str) -> ThreadSafeConnection { + use sqlez::thread_safe_connection::locking_queue; + + ThreadSafeConnection::::builder(db_name, false) + .with_db_initialization_query(DB_INITIALIZE_QUERY) + .with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY) + // Serialize queued writes via a mutex and run them synchronously + .with_write_queue_constructor(locking_queue()) + .build() + .await + .unwrap() +} + +/// Implements a basic DB wrapper for a given domain +#[macro_export] +macro_rules! define_connection { + (pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => { + pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>); + + impl ::std::ops::Deref for $t { + type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>; + + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + impl $crate::sqlez::domain::Domain for $t { + fn name() -> &'static str { + stringify!($t) + } + + fn migrations() -> &'static [&'static str] { + $migrations + } + } + + #[cfg(any(test, feature = "test-support"))] + $crate::lazy_static::lazy_static! { + pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id)))); + } + + #[cfg(not(any(test, feature = "test-support")))] + $crate::lazy_static::lazy_static! 
{ + pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL))); + } + }; + (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => { + pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>); + + impl ::std::ops::Deref for $t { + type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<($($d),+, $t)>; + + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + impl $crate::sqlez::domain::Domain for $t { + fn name() -> &'static str { + stringify!($t) + } + + fn migrations() -> &'static [&'static str] { + $migrations + } + } + + #[cfg(any(test, feature = "test-support"))] + $crate::lazy_static::lazy_static! { + pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id)))); + } + + #[cfg(not(any(test, feature = "test-support")))] + $crate::lazy_static::lazy_static! { + pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL))); + } + }; +} + +pub fn write_and_log(cx: &mut AppContext, db_write: impl FnOnce() -> F + Send + 'static) +where + F: Future> + Send, +{ + cx.background_executor() + .spawn(async move { db_write().await.log_err() }) + .detach() +} + +#[cfg(test)] +mod tests { + use std::thread; + + use sqlez::domain::Domain; + use sqlez_macros::sql; + + use crate::open_db; + + // Test bad migration panics + #[gpui::test] + #[should_panic] + async fn test_bad_migration_panics() { + enum BadDB {} + + impl Domain for BadDB { + fn name() -> &'static str { + "db_tests" + } + + fn migrations() -> &'static [&'static str] { + &[ + sql!(CREATE TABLE test(value);), + // failure because test already exists + sql!(CREATE TABLE test(value);), + ] + } + } + + let tempdir = tempfile::Builder::new() + .prefix("DbTests") + .tempdir() + .unwrap(); + let _bad_db = open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + } + + /// Test that DB exists but corrupted (causing recreate) + #[gpui::test] + async fn test_db_corruption(cx: &mut gpui::TestAppContext) { + cx.executor().allow_parking(); + + enum CorruptedDB {} + + impl Domain for CorruptedDB { + fn name() -> &'static str { + "db_tests" + } + + fn migrations() -> &'static [&'static str] { + &[sql!(CREATE TABLE test(value);)] + } + } + + enum GoodDB {} + + impl Domain for GoodDB { + fn name() -> &'static str { + "db_tests" //Notice same name + } + + fn migrations() -> &'static [&'static str] { + &[sql!(CREATE TABLE test2(value);)] //But different migration + } + } + + let tempdir = tempfile::Builder::new() + .prefix("DbTests") + .tempdir() + .unwrap(); + { + let corrupt_db = + open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + assert!(corrupt_db.persistent()); + } + + let good_db = + open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + assert!( + good_db.select_row::("SELECT * FROM test2").unwrap()() + .unwrap() + .is_none() + ); + } + + /// Test that DB exists but corrupted (causing recreate) + #[gpui::test(iterations = 30)] + async fn test_simultaneous_db_corruption(cx: &mut gpui::TestAppContext) { + cx.executor().allow_parking(); + + enum CorruptedDB {} + + impl Domain for CorruptedDB { + fn name() -> &'static str { + "db_tests" + } + + fn migrations() -> &'static [&'static str] { + &[sql!(CREATE TABLE test(value);)] + } + } + + enum GoodDB {} + + impl Domain for GoodDB { + fn name() -> &'static str { + "db_tests" //Notice same name + } + + fn migrations() -> &'static [&'static str] { + 
&[sql!(CREATE TABLE test2(value);)] //But different migration + } + } + + let tempdir = tempfile::Builder::new() + .prefix("DbTests") + .tempdir() + .unwrap(); + { + // Setup the bad database + let corrupt_db = + open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + assert!(corrupt_db.persistent()); + } + + // Try to connect to it a bunch of times at once + let mut guards = vec![]; + for _ in 0..10 { + let tmp_path = tempdir.path().to_path_buf(); + let guard = thread::spawn(move || { + let good_db = smol::block_on(open_db::( + tmp_path.as_path(), + &release_channel::ReleaseChannel::Dev, + )); + assert!( + good_db.select_row::("SELECT * FROM test2").unwrap()() + .unwrap() + .is_none() + ); + }); + + guards.push(guard); + } + + for guard in guards.into_iter() { + assert!(guard.join().is_ok()); + } + } +} diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs new file mode 100644 index 0000000..0b0cdd9 --- /dev/null +++ b/crates/db/src/kvp.rs @@ -0,0 +1,62 @@ +use sqlez_macros::sql; + +use crate::{define_connection, query}; + +define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> = + &[sql!( + CREATE TABLE IF NOT EXISTS kv_store( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ) STRICT; + )]; +); + +impl KeyValueStore { + query! { + pub fn read_kvp(key: &str) -> Result> { + SELECT value FROM kv_store WHERE key = (?) + } + } + + query! { + pub async fn write_kvp(key: String, value: String) -> Result<()> { + INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?)) + } + } + + query! { + pub async fn delete_kvp(key: String) -> Result<()> { + DELETE FROM kv_store WHERE key = (?) + } + } +} + +#[cfg(test)] +mod tests { + use crate::kvp::KeyValueStore; + + #[gpui::test] + async fn test_kvp() { + let db = KeyValueStore(crate::open_test_db("test_kvp").await); + + assert_eq!(db.read_kvp("key-1").unwrap(), None); + + db.write_kvp("key-1".to_string(), "one".to_string()) + .await + .unwrap(); + assert_eq!(db.read_kvp("key-1").unwrap(), Some("one".to_string())); + + db.write_kvp("key-1".to_string(), "one-2".to_string()) + .await + .unwrap(); + assert_eq!(db.read_kvp("key-1").unwrap(), Some("one-2".to_string())); + + db.write_kvp("key-2".to_string(), "two".to_string()) + .await + .unwrap(); + assert_eq!(db.read_kvp("key-2").unwrap(), Some("two".to_string())); + + db.delete_kvp("key-1".to_string()).await.unwrap(); + assert_eq!(db.read_kvp("key-1").unwrap(), None); + } +} diff --git a/crates/db/src/query.rs b/crates/db/src/query.rs new file mode 100644 index 0000000..932db25 --- /dev/null +++ b/crates/db/src/query.rs @@ -0,0 +1,314 @@ +#[macro_export] +macro_rules! 
query { + ($vis:vis fn $id:ident() -> Result<()> { $($sql:tt)+ }) => { + $vis fn $id(&self) -> $crate::anyhow::Result<()> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.exec(sql_stmt)?().context(::std::format!( + "Error in {}, exec failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt, + )) + } + }; + ($vis:vis async fn $id:ident() -> Result<()> { $($sql:tt)+ }) => { + $vis async fn $id(&self) -> $crate::anyhow::Result<()> { + use $crate::anyhow::Context; + + self.write(|connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.exec(sql_stmt)?().context(::std::format!( + "Error in {}, exec failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => { + $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, exec_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + } + }; + ($vis:vis async fn $id:ident($arg:ident: $arg_type:ty) -> Result<()> { $($sql:tt)+ }) => { + $vis async fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<()> { + use $crate::anyhow::Context; + + self.write(move |connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.exec_bound::<$arg_type>(sql_stmt)?($arg) + .context(::std::format!( + "Error in {}, exec_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => { + $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> { + use $crate::anyhow::Context; + + self.write(move |connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, exec_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident() -> Result> { $($sql:tt)+ }) => { + $vis fn $id(&self) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select::<$return_type>(sql_stmt)?() + .context(::std::format!( + "Error in {}, select_row failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + } + }; + ($vis:vis async fn $id:ident() -> Result> { $($sql:tt)+ }) => { + pub async fn $id(&self) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + self.write(|connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.select::<$return_type>(sql_stmt)?() + .context(::std::format!( + "Error in {}, select_row failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result> { $($sql:tt)+ }) => { + $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, exec_bound failed to execute or parse for: {}", + ::std::stringify!($id), + 
sql_stmt + )) + } + }; + ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result> { $($sql:tt)+ }) => { + $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + self.write(|connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, exec_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident() -> Result> { $($sql:tt)+ }) => { + $vis fn $id(&self) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_row::<$return_type>(sql_stmt)?() + .context(::std::format!( + "Error in {}, select_row failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + } + }; + ($vis:vis async fn $id:ident() -> Result> { $($sql:tt)+ }) => { + $vis async fn $id(&self) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + self.write(|connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.select_row::<$return_type>(sql_stmt)?() + .context(::std::format!( + "Error in {}, select_row failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident($arg:ident: $arg_type:ty) -> Result> { $($sql:tt)+ }) => { + $vis fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg) + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + + } + }; + ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result> { $($sql:tt)+ }) => { + $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + + } + }; + ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result> { $($sql:tt)+ }) => { + $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result> { + use $crate::anyhow::Context; + + + self.write(move |connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident() -> Result<$return_type:ty> { $($sql:tt)+ }) => { + $vis fn $id(&self) -> $crate::anyhow::Result<$return_type> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_row::<$return_type>(sql_stmt)?() + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + ))? 
+ .context(::std::format!( + "Error in {}, select_row_bound expected single row result but found none for: {}", + ::std::stringify!($id), + sql_stmt + )) + } + }; + ($vis:vis async fn $id:ident() -> Result<$return_type:ty> { $($sql:tt)+ }) => { + $vis async fn $id(&self) -> $crate::anyhow::Result<$return_type> { + use $crate::anyhow::Context; + + self.write(|connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.select_row::<$return_type>(sql_stmt)?() + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + ))? + .context(::std::format!( + "Error in {}, select_row_bound expected single row result but found none for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; + ($vis:vis fn $id:ident($arg:ident: $arg_type:ty) -> Result<$return_type:ty> { $($sql:tt)+ }) => { + pub fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<$return_type> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg) + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + ))? + .context(::std::format!( + "Error in {}, select_row_bound expected single row result but found none for: {}", + ::std::stringify!($id), + sql_stmt + )) + } + }; + ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<$return_type:ty> { $($sql:tt)+ }) => { + $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<$return_type> { + use $crate::anyhow::Context; + + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + ))? + .context(::std::format!( + "Error in {}, select_row_bound expected single row result but found none for: {}", + ::std::stringify!($id), + sql_stmt + )) + } + }; + ($vis:vis fn async $id:ident($($arg:ident: $arg_type:ty),+) -> Result<$return_type:ty> { $($sql:tt)+ }) => { + $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<$return_type> { + use $crate::anyhow::Context; + + + self.write(|connection| { + let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); + + connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+)) + .context(::std::format!( + "Error in {}, select_row_bound failed to execute or parse for: {}", + ::std::stringify!($id), + sql_stmt + ))? 
+ .context(::std::format!( + "Error in {}, select_row_bound expected single row result but found none for: {}", + ::std::stringify!($id), + sql_stmt + )) + }).await + } + }; +} diff --git a/crates/dev_server_projects/Cargo.toml b/crates/dev_server_projects/Cargo.toml new file mode 100644 index 0000000..81d5030 --- /dev/null +++ b/crates/dev_server_projects/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "dev_server_projects" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/dev_server_projects.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +gpui.workspace = true +serde.workspace = true +client.workspace = true +rpc.workspace = true + +[dev-dependencies] +serde_json.workspace = true diff --git a/crates/dev_server_projects/LICENSE-GPL b/crates/dev_server_projects/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/dev_server_projects/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/dev_server_projects/src/dev_server_projects.rs b/crates/dev_server_projects/src/dev_server_projects.rs new file mode 100644 index 0000000..31a7d4e --- /dev/null +++ b/crates/dev_server_projects/src/dev_server_projects.rs @@ -0,0 +1,248 @@ +use anyhow::Result; +use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, SharedString, Task}; +use rpc::{ + proto::{self, DevServerStatus}, + TypedEnvelope, +}; +use std::{collections::HashMap, sync::Arc}; + +use client::{Client, ProjectId}; +pub use client::{DevServerId, DevServerProjectId}; + +pub struct Store { + dev_server_projects: HashMap, + dev_servers: HashMap, + _subscriptions: Vec, + client: Arc, +} + +#[derive(Debug, Clone)] +pub struct DevServerProject { + pub id: DevServerProjectId, + pub project_id: Option, + pub path: SharedString, + pub dev_server_id: DevServerId, +} + +impl From for DevServerProject { + fn from(project: proto::DevServerProject) -> Self { + Self { + id: DevServerProjectId(project.id), + project_id: project.project_id.map(|id| ProjectId(id)), + path: project.path.into(), + dev_server_id: DevServerId(project.dev_server_id), + } + } +} + +#[derive(Debug, Clone)] +pub struct DevServer { + pub id: DevServerId, + pub name: SharedString, + pub ssh_connection_string: Option, + pub status: DevServerStatus, +} + +impl From for DevServer { + fn from(dev_server: proto::DevServer) -> Self { + Self { + id: DevServerId(dev_server.dev_server_id), + status: dev_server.status(), + name: dev_server.name.into(), + ssh_connection_string: dev_server.ssh_connection_string.map(|s| s.into()), + } + } +} + +struct GlobalStore(Model); + +impl Global for GlobalStore {} + +pub fn init(client: Arc, cx: &mut AppContext) { + let store = cx.new_model(|cx| Store::new(client, cx)); + cx.set_global(GlobalStore(store)); +} + +impl Store { + pub fn global(cx: &AppContext) -> Model { + cx.global::().0.clone() + } + + pub fn new(client: Arc, cx: &ModelContext) -> Self { + Self { + dev_server_projects: Default::default(), + dev_servers: Default::default(), + _subscriptions: vec![client + .add_message_handler(cx.weak_model(), Self::handle_dev_server_projects_update)], + client, + } + } + + pub fn projects_for_server(&self, id: DevServerId) -> Vec { + let mut projects: Vec = self + .dev_server_projects + .values() + .filter(|project| project.dev_server_id == id) + .cloned() + .collect(); + projects.sort_by_key(|p| (p.path.clone(), p.id)); + projects + } + + pub fn dev_servers(&self) -> Vec 
{ + let mut dev_servers: Vec = self.dev_servers.values().cloned().collect(); + dev_servers.sort_by_key(|d| (d.status == DevServerStatus::Offline, d.name.clone(), d.id)); + dev_servers + } + + pub fn dev_server(&self, id: DevServerId) -> Option<&DevServer> { + self.dev_servers.get(&id) + } + + pub fn dev_server_status(&self, id: DevServerId) -> DevServerStatus { + self.dev_server(id) + .map(|server| server.status) + .unwrap_or(DevServerStatus::Offline) + } + + pub fn dev_server_projects(&self) -> Vec { + let mut projects: Vec = + self.dev_server_projects.values().cloned().collect(); + projects.sort_by_key(|p| (p.path.clone(), p.id)); + projects + } + + pub fn dev_server_project(&self, id: DevServerProjectId) -> Option<&DevServerProject> { + self.dev_server_projects.get(&id) + } + + pub fn dev_server_for_project(&self, id: DevServerProjectId) -> Option<&DevServer> { + self.dev_server_project(id) + .and_then(|project| self.dev_server(project.dev_server_id)) + } + + async fn handle_dev_server_projects_update( + this: Model, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + this.dev_servers = envelope + .payload + .dev_servers + .into_iter() + .map(|dev_server| (DevServerId(dev_server.dev_server_id), dev_server.into())) + .collect(); + this.dev_server_projects = envelope + .payload + .dev_server_projects + .into_iter() + .map(|project| (DevServerProjectId(project.id), project.into())) + .collect(); + + cx.notify(); + })?; + Ok(()) + } + + pub fn create_dev_server_project( + &mut self, + dev_server_id: DevServerId, + path: String, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::CreateDevServerProject { + dev_server_id: dev_server_id.0, + path, + }) + .await + }) + } + + pub fn create_dev_server( + &mut self, + name: String, + ssh_connection_string: Option, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + let result = client + .request(proto::CreateDevServer { + name, + ssh_connection_string, + }) + .await?; + Ok(result) + }) + } + + pub fn rename_dev_server( + &mut self, + dev_server_id: DevServerId, + name: String, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::RenameDevServer { + dev_server_id: dev_server_id.0, + name, + }) + .await?; + Ok(()) + }) + } + + pub fn regenerate_dev_server_token( + &mut self, + dev_server_id: DevServerId, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::RegenerateDevServerToken { + dev_server_id: dev_server_id.0, + }) + .await + }) + } + + pub fn delete_dev_server( + &mut self, + id: DevServerId, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::DeleteDevServer { + dev_server_id: id.0, + }) + .await?; + Ok(()) + }) + } + + pub fn delete_dev_server_project( + &mut self, + id: DevServerProjectId, + cx: &mut ModelContext, + ) -> Task> { + let client = self.client.clone(); + cx.background_executor().spawn(async move { + client + .request(proto::DeleteDevServerProject { + dev_server_project_id: id.0, + }) + .await?; + Ok(()) + }) + } +} diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml new file mode 100644 
index 0000000..48f0544 --- /dev/null +++ b/crates/diagnostics/Cargo.toml @@ -0,0 +1,46 @@ +[package] +name = "diagnostics" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/diagnostics.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +collections.workspace = true +ctor.workspace = true +editor.workspace = true +env_logger.workspace = true +futures.workspace = true +gpui.workspace = true +language.workspace = true +log.workspace = true +lsp.workspace = true +project.workspace = true +rand.workspace = true +schemars.workspace = true +serde.workspace = true +settings.workspace = true +theme.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[dev-dependencies] +client = { workspace = true, features = ["test-support"] } +editor = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +lsp = { workspace = true, features = ["test-support"] } +serde_json.workspace = true +theme = { workspace = true, features = ["test-support"] } +unindent.workspace = true +workspace = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true diff --git a/crates/diagnostics/LICENSE-GPL b/crates/diagnostics/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/diagnostics/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs new file mode 100644 index 0000000..f035746 --- /dev/null +++ b/crates/diagnostics/src/diagnostics.rs @@ -0,0 +1,883 @@ +pub mod items; +mod project_diagnostics_settings; +mod toolbar_controls; + +#[cfg(test)] +mod diagnostics_tests; + +use anyhow::Result; +use collections::{BTreeSet, HashSet}; +use editor::{ + diagnostic_block_renderer, + display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock}, + highlight_diagnostic_message, + scroll::Autoscroll, + Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset, +}; +use futures::{ + channel::mpsc::{self, UnboundedSender}, + StreamExt as _, +}; +use gpui::{ + actions, div, svg, AnyElement, AnyView, AppContext, Context, EventEmitter, FocusHandle, + FocusableView, HighlightStyle, InteractiveElement, IntoElement, Model, ParentElement, Render, + SharedString, Styled, StyledText, Subscription, Task, View, ViewContext, VisualContext, + WeakView, WindowContext, +}; +use language::{ + Bias, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Point, Selection, SelectionGoal, +}; +use lsp::LanguageServerId; +use project::{DiagnosticSummary, Project, ProjectPath}; +use project_diagnostics_settings::ProjectDiagnosticsSettings; +use settings::Settings; +use std::{ + any::{Any, TypeId}, + cmp::Ordering, + mem, + ops::Range, +}; +use theme::ActiveTheme; +pub use toolbar_controls::ToolbarControls; +use ui::{h_flex, prelude::*, Icon, IconName, Label}; +use util::ResultExt; +use workspace::{ + item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams}, + ItemNavHistory, Pane, ToolbarItemLocation, Workspace, +}; + +actions!(diagnostics, [Deploy, ToggleWarnings]); + +pub fn init(cx: &mut AppContext) { + ProjectDiagnosticsSettings::register(cx); + cx.observe_new_views(ProjectDiagnosticsEditor::register) + .detach(); +} + +struct ProjectDiagnosticsEditor { + project: Model, + workspace: WeakView, + 
focus_handle: FocusHandle, + editor: View, + summary: DiagnosticSummary, + excerpts: Model, + path_states: Vec, + paths_to_update: BTreeSet<(ProjectPath, LanguageServerId)>, + include_warnings: bool, + context: u32, + update_paths_tx: UnboundedSender<(ProjectPath, Option)>, + _update_excerpts_task: Task>, + _subscription: Subscription, +} + +struct PathState { + path: ProjectPath, + diagnostic_groups: Vec, +} + +struct DiagnosticGroupState { + language_server_id: LanguageServerId, + primary_diagnostic: DiagnosticEntry, + primary_excerpt_ix: usize, + excerpts: Vec, + blocks: HashSet, + block_count: usize, +} + +impl EventEmitter for ProjectDiagnosticsEditor {} + +impl Render for ProjectDiagnosticsEditor { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let child = if self.path_states.is_empty() { + div() + .bg(cx.theme().colors().editor_background) + .flex() + .items_center() + .justify_center() + .size_full() + .child(Label::new("No problems in workspace")) + } else { + div().size_full().child(self.editor.clone()) + }; + + div() + .track_focus(&self.focus_handle) + .size_full() + .on_action(cx.listener(Self::toggle_warnings)) + .child(child) + } +} + +impl ProjectDiagnosticsEditor { + fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(Self::deploy); + } + + fn new_with_context( + context: u32, + project_handle: Model, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + let project_event_subscription = + cx.subscribe(&project_handle, |this, project, event, cx| match event { + project::Event::DiskBasedDiagnosticsStarted { .. } => { + cx.notify(); + } + project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { + log::debug!("disk based diagnostics finished for server {language_server_id}"); + this.enqueue_update_stale_excerpts(Some(*language_server_id)); + } + project::Event::DiagnosticsUpdated { + language_server_id, + path, + } => { + this.paths_to_update + .insert((path.clone(), *language_server_id)); + this.summary = project.read(cx).diagnostic_summary(false, cx); + cx.emit(EditorEvent::TitleChanged); + + if this.editor.read(cx).is_focused(cx) || this.focus_handle.is_focused(cx) { + log::debug!("diagnostics updated for server {language_server_id}, path {path:?}. recording change"); + } else { + log::debug!("diagnostics updated for server {language_server_id}, path {path:?}. 
updating excerpts"); + this.enqueue_update_stale_excerpts(Some(*language_server_id)); + } + } + _ => {} + }); + + let focus_handle = cx.focus_handle(); + cx.on_focus_in(&focus_handle, |this, cx| this.focus_in(cx)) + .detach(); + cx.on_focus_out(&focus_handle, |this, cx| this.focus_out(cx)) + .detach(); + + let excerpts = cx.new_model(|cx| { + MultiBuffer::new( + project_handle.read(cx).replica_id(), + project_handle.read(cx).capability(), + ) + }); + let editor = cx.new_view(|cx| { + let mut editor = + Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), cx); + editor.set_vertical_scroll_margin(5, cx); + editor + }); + cx.subscribe(&editor, |this, _editor, event: &EditorEvent, cx| { + cx.emit(event.clone()); + match event { + EditorEvent::Focused => { + if this.path_states.is_empty() { + cx.focus(&this.focus_handle); + } + } + EditorEvent::Blurred => this.enqueue_update_stale_excerpts(None), + _ => {} + } + }) + .detach(); + + let (update_excerpts_tx, mut update_excerpts_rx) = mpsc::unbounded(); + + let project = project_handle.read(cx); + let mut this = Self { + project: project_handle.clone(), + context, + summary: project.diagnostic_summary(false, cx), + workspace, + excerpts, + focus_handle, + editor, + path_states: Default::default(), + paths_to_update: Default::default(), + include_warnings: ProjectDiagnosticsSettings::get_global(cx).include_warnings, + update_paths_tx: update_excerpts_tx, + _update_excerpts_task: cx.spawn(move |this, mut cx| async move { + while let Some((path, language_server_id)) = update_excerpts_rx.next().await { + if let Some(buffer) = project_handle + .update(&mut cx, |project, cx| project.open_buffer(path.clone(), cx))? + .await + .log_err() + { + this.update(&mut cx, |this, cx| { + this.update_excerpts(path, language_server_id, buffer, cx); + })?; + } + } + anyhow::Ok(()) + }), + _subscription: project_event_subscription, + }; + this.enqueue_update_all_excerpts(cx); + this + } + + fn new( + project_handle: Model, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + Self::new_with_context( + editor::DEFAULT_MULTIBUFFER_CONTEXT, + project_handle, + workspace, + cx, + ) + } + + fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext) { + if let Some(existing) = workspace.item_of_type::(cx) { + workspace.activate_item(&existing, cx); + } else { + let workspace_handle = cx.view().downgrade(); + let diagnostics = cx.new_view(|cx| { + ProjectDiagnosticsEditor::new(workspace.project().clone(), workspace_handle, cx) + }); + workspace.add_item_to_active_pane(Box::new(diagnostics), None, cx); + } + } + + fn toggle_warnings(&mut self, _: &ToggleWarnings, cx: &mut ViewContext) { + self.include_warnings = !self.include_warnings; + self.enqueue_update_all_excerpts(cx); + cx.notify(); + } + + fn focus_in(&mut self, cx: &mut ViewContext) { + if self.focus_handle.is_focused(cx) && !self.path_states.is_empty() { + self.editor.focus_handle(cx).focus(cx) + } + } + + fn focus_out(&mut self, cx: &mut ViewContext) { + if !self.focus_handle.is_focused(cx) && !self.editor.focus_handle(cx).is_focused(cx) { + self.enqueue_update_stale_excerpts(None); + } + } + + /// Enqueue an update of all excerpts. Updates all paths that either + /// currently have diagnostics or are currently present in this view. 
+ fn enqueue_update_all_excerpts(&mut self, cx: &mut ViewContext) { + self.project.update(cx, |project, cx| { + let mut paths = project + .diagnostic_summaries(false, cx) + .map(|(path, _, _)| path) + .collect::>(); + paths.extend(self.path_states.iter().map(|state| state.path.clone())); + for path in paths { + self.update_paths_tx.unbounded_send((path, None)).unwrap(); + } + }); + } + + /// Enqueue an update of the excerpts for any path whose diagnostics are known + /// to have changed. If a language server id is passed, then only the excerpts for + /// that language server's diagnostics will be updated. Otherwise, all stale excerpts + /// will be refreshed. + fn enqueue_update_stale_excerpts(&mut self, language_server_id: Option) { + for (path, server_id) in &self.paths_to_update { + if language_server_id.map_or(true, |id| id == *server_id) { + self.update_paths_tx + .unbounded_send((path.clone(), Some(*server_id))) + .unwrap(); + } + } + } + + fn update_excerpts( + &mut self, + path_to_update: ProjectPath, + server_to_update: Option, + buffer: Model, + cx: &mut ViewContext, + ) { + self.paths_to_update.retain(|(path, server_id)| { + *path != path_to_update + || server_to_update.map_or(false, |to_update| *server_id != to_update) + }); + + let was_empty = self.path_states.is_empty(); + let snapshot = buffer.read(cx).snapshot(); + let path_ix = match self + .path_states + .binary_search_by_key(&&path_to_update, |e| &e.path) + { + Ok(ix) => ix, + Err(ix) => { + self.path_states.insert( + ix, + PathState { + path: path_to_update.clone(), + diagnostic_groups: Default::default(), + }, + ); + ix + } + }; + + let mut prev_excerpt_id = if path_ix > 0 { + let prev_path_last_group = &self.path_states[path_ix - 1] + .diagnostic_groups + .last() + .unwrap(); + *prev_path_last_group.excerpts.last().unwrap() + } else { + ExcerptId::min() + }; + + let path_state = &mut self.path_states[path_ix]; + let mut new_group_ixs = Vec::new(); + let mut blocks_to_add = Vec::new(); + let mut blocks_to_remove = HashSet::default(); + let mut first_excerpt_id = None; + let max_severity = if self.include_warnings { + DiagnosticSeverity::WARNING + } else { + DiagnosticSeverity::ERROR + }; + let excerpts_snapshot = self.excerpts.update(cx, |excerpts, cx| { + let mut old_groups = mem::take(&mut path_state.diagnostic_groups) + .into_iter() + .enumerate() + .peekable(); + let mut new_groups = snapshot + .diagnostic_groups(server_to_update) + .into_iter() + .filter(|(_, group)| { + group.entries[group.primary_ix].diagnostic.severity <= max_severity + }) + .peekable(); + loop { + let mut to_insert = None; + let mut to_remove = None; + let mut to_keep = None; + match (old_groups.peek(), new_groups.peek()) { + (None, None) => break, + (None, Some(_)) => to_insert = new_groups.next(), + (Some((_, old_group)), None) => { + if server_to_update.map_or(true, |id| id == old_group.language_server_id) { + to_remove = old_groups.next(); + } else { + to_keep = old_groups.next(); + } + } + (Some((_, old_group)), Some((new_language_server_id, new_group))) => { + let old_primary = &old_group.primary_diagnostic; + let new_primary = &new_group.entries[new_group.primary_ix]; + match compare_diagnostics(old_primary, new_primary, &snapshot) + .then_with(|| old_group.language_server_id.cmp(new_language_server_id)) + { + Ordering::Less => { + if server_to_update + .map_or(true, |id| id == old_group.language_server_id) + { + to_remove = old_groups.next(); + } else { + to_keep = old_groups.next(); + } + } + Ordering::Equal => { + to_keep = 
old_groups.next(); + new_groups.next(); + } + Ordering::Greater => to_insert = new_groups.next(), + } + } + } + + if let Some((language_server_id, group)) = to_insert { + let mut group_state = DiagnosticGroupState { + language_server_id, + primary_diagnostic: group.entries[group.primary_ix].clone(), + primary_excerpt_ix: 0, + excerpts: Default::default(), + blocks: Default::default(), + block_count: 0, + }; + let mut pending_range: Option<(Range, usize)> = None; + let mut is_first_excerpt_for_group = true; + for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { + let resolved_entry = entry.map(|e| e.resolve::(&snapshot)); + if let Some((range, start_ix)) = &mut pending_range { + if let Some(entry) = resolved_entry.as_ref() { + if entry.range.start.row <= range.end.row + 1 + self.context * 2 { + range.end = range.end.max(entry.range.end); + continue; + } + } + + let excerpt_start = + Point::new(range.start.row.saturating_sub(self.context), 0); + let excerpt_end = snapshot.clip_point( + Point::new(range.end.row + self.context, u32::MAX), + Bias::Left, + ); + + let excerpt_id = excerpts + .insert_excerpts_after( + prev_excerpt_id, + buffer.clone(), + [ExcerptRange { + context: excerpt_start..excerpt_end, + primary: Some(range.clone()), + }], + cx, + ) + .pop() + .unwrap(); + + prev_excerpt_id = excerpt_id; + first_excerpt_id.get_or_insert_with(|| prev_excerpt_id); + group_state.excerpts.push(excerpt_id); + let header_position = (excerpt_id, language::Anchor::MIN); + + if is_first_excerpt_for_group { + is_first_excerpt_for_group = false; + let mut primary = + group.entries[group.primary_ix].diagnostic.clone(); + primary.message = + primary.message.split('\n').next().unwrap().to_string(); + group_state.block_count += 1; + blocks_to_add.push(BlockProperties { + position: header_position, + height: 2, + style: BlockStyle::Sticky, + render: diagnostic_header_renderer(primary), + disposition: BlockDisposition::Above, + }); + } + + for entry in &group.entries[*start_ix..ix] { + let mut diagnostic = entry.diagnostic.clone(); + if diagnostic.is_primary { + group_state.primary_excerpt_ix = group_state.excerpts.len() - 1; + diagnostic.message = + entry.diagnostic.message.split('\n').skip(1).collect(); + } + + if !diagnostic.message.is_empty() { + group_state.block_count += 1; + blocks_to_add.push(BlockProperties { + position: (excerpt_id, entry.range.start), + height: diagnostic.message.matches('\n').count() as u8 + 1, + style: BlockStyle::Fixed, + render: diagnostic_block_renderer(diagnostic, true), + disposition: BlockDisposition::Below, + }); + } + } + + pending_range.take(); + } + + if let Some(entry) = resolved_entry { + pending_range = Some((entry.range.clone(), ix)); + } + } + + new_group_ixs.push(path_state.diagnostic_groups.len()); + path_state.diagnostic_groups.push(group_state); + } else if let Some((_, group_state)) = to_remove { + excerpts.remove_excerpts(group_state.excerpts.iter().copied(), cx); + blocks_to_remove.extend(group_state.blocks.iter().copied()); + } else if let Some((_, group_state)) = to_keep { + prev_excerpt_id = *group_state.excerpts.last().unwrap(); + first_excerpt_id.get_or_insert_with(|| prev_excerpt_id); + path_state.diagnostic_groups.push(group_state); + } + } + + excerpts.snapshot(cx) + }); + + self.editor.update(cx, |editor, cx| { + editor.remove_blocks(blocks_to_remove, None, cx); + let block_ids = editor.insert_blocks( + blocks_to_add.into_iter().flat_map(|block| { + let (excerpt_id, text_anchor) = block.position; + Some(BlockProperties { + 
position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?, + height: block.height, + style: block.style, + render: block.render, + disposition: block.disposition, + }) + }), + Some(Autoscroll::fit()), + cx, + ); + + let mut block_ids = block_ids.into_iter(); + for ix in new_group_ixs { + let group_state = &mut path_state.diagnostic_groups[ix]; + group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect(); + } + }); + + if path_state.diagnostic_groups.is_empty() { + self.path_states.remove(path_ix); + } + + self.editor.update(cx, |editor, cx| { + let groups; + let mut selections; + let new_excerpt_ids_by_selection_id; + if was_empty { + groups = self.path_states.first()?.diagnostic_groups.as_slice(); + new_excerpt_ids_by_selection_id = [(0, ExcerptId::min())].into_iter().collect(); + selections = vec![Selection { + id: 0, + start: 0, + end: 0, + reversed: false, + goal: SelectionGoal::None, + }]; + } else { + groups = self.path_states.get(path_ix)?.diagnostic_groups.as_slice(); + new_excerpt_ids_by_selection_id = + editor.change_selections(Some(Autoscroll::fit()), cx, |s| s.refresh()); + selections = editor.selections.all::(cx); + } + + // If any selection has lost its position, move it to start of the next primary diagnostic. + let snapshot = editor.snapshot(cx); + for selection in &mut selections { + if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) { + let group_ix = match groups.binary_search_by(|probe| { + probe + .excerpts + .last() + .unwrap() + .cmp(new_excerpt_id, &snapshot.buffer_snapshot) + }) { + Ok(ix) | Err(ix) => ix, + }; + if let Some(group) = groups.get(group_ix) { + if let Some(offset) = excerpts_snapshot + .anchor_in_excerpt( + group.excerpts[group.primary_excerpt_ix], + group.primary_diagnostic.range.start, + ) + .map(|anchor| anchor.to_offset(&excerpts_snapshot)) + { + selection.start = offset; + selection.end = offset; + } + } + } + } + editor.change_selections(None, cx, |s| { + s.select(selections); + }); + Some(()) + }); + + if self.path_states.is_empty() { + if self.editor.focus_handle(cx).is_focused(cx) { + cx.focus(&self.focus_handle); + } + } else if self.focus_handle.is_focused(cx) { + let focus_handle = self.editor.focus_handle(cx); + cx.focus(&focus_handle); + } + + #[cfg(test)] + self.check_invariants(cx); + + cx.notify(); + } + + #[cfg(test)] + fn check_invariants(&self, cx: &mut ViewContext) { + let mut excerpts = Vec::new(); + for (id, buffer, _) in self.excerpts.read(cx).snapshot(cx).excerpts() { + if let Some(file) = buffer.file() { + excerpts.push((id, file.path().clone())); + } + } + + let mut prev_path = None; + for (_, path) in &excerpts { + if let Some(prev_path) = prev_path { + if path < prev_path { + panic!("excerpts are not sorted by path {:?}", excerpts); + } + } + prev_path = Some(path); + } + } +} + +impl FocusableView for ProjectDiagnosticsEditor { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Item for ProjectDiagnosticsEditor { + type Event = EditorEvent; + + fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { + Editor::to_item_events(event, f) + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor.update(cx, |editor, cx| editor.deactivated(cx)); + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, cx)) + } + + fn tab_tooltip_text(&self, _: &AppContext) -> Option { + Some("Project Diagnostics".into()) + } + + 
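(Editorial aside, not part of the diff.) `tab_content` just below relies on the fluent `.when(condition, ...)` builder to add children conditionally, which is how the error and warning counters are rendered independently of each other. A minimal sketch of that helper in isolation, assuming the `ui::prelude` imports already used by this file; `example_counters` is a hypothetical function:

fn example_counters(error_count: usize, warning_count: usize) -> impl IntoElement {
    h_flex()
        .gap_1()
        .when(error_count > 0, |this| {
            // Only added when there is at least one error.
            this.child(Label::new(error_count.to_string()).color(Color::Error))
        })
        .when(warning_count > 0, |this| {
            this.child(Label::new(warning_count.to_string()).color(Color::Warning))
        })
}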
fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement { + if self.summary.error_count == 0 && self.summary.warning_count == 0 { + Label::new("No problems") + .color(if params.selected { + Color::Default + } else { + Color::Muted + }) + .into_any_element() + } else { + h_flex() + .gap_1() + .when(self.summary.error_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child(Label::new(self.summary.error_count.to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .when(self.summary.warning_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning)) + .child(Label::new(self.summary.warning_count.to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .into_any_element() + } + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + Some("project diagnostics") + } + + fn for_each_project_item( + &self, + cx: &AppContext, + f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item), + ) { + self.editor.for_each_project_item(cx, f) + } + + fn is_singleton(&self, _: &AppContext) -> bool { + false + } + + fn set_nav_history(&mut self, nav_history: ItemNavHistory, cx: &mut ViewContext) { + self.editor.update(cx, |editor, _| { + editor.set_nav_history(Some(nav_history)); + }); + } + + fn clone_on_split( + &self, + _workspace_id: workspace::WorkspaceId, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(|cx| { + ProjectDiagnosticsEditor::new(self.project.clone(), self.workspace.clone(), cx) + })) + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).is_dirty(cx) + } + + fn has_conflict(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).has_conflict(cx) + } + + fn can_save(&self, _: &AppContext) -> bool { + true + } + + fn save( + &mut self, + format: bool, + project: Model, + cx: &mut ViewContext, + ) -> Task> { + self.editor.save(format, project, cx) + } + + fn save_as( + &mut self, + _: Model, + _: ProjectPath, + _: &mut ViewContext, + ) -> Task> { + unreachable!() + } + + fn reload(&mut self, project: Model, cx: &mut ViewContext) -> Task> { + self.editor.reload(project, cx) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a View, + _: &'a AppContext, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } + + fn breadcrumb_location(&self) -> ToolbarItemLocation { + ToolbarItemLocation::PrimaryLeft + } + + fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option> { + self.editor.breadcrumbs(theme, cx) + } + + fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + } + + fn serialized_item_kind() -> Option<&'static str> { + Some("diagnostics") + } + + fn deserialize( + project: Model, + workspace: WeakView, + _workspace_id: workspace::WorkspaceId, + _item_id: workspace::ItemId, + cx: &mut ViewContext, + ) -> Task>> { + Task::ready(Ok(cx.new_view(|cx| Self::new(project, workspace, cx)))) + } +} + +fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock { + let (message, code_ranges) = highlight_diagnostic_message(&diagnostic); + let message: SharedString = message; + Box::new(move |cx| { + 
let highlight_style: HighlightStyle = cx.theme().colors().text_accent.into(); + h_flex() + .id("diagnostic header") + .py_2() + .pl_10() + .pr_5() + .w_full() + .justify_between() + .gap_2() + .child( + h_flex() + .gap_3() + .map(|stack| { + stack.child( + svg() + .size(cx.text_style().font_size) + .flex_none() + .map(|icon| { + if diagnostic.severity == DiagnosticSeverity::ERROR { + icon.path(IconName::XCircle.path()) + .text_color(Color::Error.color(cx)) + } else { + icon.path(IconName::ExclamationTriangle.path()) + .text_color(Color::Warning.color(cx)) + } + }), + ) + }) + .child( + h_flex() + .gap_1() + .child( + StyledText::new(message.clone()).with_highlights( + &cx.text_style(), + code_ranges + .iter() + .map(|range| (range.clone(), highlight_style)), + ), + ) + .when_some(diagnostic.code.as_ref(), |stack, code| { + stack.child( + div() + .child(SharedString::from(format!("({code})"))) + .text_color(cx.theme().colors().text_muted), + ) + }), + ), + ) + .child( + h_flex() + .gap_1() + .when_some(diagnostic.source.as_ref(), |stack, source| { + stack.child( + div() + .child(SharedString::from(source.clone())) + .text_color(cx.theme().colors().text_muted), + ) + }), + ) + .into_any_element() + }) +} + +fn compare_diagnostics( + old: &DiagnosticEntry, + new: &DiagnosticEntry, + snapshot: &language::BufferSnapshot, +) -> Ordering { + use language::ToOffset; + // The old diagnostics may point to a previously open Buffer for this file. + if !old.range.start.is_valid(snapshot) { + return Ordering::Greater; + } + old.range + .start + .to_offset(snapshot) + .cmp(&new.range.start.to_offset(snapshot)) + .then_with(|| { + old.range + .end + .to_offset(snapshot) + .cmp(&new.range.end.to_offset(snapshot)) + }) + .then_with(|| old.diagnostic.message.cmp(&new.diagnostic.message)) +} diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs new file mode 100644 index 0000000..f456020 --- /dev/null +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -0,0 +1,1014 @@ +use super::*; +use collections::HashMap; +use editor::{ + display_map::{BlockContext, DisplayRow, TransformBlock}, + DisplayPoint, GutterDimensions, +}; +use gpui::{px, AvailableSpace, Stateful, TestAppContext, VisualTestContext}; +use language::{ + Diagnostic, DiagnosticEntry, DiagnosticSeverity, OffsetRangeExt, PointUtf16, Rope, Unclipped, +}; +use pretty_assertions::assert_eq; +use project::FakeFs; +use rand::{rngs::StdRng, seq::IteratorRandom as _, Rng}; +use serde_json::json; +use settings::SettingsStore; +use std::{ + env, + path::{Path, PathBuf}, +}; +use unindent::Unindent as _; +use util::{post_inc, RandomCharIter}; + +#[ctor::ctor] +fn init_logger() { + if env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} + +#[gpui::test] +async fn test_diagnostics(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + "consts.rs": " + const a: i32 = 'a'; + const b: i32 = c; + " + .unindent(), + + "main.rs": " + fn main() { + let x = vec![]; + let y = vec![]; + a(x); + b(y); + // comment 1 + // comment 2 + c(y); + d(x); + } + " + .unindent(), + }), + ) + .await; + + let language_server_id = LanguageServerId(0); + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*window, cx); + let workspace = window.root(cx).unwrap(); + + // Create some diagnostics + project.update(cx, |project, 
cx| { + project + .update_diagnostic_entries( + language_server_id, + PathBuf::from("/test/main.rs"), + None, + vec![ + DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 8))..Unclipped(PointUtf16::new(1, 9)), + diagnostic: Diagnostic { + message: + "move occurs because `x` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(2, 8))..Unclipped(PointUtf16::new(2, 9)), + diagnostic: Diagnostic { + message: + "move occurs because `y` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(3, 6))..Unclipped(PointUtf16::new(3, 7)), + diagnostic: Diagnostic { + message: "value moved here".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(4, 6))..Unclipped(PointUtf16::new(4, 7)), + diagnostic: Diagnostic { + message: "value moved here".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(7, 6))..Unclipped(PointUtf16::new(7, 7)), + diagnostic: Diagnostic { + message: "use of moved value\nvalue used here after move".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(8, 6))..Unclipped(PointUtf16::new(8, 7)), + diagnostic: Diagnostic { + message: "use of moved value\nvalue used here after move".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + ], + cx, + ) + .unwrap(); + }); + + // Open the project diagnostics view while there are already diagnostics. 
+ let view = window.build_view(cx, |cx| { + ProjectDiagnosticsEditor::new_with_context(1, project.clone(), workspace.downgrade(), cx) + }); + let editor = view.update(cx, |view, _| view.editor.clone()); + + view.next_notification(cx).await; + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + (DisplayRow(15), "collapsed context".into()), + (DisplayRow(16), "diagnostic header".into()), + (DisplayRow(25), "collapsed context".into()), + ] + ); + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + // + // main.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + "\n", // context ellipsis + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); + + // Cursor is at the first diagnostic + editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(12), 6)..DisplayPoint::new(DisplayRow(12), 6)] + ); + }); + + // Diagnostics are added for another earlier path. + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(language_server_id, cx); + project + .update_diagnostic_entries( + language_server_id, + PathBuf::from("/test/consts.rs"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)), + diagnostic: Diagnostic { + message: "mismatched types\nexpected `usize`, found `char`".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(language_server_id, cx); + }); + + view.next_notification(cx).await; + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + (DisplayRow(7), "path header block".into()), + (DisplayRow(9), "diagnostic header".into()), + (DisplayRow(22), "collapsed context".into()), + (DisplayRow(23), "diagnostic header".into()), + (DisplayRow(32), "collapsed context".into()), + ] + ); + + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + // + // consts.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "const a: i32 = 'a';\n", + "\n", // supporting diagnostic + "const b: i32 = c;\n", + // + // main.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // filename + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting 
diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); + + // Cursor keeps its position. + editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(19), 6)..DisplayPoint::new(DisplayRow(19), 6)] + ); + }); + + // Diagnostics are added to the first path + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(language_server_id, cx); + project + .update_diagnostic_entries( + language_server_id, + PathBuf::from("/test/consts.rs"), + None, + vec![ + DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)), + diagnostic: Diagnostic { + message: "mismatched types\nexpected `usize`, found `char`".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 15))..Unclipped(PointUtf16::new(1, 15)), + diagnostic: Diagnostic { + message: "unresolved name `c`".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + ], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(language_server_id, cx); + }); + + view.next_notification(cx).await; + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + (DisplayRow(7), "collapsed context".into()), + (DisplayRow(8), "diagnostic header".into()), + (DisplayRow(13), "path header block".into()), + (DisplayRow(15), "diagnostic header".into()), + (DisplayRow(28), "collapsed context".into()), + (DisplayRow(29), "diagnostic header".into()), + (DisplayRow(38), "collapsed context".into()), + ] + ); + + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + // + // consts.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "const a: i32 = 'a';\n", + "\n", // supporting diagnostic + "const b: i32 = c;\n", + "\n", // context ellipsis + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "const a: i32 = 'a';\n", + "const b: i32 = c;\n", + "\n", // supporting diagnostic + // + // main.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + "\n", // context ellipsis + // diagnostic group 2 + "\n", // primary message + "\n", // filename + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); +} + +#[gpui::test] +async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + "main.js": " + a(); + b(); + c(); + d(); + e(); + ".unindent() + }), + ) + .await; + + let server_id_1 = LanguageServerId(100); + let server_id_2 = LanguageServerId(101); + let project = 
Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*window, cx); + let workspace = window.root(cx).unwrap(); + + let view = window.build_view(cx, |cx| { + ProjectDiagnosticsEditor::new_with_context(1, project.clone(), workspace.downgrade(), cx) + }); + let editor = view.update(cx, |view, _| view.editor.clone()); + + // Two language servers start updating diagnostics + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(server_id_1, cx); + project.disk_based_diagnostics_started(server_id_2, cx); + project + .update_diagnostic_entries( + server_id_1, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)), + diagnostic: Diagnostic { + message: "error 1".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + }); + + // The first language server finishes + project.update(cx, |project, cx| { + project.disk_based_diagnostics_finished(server_id_1, cx); + }); + + // Only the first language server's diagnostics are shown. + cx.executor().run_until_parked(); + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + ] + ); + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // + "b();", + ) + ); + + // The second language server finishes + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)), + diagnostic: Diagnostic { + message: "warning 1".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 2, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_2, cx); + }); + + // Both language server's diagnostics are shown. + cx.executor().run_until_parked(); + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + (DisplayRow(6), "collapsed context".into()), + (DisplayRow(7), "diagnostic header".into()), + ] + ); + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // location + "b();\n", // + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "a();\n", // context + "b();\n", // + "c();", // context + ) + ); + + // Both language servers start updating diagnostics, and the first server finishes. 
+ project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(server_id_1, cx); + project.disk_based_diagnostics_started(server_id_2, cx); + project + .update_diagnostic_entries( + server_id_1, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)), + diagnostic: Diagnostic { + message: "warning 2".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.rs"), + None, + vec![], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_1, cx); + }); + + // Only the first language server's diagnostics are updated. + cx.executor().run_until_parked(); + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + (DisplayRow(7), "collapsed context".into()), + (DisplayRow(8), "diagnostic header".into()), + ] + ); + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // location + "b();\n", // + "c();\n", // context + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "b();\n", // context + "c();\n", // + "d();", // context + ) + ); + + // The second language server finishes. + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)), + diagnostic: Diagnostic { + message: "warning 2".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_2, cx); + }); + + // Both language servers' diagnostics are updated. 
+ cx.executor().run_until_parked(); + assert_eq!( + editor_blocks(&editor, cx), + [ + (DisplayRow(0), "path header block".into()), + (DisplayRow(2), "diagnostic header".into()), + (DisplayRow(7), "collapsed context".into()), + (DisplayRow(8), "diagnostic header".into()), + ] + ); + assert_eq!( + editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "b();\n", // location + "c();\n", // + "d();\n", // context + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "c();\n", // context + "d();\n", // + "e();", // context + ) + ); +} + +#[gpui::test(iterations = 20)] +async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) { + init_test(cx); + + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/test", json!({})).await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*window, cx); + let workspace = window.root(cx).unwrap(); + + let mutated_view = window.build_view(cx, |cx| { + ProjectDiagnosticsEditor::new_with_context(1, project.clone(), workspace.downgrade(), cx) + }); + + workspace.update(cx, |workspace, cx| { + workspace.add_item_to_center(Box::new(mutated_view.clone()), cx); + }); + mutated_view.update(cx, |view, cx| { + assert!(view.focus_handle.is_focused(cx)); + }); + + let mut next_group_id = 0; + let mut next_filename = 0; + let mut language_server_ids = vec![LanguageServerId(0)]; + let mut updated_language_servers = HashSet::default(); + let mut current_diagnostics: HashMap< + (PathBuf, LanguageServerId), + Vec>>, + > = Default::default(); + + for _ in 0..operations { + match rng.gen_range(0..100) { + // language server completes its diagnostic check + 0..=20 if !updated_language_servers.is_empty() => { + let server_id = *updated_language_servers.iter().choose(&mut rng).unwrap(); + log::info!("finishing diagnostic check for language server {server_id}"); + project.update(cx, |project, cx| { + project.disk_based_diagnostics_finished(server_id, cx) + }); + + if rng.gen_bool(0.5) { + cx.run_until_parked(); + } + } + + // language server updates diagnostics + _ => { + let (path, server_id, diagnostics) = + match current_diagnostics.iter_mut().choose(&mut rng) { + // update existing set of diagnostics + Some(((path, server_id), diagnostics)) if rng.gen_bool(0.5) => { + (path.clone(), *server_id, diagnostics) + } + + // insert a set of diagnostics for a new path + _ => { + let path: PathBuf = + format!("/test/{}.rs", post_inc(&mut next_filename)).into(); + let len = rng.gen_range(128..256); + let content = + RandomCharIter::new(&mut rng).take(len).collect::(); + fs.insert_file(&path, content.into_bytes()).await; + + let server_id = match language_server_ids.iter().choose(&mut rng) { + Some(server_id) if rng.gen_bool(0.5) => *server_id, + _ => { + let id = LanguageServerId(language_server_ids.len()); + language_server_ids.push(id); + id + } + }; + + ( + path.clone(), + server_id, + current_diagnostics + .entry((path, server_id)) + .or_insert(vec![]), + ) + } + }; + + updated_language_servers.insert(server_id); + + project.update(cx, |project, cx| { + log::info!("updating diagnostics. 
language server {server_id} path {path:?}"); + randomly_update_diagnostics_for_path( + &fs, + &path, + diagnostics, + &mut next_group_id, + &mut rng, + ); + project + .update_diagnostic_entries(server_id, path, None, diagnostics.clone(), cx) + .unwrap() + }); + + cx.run_until_parked(); + } + } + } + + log::info!("updating mutated diagnostics view"); + mutated_view.update(cx, |view, _| view.enqueue_update_stale_excerpts(None)); + cx.run_until_parked(); + + log::info!("constructing reference diagnostics view"); + let reference_view = window.build_view(cx, |cx| { + ProjectDiagnosticsEditor::new_with_context(1, project.clone(), workspace.downgrade(), cx) + }); + cx.run_until_parked(); + + let mutated_excerpts = get_diagnostics_excerpts(&mutated_view, cx); + let reference_excerpts = get_diagnostics_excerpts(&reference_view, cx); + assert_eq!(mutated_excerpts, reference_excerpts); +} + +fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings = SettingsStore::test(cx); + cx.set_global(settings); + theme::init(theme::LoadThemes::JustBase, cx); + language::init(cx); + client::init_settings(cx); + workspace::init_settings(cx); + Project::init_settings(cx); + crate::init(cx); + editor::init(cx); + }); +} + +#[derive(Debug, PartialEq, Eq)] +struct ExcerptInfo { + path: PathBuf, + range: ExcerptRange, + group_id: usize, + primary: bool, + language_server: LanguageServerId, +} + +fn get_diagnostics_excerpts( + view: &View, + cx: &mut VisualTestContext, +) -> Vec { + view.update(cx, |view, cx| { + let mut result = vec![]; + let mut excerpt_indices_by_id = HashMap::default(); + view.excerpts.update(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + for (id, buffer, range) in snapshot.excerpts() { + excerpt_indices_by_id.insert(id, result.len()); + result.push(ExcerptInfo { + path: buffer.file().unwrap().path().to_path_buf(), + range: ExcerptRange { + context: range.context.to_point(&buffer), + primary: range.primary.map(|range| range.to_point(&buffer)), + }, + group_id: usize::MAX, + primary: false, + language_server: LanguageServerId(0), + }); + } + }); + + for state in &view.path_states { + for group in &state.diagnostic_groups { + for (ix, excerpt_id) in group.excerpts.iter().enumerate() { + let excerpt_ix = excerpt_indices_by_id[excerpt_id]; + let excerpt = &mut result[excerpt_ix]; + excerpt.group_id = group.primary_diagnostic.diagnostic.group_id; + excerpt.language_server = group.language_server_id; + excerpt.primary = ix == group.primary_excerpt_ix; + } + } + } + + result + }) +} + +fn randomly_update_diagnostics_for_path( + fs: &FakeFs, + path: &Path, + diagnostics: &mut Vec>>, + next_group_id: &mut usize, + rng: &mut impl Rng, +) { + let file_content = fs.read_file_sync(path).unwrap(); + let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref()); + + let mut group_ids = diagnostics + .iter() + .map(|d| d.diagnostic.group_id) + .collect::>(); + + let mutation_count = rng.gen_range(1..=3); + for _ in 0..mutation_count { + if rng.gen_bool(0.5) && !group_ids.is_empty() { + let group_id = *group_ids.iter().choose(rng).unwrap(); + log::info!(" removing diagnostic group {group_id}"); + diagnostics.retain(|d| d.diagnostic.group_id != group_id); + group_ids.remove(&group_id); + } else { + let group_id = *next_group_id; + *next_group_id += 1; + + let mut new_diagnostics = vec![random_diagnostic(rng, &file_text, group_id, true)]; + for _ in 0..rng.gen_range(0..=1) { + new_diagnostics.push(random_diagnostic(rng, &file_text, group_id, false)); + } + + let ix 
= rng.gen_range(0..=diagnostics.len()); + log::info!( + " inserting diagnostic group {group_id} at index {ix}. ranges: {:?}", + new_diagnostics + .iter() + .map(|d| (d.range.start.0, d.range.end.0)) + .collect::>() + ); + diagnostics.splice(ix..ix, new_diagnostics); + } + } +} + +fn random_diagnostic( + rng: &mut impl Rng, + file_text: &Rope, + group_id: usize, + is_primary: bool, +) -> DiagnosticEntry> { + // Intentionally allow erroneous ranges some of the time (that run off the end of the file), + // because language servers can potentially give us those, and we should handle them gracefully. + const ERROR_MARGIN: usize = 10; + + let start = rng.gen_range(0..file_text.len().saturating_add(ERROR_MARGIN)); + let end = rng.gen_range(start..file_text.len().saturating_add(ERROR_MARGIN)); + let range = Range { + start: Unclipped(file_text.offset_to_point_utf16(start)), + end: Unclipped(file_text.offset_to_point_utf16(end)), + }; + let severity = if rng.gen_bool(0.5) { + DiagnosticSeverity::WARNING + } else { + DiagnosticSeverity::ERROR + }; + let message = format!("diagnostic group {group_id}"); + + DiagnosticEntry { + range, + diagnostic: Diagnostic { + source: None, // (optional) service that created the diagnostic + code: None, // (optional) machine-readable code that identifies the diagnostic + severity, + message, + group_id, + is_primary, + is_disk_based: false, + is_unnecessary: false, + }, + } +} + +fn editor_blocks( + editor: &View, + cx: &mut VisualTestContext, +) -> Vec<(DisplayRow, SharedString)> { + let mut blocks = Vec::new(); + cx.draw(gpui::Point::default(), AvailableSpace::min_size(), |cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + blocks.extend( + snapshot + .blocks_in_range(DisplayRow(0)..snapshot.max_point().row()) + .enumerate() + .filter_map(|(ix, (row, block))| { + let name: SharedString = match block { + TransformBlock::Custom(block) => { + let mut element = block.render(&mut BlockContext { + context: cx, + anchor_x: px(0.), + gutter_dimensions: &GutterDimensions::default(), + line_height: px(0.), + em_width: px(0.), + max_width: px(0.), + block_id: ix, + editor_style: &editor::EditorStyle::default(), + }); + let element = element.downcast_mut::>().unwrap(); + element + .interactivity() + .element_id + .clone()? + .try_into() + .ok()? + } + + TransformBlock::ExcerptHeader { + starts_new_buffer, .. 
+ } => { + if *starts_new_buffer { + "path header block".into() + } else { + "collapsed context".into() + } + } + }; + + Some((row, name)) + }), + ) + }); + + div().into_any() + }); + blocks +} diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs new file mode 100644 index 0000000..715da22 --- /dev/null +++ b/crates/diagnostics/src/items.rs @@ -0,0 +1,211 @@ +use std::time::Duration; + +use editor::Editor; +use gpui::{ + percentage, rems, Animation, AnimationExt, EventEmitter, IntoElement, ParentElement, Render, + Styled, Subscription, Transformation, View, ViewContext, WeakView, +}; +use language::Diagnostic; +use ui::{h_flex, prelude::*, Button, ButtonLike, Color, Icon, IconName, Label, Tooltip}; +use workspace::{item::ItemHandle, StatusItemView, ToolbarItemEvent, Workspace}; + +use crate::{Deploy, ProjectDiagnosticsEditor}; + +pub struct DiagnosticIndicator { + summary: project::DiagnosticSummary, + active_editor: Option>, + workspace: WeakView, + current_diagnostic: Option, + _observe_active_editor: Option, +} + +impl Render for DiagnosticIndicator { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let diagnostic_indicator = match (self.summary.error_count, self.summary.warning_count) { + (0, 0) => h_flex().map(|this| { + this.child( + Icon::new(IconName::Check) + .size(IconSize::Small) + .color(Color::Default), + ) + }), + (0, warning_count) => h_flex() + .gap_1() + .child( + Icon::new(IconName::ExclamationTriangle) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child(Label::new(warning_count.to_string()).size(LabelSize::Small)), + (error_count, 0) => h_flex() + .gap_1() + .child( + Icon::new(IconName::XCircle) + .size(IconSize::Small) + .color(Color::Error), + ) + .child(Label::new(error_count.to_string()).size(LabelSize::Small)), + (error_count, warning_count) => h_flex() + .gap_1() + .child( + Icon::new(IconName::XCircle) + .size(IconSize::Small) + .color(Color::Error), + ) + .child(Label::new(error_count.to_string()).size(LabelSize::Small)) + .child( + Icon::new(IconName::ExclamationTriangle) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child(Label::new(warning_count.to_string()).size(LabelSize::Small)), + }; + + let has_in_progress_checks = self + .workspace + .upgrade() + .and_then(|workspace| { + workspace + .read(cx) + .project() + .read(cx) + .language_servers_running_disk_based_diagnostics() + .next() + }) + .is_some(); + + let status = if has_in_progress_checks { + Some( + h_flex() + .gap_2() + .child( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| { + icon.transform(Transformation::rotate(percentage(delta))) + }, + ), + ) + .child( + Label::new("Checking…") + .size(LabelSize::Small) + .into_any_element(), + ) + .into_any_element(), + ) + } else if let Some(diagnostic) = &self.current_diagnostic { + let message = diagnostic.message.split('\n').next().unwrap().to_string(); + Some( + Button::new("diagnostic_message", message) + .label_size(LabelSize::Small) + .tooltip(|cx| { + Tooltip::for_action("Next Diagnostic", &editor::actions::GoToDiagnostic, cx) + }) + .on_click(cx.listener(|this, _, cx| { + this.go_to_next_diagnostic(cx); + })) + .into_any_element(), + ) + } else { + None + }; + + h_flex() + .h(rems(1.375)) + .gap_2() + .child( + ButtonLike::new("diagnostic-indicator") + .child(diagnostic_indicator) + .tooltip(|cx| Tooltip::for_action("Project Diagnostics", &Deploy, cx)) + 
.on_click(cx.listener(|this, _, cx| { + if let Some(workspace) = this.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + ProjectDiagnosticsEditor::deploy(workspace, &Default::default(), cx) + }) + } + })), + ) + .children(status) + } +} + +impl DiagnosticIndicator { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let project = workspace.project(); + cx.subscribe(project, |this, project, event, cx| match event { + project::Event::DiskBasedDiagnosticsStarted { .. } => { + cx.notify(); + } + + project::Event::DiskBasedDiagnosticsFinished { .. } + | project::Event::LanguageServerRemoved(_) => { + this.summary = project.read(cx).diagnostic_summary(false, cx); + cx.notify(); + } + + project::Event::DiagnosticsUpdated { .. } => { + this.summary = project.read(cx).diagnostic_summary(false, cx); + cx.notify(); + } + + _ => {} + }) + .detach(); + + Self { + summary: project.read(cx).diagnostic_summary(false, cx), + active_editor: None, + workspace: workspace.weak_handle(), + current_diagnostic: None, + _observe_active_editor: None, + } + } + + fn go_to_next_diagnostic(&mut self, cx: &mut ViewContext) { + if let Some(editor) = self.active_editor.as_ref().and_then(|e| e.upgrade()) { + editor.update(cx, |editor, cx| { + editor.go_to_diagnostic_impl(editor::Direction::Next, cx); + }) + } + } + + fn update(&mut self, editor: View, cx: &mut ViewContext) { + let editor = editor.read(cx); + let buffer = editor.buffer().read(cx); + let cursor_position = editor.selections.newest::(cx).head(); + let new_diagnostic = buffer + .snapshot(cx) + .diagnostics_in_range::<_, usize>(cursor_position..cursor_position, false) + .filter(|entry| !entry.range.is_empty()) + .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) + .map(|entry| entry.diagnostic); + if new_diagnostic != self.current_diagnostic { + self.current_diagnostic = new_diagnostic; + cx.notify(); + } + } +} + +impl EventEmitter for DiagnosticIndicator {} + +impl StatusItemView for DiagnosticIndicator { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + cx: &mut ViewContext, + ) { + if let Some(editor) = active_pane_item.and_then(|item| item.downcast::()) { + self.active_editor = Some(editor.downgrade()); + self._observe_active_editor = Some(cx.observe(&editor, Self::update)); + self.update(editor, cx); + } else { + self.active_editor = None; + self.current_diagnostic = None; + self._observe_active_editor = None; + } + cx.notify(); + } +} diff --git a/crates/diagnostics/src/project_diagnostics_settings.rs b/crates/diagnostics/src/project_diagnostics_settings.rs new file mode 100644 index 0000000..55879d0 --- /dev/null +++ b/crates/diagnostics/src/project_diagnostics_settings.rs @@ -0,0 +1,28 @@ +use anyhow::Result; +use gpui::AppContext; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug)] +pub struct ProjectDiagnosticsSettings { + pub include_warnings: bool, +} + +/// Diagnostics configuration. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct ProjectDiagnosticsSettingsContent { + /// Whether to show warnings or not by default. 
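+ ///
+ /// This only sets the initial state of the project diagnostics view; warnings can
+ /// still be toggled per view from the toolbar (see `toggle_warnings`). As an
+ /// illustrative sketch only (assuming the `diagnostics` key declared by `KEY` below,
+ /// not something introduced by this change), a user `settings.json` fragment might
+ /// look like `{ "diagnostics": { "include_warnings": false } }`.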
+ /// + /// Default: true + include_warnings: Option, +} + +impl Settings for ProjectDiagnosticsSettings { + const KEY: Option<&'static str> = Some("diagnostics"); + type FileContent = ProjectDiagnosticsSettingsContent; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + sources.json_merge() + } +} diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs new file mode 100644 index 0000000..7f4deba --- /dev/null +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -0,0 +1,96 @@ +use crate::ProjectDiagnosticsEditor; +use gpui::{EventEmitter, ParentElement, Render, ViewContext, WeakView}; +use ui::prelude::*; +use ui::{IconButton, IconName, Tooltip}; +use workspace::{item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView}; + +pub struct ToolbarControls { + editor: Option>, +} + +impl Render for ToolbarControls { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let mut include_warnings = false; + let mut has_stale_excerpts = false; + let mut is_updating = false; + + if let Some(editor) = self.editor.as_ref().and_then(|editor| editor.upgrade()) { + let editor = editor.read(cx); + + include_warnings = editor.include_warnings; + has_stale_excerpts = !editor.paths_to_update.is_empty(); + is_updating = editor.update_paths_tx.len() > 0 + || editor + .project + .read(cx) + .language_servers_running_disk_based_diagnostics() + .next() + .is_some(); + } + + let tooltip = if include_warnings { + "Exclude Warnings" + } else { + "Include Warnings" + }; + + h_flex() + .when(has_stale_excerpts, |div| { + div.child( + IconButton::new("update-excerpts", IconName::Update) + .icon_color(Color::Info) + .disabled(is_updating) + .tooltip(move |cx| Tooltip::text("Update excerpts", cx)) + .on_click(cx.listener(|this, _, cx| { + if let Some(editor) = + this.editor.as_ref().and_then(|editor| editor.upgrade()) + { + editor.update(cx, |editor, _| { + editor.enqueue_update_stale_excerpts(None); + }); + } + })), + ) + }) + .child( + IconButton::new("toggle-warnings", IconName::ExclamationTriangle) + .tooltip(move |cx| Tooltip::text(tooltip, cx)) + .on_click(cx.listener(|this, _, cx| { + if let Some(editor) = + this.editor.as_ref().and_then(|editor| editor.upgrade()) + { + editor.update(cx, |editor, cx| { + editor.toggle_warnings(&Default::default(), cx); + }); + } + })), + ) + } +} + +impl EventEmitter for ToolbarControls {} + +impl ToolbarItemView for ToolbarControls { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + _: &mut ViewContext, + ) -> ToolbarItemLocation { + if let Some(pane_item) = active_pane_item.as_ref() { + if let Some(editor) = pane_item.downcast::() { + self.editor = Some(editor.downgrade()); + ToolbarItemLocation::PrimaryRight + } else { + ToolbarItemLocation::Hidden + } + } else { + ToolbarItemLocation::Hidden + } + } +} + +impl ToolbarControls { + pub fn new() -> Self { + ToolbarControls { editor: None } + } +} diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml new file mode 100644 index 0000000..f01b392 --- /dev/null +++ b/crates/editor/Cargo.toml @@ -0,0 +1,95 @@ +[package] +name = "editor" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/editor.rs" +doctest = false + +[features] +test-support = [ + "text/test-support", + "language/test-support", + "gpui/test-support", + "multi_buffer/test-support", + "project/test-support", + "util/test-support", + 
"workspace/test-support", + "tree-sitter-rust", + "tree-sitter-typescript", + "tree-sitter-html" +] + +[dependencies] +aho-corasick = "1.1" +anyhow.workspace = true +client.workspace = true +clock.workspace = true +collections.workspace = true +convert_case = "0.6.0" +db.workspace = true +emojis.workspace = true +futures.workspace = true +fuzzy.workspace = true +git.workspace = true +gpui.workspace = true +http.workspace = true +indoc.workspace = true +itertools.workspace = true +language.workspace = true +lazy_static.workspace = true +linkify.workspace = true +log.workspace = true +lsp.workspace = true +multi_buffer.workspace = true +ordered-float.workspace = true +parking_lot.workspace = true +project.workspace = true +rand.workspace = true +rpc.workspace = true +schemars.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +smallvec.workspace = true +smol.workspace = true +snippet.workspace = true +sum_tree.workspace = true +task.workspace = true +text.workspace = true +time.workspace = true +time_format.workspace = true +theme.workspace = true +tree-sitter-html = { workspace = true, optional = true } +tree-sitter-rust = { workspace = true, optional = true } +tree-sitter-typescript = { workspace = true, optional = true } +ui.workspace = true +url.workspace = true +util.workspace = true +workspace.workspace = true + +[dev-dependencies] +ctor.workspace = true +env_logger.workspace = true +gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +lsp = { workspace = true, features = ["test-support"] } +multi_buffer = { workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } +release_channel.workspace = true +rand.workspace = true +settings = { workspace = true, features = ["test-support"] } +text = { workspace = true, features = ["test-support"] } +tree-sitter-html.workspace = true +tree-sitter-rust.workspace = true +tree-sitter-typescript.workspace = true +unindent.workspace = true +util = { workspace = true, features = ["test-support"] } +workspace = { workspace = true, features = ["test-support"] } +http = { workspace = true, features = ["test-support"] } diff --git a/crates/editor/LICENSE-GPL b/crates/editor/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/editor/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs new file mode 100644 index 0000000..9d36f07 --- /dev/null +++ b/crates/editor/src/actions.rs @@ -0,0 +1,283 @@ +//! This module contains all actions supported by [`Editor`]. 
+use super::*; +use util::serde::default_true; + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct SelectNext { + #[serde(default)] + pub replace_newest: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct SelectPrevious { + #[serde(default)] + pub replace_newest: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct MoveToBeginningOfLine { + #[serde(default = "default_true")] + pub(super) stop_at_soft_wraps: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct SelectToBeginningOfLine { + #[serde(default)] + pub(super) stop_at_soft_wraps: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct MovePageUp { + #[serde(default)] + pub(super) center_cursor: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct MovePageDown { + #[serde(default)] + pub(super) center_cursor: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct MoveToEndOfLine { + #[serde(default = "default_true")] + pub(super) stop_at_soft_wraps: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct SelectToEndOfLine { + #[serde(default)] + pub(super) stop_at_soft_wraps: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct ToggleCodeActions { + // Display row from which the action was deployed. + #[serde(default)] + pub deployed_from_indicator: Option, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct ConfirmCompletion { + #[serde(default)] + pub item_ix: Option, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct ConfirmCodeAction { + #[serde(default)] + pub item_ix: Option, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct ToggleComments { + #[serde(default)] + pub advance_downwards: bool, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct FoldAt { + pub buffer_row: MultiBufferRow, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct UnfoldAt { + pub buffer_row: MultiBufferRow, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct MoveUpByLines { + #[serde(default)] + pub(super) lines: u32, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct MoveDownByLines { + #[serde(default)] + pub(super) lines: u32, +} +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct SelectUpByLines { + #[serde(default)] + pub(super) lines: u32, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct SelectDownByLines { + #[serde(default)] + pub(super) lines: u32, +} + +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct ExpandExcerpts { + #[serde(default)] + pub(super) lines: u32, +} + +impl_actions!( + editor, + [ + ConfirmCodeAction, + ConfirmCompletion, + ExpandExcerpts, + FoldAt, + MoveDownByLines, + MovePageDown, + MovePageUp, + MoveToBeginningOfLine, + MoveToEndOfLine, + MoveUpByLines, + SelectDownByLines, + SelectNext, + SelectPrevious, + SelectToBeginningOfLine, + SelectToEndOfLine, + SelectUpByLines, + ToggleCodeActions, + ToggleComments, + UnfoldAt, + ] +); + +gpui::actions!( + editor, + [ + AcceptPartialCopilotSuggestion, + AcceptPartialInlineCompletion, + AddSelectionAbove, + AddSelectionBelow, + Backspace, + Cancel, + ConfirmRename, + ContextMenuFirst, + ContextMenuLast, + ContextMenuNext, + ContextMenuPrev, + ConvertToKebabCase, + ConvertToLowerCamelCase, + ConvertToLowerCase, + ConvertToOppositeCase, + ConvertToSnakeCase, + ConvertToTitleCase, + ConvertToUpperCamelCase, + ConvertToUpperCase, + Copy, + CopyHighlightJson, + 
CopyPath, + CopyPermalinkToLine, + CopyRelativePath, + Cut, + CutToEndOfLine, + Delete, + DeleteLine, + DeleteToBeginningOfLine, + DeleteToEndOfLine, + DeleteToNextSubwordEnd, + DeleteToNextWordEnd, + DeleteToPreviousSubwordStart, + DeleteToPreviousWordStart, + DisplayCursorNames, + DuplicateLineDown, + DuplicateLineUp, + ExpandAllHunkDiffs, + ExpandMacroRecursively, + FindAllReferences, + Fold, + FoldSelectedRanges, + Format, + GoToDefinition, + GoToDefinitionSplit, + GoToDiagnostic, + GoToHunk, + GoToImplementation, + GoToImplementationSplit, + GoToPrevDiagnostic, + GoToPrevHunk, + GoToTypeDefinition, + GoToTypeDefinitionSplit, + HalfPageDown, + HalfPageUp, + Hover, + Indent, + JoinLines, + LineDown, + LineUp, + MoveDown, + MoveLeft, + MoveLineDown, + MoveLineUp, + MoveRight, + MoveToBeginning, + MoveToEnclosingBracket, + MoveToEnd, + MoveToEndOfParagraph, + MoveToNextSubwordEnd, + MoveToNextWordEnd, + MoveToPreviousSubwordStart, + MoveToPreviousWordStart, + MoveToStartOfParagraph, + MoveUp, + Newline, + NewlineAbove, + NewlineBelow, + NextInlineCompletion, + NextScreen, + OpenExcerpts, + OpenExcerptsSplit, + OpenPermalinkToLine, + OpenUrl, + Outdent, + PageDown, + PageUp, + Paste, + PreviousInlineCompletion, + Redo, + RedoSelection, + Rename, + RestartLanguageServer, + RevealInFinder, + ReverseLines, + RevertSelectedHunks, + ScrollCursorBottom, + ScrollCursorCenter, + ScrollCursorTop, + SelectAll, + SelectAllMatches, + SelectDown, + SelectLargerSyntaxNode, + SelectLeft, + SelectLine, + SelectRight, + SelectSmallerSyntaxNode, + SelectToBeginning, + SelectToEnd, + SelectToEndOfParagraph, + SelectToNextSubwordEnd, + SelectToNextWordEnd, + SelectToPreviousSubwordStart, + SelectToPreviousWordStart, + SelectToStartOfParagraph, + SelectUp, + ShowCharacterPalette, + ShowCompletions, + ShowInlineCompletion, + ShuffleLines, + SortLinesCaseInsensitive, + SortLinesCaseSensitive, + SplitSelectionIntoLines, + Tab, + TabPrev, + ToggleGitBlame, + ToggleGitBlameInline, + ToggleHunkDiff, + ToggleInlayHints, + ToggleLineNumbers, + ToggleSoftWrap, + Transpose, + Undo, + UndoSelection, + UnfoldLines, + UniqueLinesCaseInsensitive, + UniqueLinesCaseSensitive, + ] +); diff --git a/crates/editor/src/blame_entry_tooltip.rs b/crates/editor/src/blame_entry_tooltip.rs new file mode 100644 index 0000000..7864338 --- /dev/null +++ b/crates/editor/src/blame_entry_tooltip.rs @@ -0,0 +1,276 @@ +use futures::Future; +use git::blame::BlameEntry; +use git::Oid; +use gpui::{ + Asset, Element, ParentElement, Render, ScrollHandle, StatefulInteractiveElement, WeakView, + WindowContext, +}; +use settings::Settings; +use std::hash::Hash; +use theme::{ActiveTheme, ThemeSettings}; +use ui::{ + div, h_flex, tooltip_container, v_flex, Avatar, Button, ButtonStyle, Clickable as _, Color, + FluentBuilder, Icon, IconName, IconPosition, InteractiveElement as _, IntoElement, + SharedString, Styled as _, ViewContext, +}; +use ui::{ButtonCommon, Disableable as _}; +use workspace::Workspace; + +use crate::git::blame::{CommitDetails, GitRemote}; +use crate::EditorStyle; + +struct CommitAvatar<'a> { + details: Option<&'a CommitDetails>, + sha: Oid, +} + +impl<'a> CommitAvatar<'a> { + fn new(details: Option<&'a CommitDetails>, sha: Oid) -> Self { + Self { details, sha } + } +} + +impl<'a> CommitAvatar<'a> { + fn render(&'a self, cx: &mut ViewContext) -> Option { + let remote = self + .details + .and_then(|details| details.remote.as_ref()) + .filter(|remote| remote.host_supports_avatars())?; + + let avatar_url = 
CommitAvatarAsset::new(remote.clone(), self.sha); + + let element = match cx.use_cached_asset::(&avatar_url) { + // Loading or no avatar found + None | Some(None) => Icon::new(IconName::Person) + .color(Color::Muted) + .into_element() + .into_any(), + // Found + Some(Some(url)) => Avatar::new(url.to_string()).into_element().into_any(), + }; + Some(element) + } +} + +#[derive(Clone, Debug)] +struct CommitAvatarAsset { + sha: Oid, + remote: GitRemote, +} + +impl Hash for CommitAvatarAsset { + fn hash(&self, state: &mut H) { + self.sha.hash(state); + self.remote.host.name().hash(state); + } +} + +impl CommitAvatarAsset { + fn new(remote: GitRemote, sha: Oid) -> Self { + Self { remote, sha } + } +} + +impl Asset for CommitAvatarAsset { + type Source = Self; + type Output = Option; + + fn load( + source: Self::Source, + cx: &mut WindowContext, + ) -> impl Future + Send + 'static { + let client = cx.http_client(); + + async move { + source + .remote + .avatar_url(source.sha, client) + .await + .map(|url| SharedString::from(url.to_string())) + } + } +} + +pub(crate) struct BlameEntryTooltip { + blame_entry: BlameEntry, + details: Option, + editor_style: EditorStyle, + workspace: Option>, + scroll_handle: ScrollHandle, +} + +impl BlameEntryTooltip { + pub(crate) fn new( + blame_entry: BlameEntry, + details: Option, + style: &EditorStyle, + workspace: Option>, + ) -> Self { + Self { + editor_style: style.clone(), + blame_entry, + details, + workspace, + scroll_handle: ScrollHandle::new(), + } + } +} + +impl Render for BlameEntryTooltip { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let avatar = CommitAvatar::new(self.details.as_ref(), self.blame_entry.sha).render(cx); + + let author = self + .blame_entry + .author + .clone() + .unwrap_or("".to_string()); + + let author_email = self.blame_entry.author_mail.clone(); + + let short_commit_id = self.blame_entry.sha.display_short(); + let absolute_timestamp = blame_entry_absolute_timestamp(&self.blame_entry, cx); + + let message = self + .details + .as_ref() + .map(|details| { + crate::render_parsed_markdown( + "blame-message", + &details.parsed_message, + &self.editor_style, + self.workspace.clone(), + cx, + ) + .into_any() + }) + .unwrap_or("".into_any()); + + let pull_request = self + .details + .as_ref() + .and_then(|details| details.pull_request.clone()); + + let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; + let message_max_height = cx.line_height() * 12 + (ui_font_size / 0.4); + + tooltip_container(cx, move |this, cx| { + this.occlude() + .on_mouse_move(|_, cx| cx.stop_propagation()) + .child( + v_flex() + .w(gpui::rems(30.)) + .gap_4() + .child( + h_flex() + .gap_x_2() + .overflow_x_hidden() + .flex_wrap() + .children(avatar) + .child(author) + .when_some(author_email, |this, author_email| { + this.child( + div() + .text_color(cx.theme().colors().text_muted) + .child(author_email), + ) + }) + .border_b_1() + .border_color(cx.theme().colors().border), + ) + .child( + div() + .id("inline-blame-commit-message") + .occlude() + .child(message) + .max_h(message_max_height) + .overflow_y_scroll() + .track_scroll(&self.scroll_handle), + ) + .child( + h_flex() + .text_color(cx.theme().colors().text_muted) + .w_full() + .justify_between() + .child(absolute_timestamp) + .child( + h_flex() + .gap_2() + .when_some(pull_request, |this, pr| { + this.child( + Button::new( + "pull-request-button", + format!("#{}", pr.number), + ) + .color(Color::Muted) + .icon(IconName::PullRequest) + .icon_color(Color::Muted) + 
.icon_position(IconPosition::Start) + .style(ButtonStyle::Transparent) + .on_click(move |_, cx| { + cx.stop_propagation(); + cx.open_url(pr.url.as_str()) + }), + ) + }) + .child( + Button::new( + "commit-sha-button", + short_commit_id.clone(), + ) + .style(ButtonStyle::Transparent) + .color(Color::Muted) + .icon(IconName::FileGit) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .disabled( + self.details.as_ref().map_or(true, |details| { + details.permalink.is_none() + }), + ) + .when_some( + self.details + .as_ref() + .and_then(|details| details.permalink.clone()), + |this, url| { + this.on_click(move |_, cx| { + cx.stop_propagation(); + cx.open_url(url.as_str()) + }) + }, + ), + ), + ), + ), + ) + }) + } +} + +fn blame_entry_timestamp( + blame_entry: &BlameEntry, + format: time_format::TimestampFormat, + cx: &WindowContext, +) -> String { + match blame_entry.author_offset_date_time() { + Ok(timestamp) => time_format::format_localized_timestamp( + timestamp, + time::OffsetDateTime::now_utc(), + cx.local_timezone(), + format, + ), + Err(_) => "Error parsing date".to_string(), + } +} + +pub fn blame_entry_relative_timestamp(blame_entry: &BlameEntry, cx: &WindowContext) -> String { + blame_entry_timestamp(blame_entry, time_format::TimestampFormat::Relative, cx) +} + +fn blame_entry_absolute_timestamp(blame_entry: &BlameEntry, cx: &WindowContext) -> String { + blame_entry_timestamp( + blame_entry, + time_format::TimestampFormat::MediumAbsolute, + cx, + ) +} diff --git a/crates/editor/src/blink_manager.rs b/crates/editor/src/blink_manager.rs new file mode 100644 index 0000000..e3a8ce6 --- /dev/null +++ b/crates/editor/src/blink_manager.rs @@ -0,0 +1,107 @@ +use crate::EditorSettings; +use gpui::ModelContext; +use settings::Settings; +use settings::SettingsStore; +use smol::Timer; +use std::time::Duration; + +pub struct BlinkManager { + blink_interval: Duration, + + blink_epoch: usize, + blinking_paused: bool, + visible: bool, + enabled: bool, +} + +impl BlinkManager { + pub fn new(blink_interval: Duration, cx: &mut ModelContext) -> Self { + // Make sure we blink the cursors if the setting is re-enabled + cx.observe_global::(move |this, cx| { + this.blink_cursors(this.blink_epoch, cx) + }) + .detach(); + + Self { + blink_interval, + + blink_epoch: 0, + blinking_paused: false, + visible: true, + enabled: false, + } + } + + fn next_blink_epoch(&mut self) -> usize { + self.blink_epoch += 1; + self.blink_epoch + } + + pub fn pause_blinking(&mut self, cx: &mut ModelContext) { + self.show_cursor(cx); + + let epoch = self.next_blink_epoch(); + let interval = self.blink_interval; + cx.spawn(|this, mut cx| async move { + Timer::after(interval).await; + this.update(&mut cx, |this, cx| this.resume_cursor_blinking(epoch, cx)) + }) + .detach(); + } + + fn resume_cursor_blinking(&mut self, epoch: usize, cx: &mut ModelContext) { + if epoch == self.blink_epoch { + self.blinking_paused = false; + self.blink_cursors(epoch, cx); + } + } + + fn blink_cursors(&mut self, epoch: usize, cx: &mut ModelContext) { + if EditorSettings::get_global(cx).cursor_blink { + if epoch == self.blink_epoch && self.enabled && !self.blinking_paused { + self.visible = !self.visible; + cx.notify(); + + let epoch = self.next_blink_epoch(); + let interval = self.blink_interval; + cx.spawn(|this, mut cx| async move { + Timer::after(interval).await; + if let Some(this) = this.upgrade() { + this.update(&mut cx, |this, cx| this.blink_cursors(epoch, cx)) + .ok(); + } + }) + .detach(); + } + } else { + self.show_cursor(cx); + 
} + } + + pub fn show_cursor(&mut self, cx: &mut ModelContext<'_, BlinkManager>) { + if !self.visible { + self.visible = true; + cx.notify(); + } + } + + pub fn enable(&mut self, cx: &mut ModelContext) { + if self.enabled { + return; + } + + self.enabled = true; + // Set cursors as invisible and start blinking: this causes cursors + // to be visible during the next render. + self.visible = false; + self.blink_cursors(self.blink_epoch, cx); + } + + pub fn disable(&mut self, _cx: &mut ModelContext) { + self.enabled = false; + } + + pub fn visible(&self) -> bool { + self.visible + } +} diff --git a/crates/editor/src/debounced_delay.rs b/crates/editor/src/debounced_delay.rs new file mode 100644 index 0000000..b9d8ebf --- /dev/null +++ b/crates/editor/src/debounced_delay.rs @@ -0,0 +1,49 @@ +use std::time::Duration; + +use futures::{channel::oneshot, FutureExt}; +use gpui::{Task, ViewContext}; + +use crate::Editor; + +pub struct DebouncedDelay { + task: Option>, + cancel_channel: Option>, +} + +impl DebouncedDelay { + pub fn new() -> DebouncedDelay { + DebouncedDelay { + task: None, + cancel_channel: None, + } + } + + pub fn fire_new(&mut self, delay: Duration, cx: &mut ViewContext, func: F) + where + F: 'static + Send + FnOnce(&mut Editor, &mut ViewContext) -> Task<()>, + { + if let Some(channel) = self.cancel_channel.take() { + _ = channel.send(()); + } + + let (sender, mut receiver) = oneshot::channel::<()>(); + self.cancel_channel = Some(sender); + + let previous_task = self.task.take(); + self.task = Some(cx.spawn(move |model, mut cx| async move { + let mut timer = cx.background_executor().timer(delay).fuse(); + if let Some(previous_task) = previous_task { + previous_task.await; + } + + futures::select_biased! { + _ = receiver => return, + _ = timer => {} + } + + if let Ok(task) = model.update(&mut cx, |project, cx| (func)(project, cx)) { + task.await; + } + })); + } +} diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs new file mode 100644 index 0000000..c4b047b --- /dev/null +++ b/crates/editor/src/display_map.rs @@ -0,0 +1,1887 @@ +//! This module defines where the text should be displayed in an [`Editor`][Editor]. +//! +//! Not literally though - rendering, layout and all that jazz is a responsibility of [`EditorElement`][EditorElement]. +//! Instead, [`DisplayMap`] decides where Inlays/Inlay hints are displayed, when +//! to apply a soft wrap, where to add fold indicators, whether there are any tabs in the buffer that +//! we display as spaces and where to display custom blocks (like diagnostics). +//! Seems like a lot? That's because it is. [`DisplayMap`] is conceptually made up +//! of several smaller structures that form a hierarchy (starting at the bottom): +//! - [`InlayMap`] that decides where the [`Inlay`]s should be displayed. +//! - [`FoldMap`] that decides where the fold indicators should be; it also tracks parts of a source file that are currently folded. +//! - [`TabMap`] that keeps track of hard tabs in a buffer. +//! - [`WrapMap`] that handles soft wrapping. +//! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer. +//! - [`DisplayMap`] that adds background highlights to the regions of text. +//! Each one of those builds on top of preceding map. +//! +//! [Editor]: crate::Editor +//! 
[EditorElement]: crate::element::EditorElement + +mod block_map; +mod fold_map; +mod inlay_map; +mod tab_map; +mod wrap_map; + +use crate::{hover_links::InlayHighlight, movement::TextLayoutDetails, InlayId}; +use crate::{EditorStyle, RowExt}; +pub use block_map::{BlockMap, BlockPoint}; +use collections::{HashMap, HashSet}; +use fold_map::FoldMap; +use gpui::{Font, HighlightStyle, Hsla, LineLayout, Model, ModelContext, Pixels, UnderlineStyle}; +use inlay_map::InlayMap; +use language::{ + language_settings::language_settings, OffsetUtf16, Point, Subscription as BufferSubscription, +}; +use lsp::DiagnosticSeverity; +use multi_buffer::{ + Anchor, AnchorRangeExt, MultiBuffer, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, + ToOffset, ToPoint, +}; +use serde::Deserialize; +use std::{any::TypeId, borrow::Cow, fmt::Debug, num::NonZeroU32, ops::Range, sync::Arc}; +use sum_tree::{Bias, TreeMap}; +use tab_map::TabMap; + +use wrap_map::WrapMap; + +pub use block_map::{ + BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockDisposition, BlockId, + BlockProperties, BlockStyle, RenderBlock, TransformBlock, +}; + +use self::block_map::BlockRow; +pub use self::fold_map::{Fold, FoldId, FoldPoint}; +pub use self::inlay_map::{InlayOffset, InlayPoint}; +pub(crate) use inlay_map::Inlay; + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum FoldStatus { + Folded, + Foldable, +} + +const UNNECESSARY_CODE_FADE: f32 = 0.3; + +pub trait ToDisplayPoint { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint; +} + +type TextHighlights = TreeMap, Arc<(HighlightStyle, Vec>)>>; +type InlayHighlights = TreeMap>; + +/// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints, +/// folding, hard tabs, soft wrapping, custom blocks (like diagnostics), and highlighting. +/// +/// See the [module level documentation](self) for more information. +pub struct DisplayMap { + /// The buffer that we are displaying. + buffer: Model, + buffer_subscription: BufferSubscription, + /// Decides where the [`Inlay`]s should be displayed. + inlay_map: InlayMap, + /// Decides where the fold indicators should be and tracks parts of a source file that are currently folded. + fold_map: FoldMap, + /// Keeps track of hard tabs in a buffer. + tab_map: TabMap, + /// Handles soft wrapping. + wrap_map: Model, + /// Tracks custom blocks such as diagnostics that should be displayed within buffer. + block_map: BlockMap, + /// Regions of text that should be highlighted. + text_highlights: TextHighlights, + /// Regions of inlays that should be highlighted. 
+ inlay_highlights: InlayHighlights, + pub clip_at_line_ends: bool, +} + +impl DisplayMap { + pub fn new( + buffer: Model, + font: Font, + font_size: Pixels, + wrap_width: Option, + buffer_header_height: u8, + excerpt_header_height: u8, + cx: &mut ModelContext, + ) -> Self { + let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + + let tab_size = Self::tab_size(&buffer, cx); + let (inlay_map, snapshot) = InlayMap::new(buffer.read(cx).snapshot(cx)); + let (fold_map, snapshot) = FoldMap::new(snapshot); + let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); + let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx); + let block_map = BlockMap::new(snapshot, buffer_header_height, excerpt_header_height); + cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach(); + DisplayMap { + buffer, + buffer_subscription, + fold_map, + inlay_map, + tab_map, + wrap_map, + block_map, + text_highlights: Default::default(), + inlay_highlights: Default::default(), + clip_at_line_ends: false, + } + } + + pub fn snapshot(&mut self, cx: &mut ModelContext) -> DisplaySnapshot { + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let edits = self.buffer_subscription.consume().into_inner(); + let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits); + let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits); + let tab_size = Self::tab_size(&self.buffer, cx); + let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size); + let (wrap_snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx)); + let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits); + + DisplaySnapshot { + buffer_snapshot: self.buffer.read(cx).snapshot(cx), + fold_snapshot, + inlay_snapshot, + tab_snapshot, + wrap_snapshot, + block_snapshot, + text_highlights: self.text_highlights.clone(), + inlay_highlights: self.inlay_highlights.clone(), + clip_at_line_ends: self.clip_at_line_ends, + } + } + + pub fn set_state(&mut self, other: &DisplaySnapshot, cx: &mut ModelContext) { + self.fold( + other + .folds_in_range(0..other.buffer_snapshot.len()) + .map(|fold| fold.range.to_offset(&other.buffer_snapshot)), + cx, + ); + } + + pub fn fold( + &mut self, + ranges: impl IntoIterator>, + cx: &mut ModelContext, + ) { + let snapshot = self.buffer.read(cx).snapshot(cx); + let edits = self.buffer_subscription.consume().into_inner(); + let tab_size = Self::tab_size(&self.buffer, cx); + let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); + let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + self.block_map.read(snapshot, edits); + let (snapshot, edits) = fold_map.fold(ranges); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + self.block_map.read(snapshot, edits); + } + + pub fn unfold( + &mut self, + ranges: impl IntoIterator>, + inclusive: bool, + cx: &mut ModelContext, + ) { + let snapshot = self.buffer.read(cx).snapshot(cx); + let edits = self.buffer_subscription.consume().into_inner(); + let tab_size = Self::tab_size(&self.buffer, cx); + let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); + let (mut fold_map, snapshot, edits) = 
self.fold_map.write(snapshot, edits); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + self.block_map.read(snapshot, edits); + let (snapshot, edits) = fold_map.unfold(ranges, inclusive); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + self.block_map.read(snapshot, edits); + } + + pub fn insert_blocks( + &mut self, + blocks: impl IntoIterator>, + cx: &mut ModelContext, + ) -> Vec { + let snapshot = self.buffer.read(cx).snapshot(cx); + let edits = self.buffer_subscription.consume().into_inner(); + let tab_size = Self::tab_size(&self.buffer, cx); + let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); + let (snapshot, edits) = self.fold_map.read(snapshot, edits); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + let mut block_map = self.block_map.write(snapshot, edits); + block_map.insert(blocks) + } + + pub fn replace_blocks(&mut self, styles: HashMap) { + self.block_map.replace(styles); + } + + pub fn remove_blocks(&mut self, ids: HashSet, cx: &mut ModelContext) { + let snapshot = self.buffer.read(cx).snapshot(cx); + let edits = self.buffer_subscription.consume().into_inner(); + let tab_size = Self::tab_size(&self.buffer, cx); + let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); + let (snapshot, edits) = self.fold_map.read(snapshot, edits); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + let mut block_map = self.block_map.write(snapshot, edits); + block_map.remove(ids); + } + + pub fn highlight_text( + &mut self, + type_id: TypeId, + ranges: Vec>, + style: HighlightStyle, + ) { + self.text_highlights + .insert(Some(type_id), Arc::new((style, ranges))); + } + + pub(crate) fn highlight_inlays( + &mut self, + type_id: TypeId, + highlights: Vec, + style: HighlightStyle, + ) { + for highlight in highlights { + let update = self.inlay_highlights.update(&type_id, |highlights| { + highlights.insert(highlight.inlay, (style, highlight.clone())) + }); + if update.is_none() { + self.inlay_highlights.insert( + type_id, + TreeMap::from_ordered_entries([(highlight.inlay, (style, highlight))]), + ); + } + } + } + + pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range])> { + let highlights = self.text_highlights.get(&Some(type_id))?; + Some((highlights.0, &highlights.1)) + } + pub fn clear_highlights(&mut self, type_id: TypeId) -> bool { + let mut cleared = self.text_highlights.remove(&Some(type_id)).is_some(); + cleared |= self.inlay_highlights.remove(&type_id).is_some(); + cleared + } + + pub fn set_font(&self, font: Font, font_size: Pixels, cx: &mut ModelContext) -> bool { + self.wrap_map + .update(cx, |map, cx| map.set_font_with_size(font, font_size, cx)) + } + + pub fn set_fold_ellipses_color(&mut self, color: Hsla) -> bool { + self.fold_map.set_ellipses_color(color) + } + + pub fn set_wrap_width(&self, width: Option, cx: &mut ModelContext) -> bool { + self.wrap_map + .update(cx, |map, cx| map.set_wrap_width(width, cx)) + } + + pub(crate) fn current_inlays(&self) -> impl Iterator { + self.inlay_map.current_inlays() + } + + pub(crate) fn splice_inlays( + &mut self, + 
to_remove: Vec, + to_insert: Vec, + cx: &mut ModelContext, + ) { + if to_remove.is_empty() && to_insert.is_empty() { + return; + } + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let edits = self.buffer_subscription.consume().into_inner(); + let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits); + let (snapshot, edits) = self.fold_map.read(snapshot, edits); + let tab_size = Self::tab_size(&self.buffer, cx); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + self.block_map.read(snapshot, edits); + + let (snapshot, edits) = self.inlay_map.splice(to_remove, to_insert); + let (snapshot, edits) = self.fold_map.read(snapshot, edits); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self + .wrap_map + .update(cx, |map, cx| map.sync(snapshot, edits, cx)); + self.block_map.read(snapshot, edits); + } + + fn tab_size(buffer: &Model, cx: &mut ModelContext) -> NonZeroU32 { + let language = buffer + .read(cx) + .as_singleton() + .and_then(|buffer| buffer.read(cx).language()); + language_settings(language, None, cx).tab_size + } + + #[cfg(test)] + pub fn is_rewrapping(&self, cx: &gpui::AppContext) -> bool { + self.wrap_map.read(cx).is_rewrapping() + } +} + +#[derive(Debug, Default)] +pub(crate) struct Highlights<'a> { + pub text_highlights: Option<&'a TextHighlights>, + pub inlay_highlights: Option<&'a InlayHighlights>, + pub styles: HighlightStyles, +} + +#[derive(Default, Debug, Clone, Copy)] +pub struct HighlightStyles { + pub inlay_hint: Option, + pub suggestion: Option, +} + +pub struct HighlightedChunk<'a> { + pub chunk: &'a str, + pub style: Option, + pub is_tab: bool, +} + +#[derive(Clone)] +pub struct DisplaySnapshot { + pub buffer_snapshot: MultiBufferSnapshot, + pub fold_snapshot: fold_map::FoldSnapshot, + inlay_snapshot: inlay_map::InlaySnapshot, + tab_snapshot: tab_map::TabSnapshot, + wrap_snapshot: wrap_map::WrapSnapshot, + block_snapshot: block_map::BlockSnapshot, + text_highlights: TextHighlights, + inlay_highlights: InlayHighlights, + clip_at_line_ends: bool, +} + +impl DisplaySnapshot { + #[cfg(test)] + pub fn fold_count(&self) -> usize { + self.fold_snapshot.fold_count() + } + + pub fn is_empty(&self) -> bool { + self.buffer_snapshot.len() == 0 + } + + pub fn buffer_rows( + &self, + start_row: DisplayRow, + ) -> impl Iterator> + '_ { + self.block_snapshot + .buffer_rows(BlockRow(start_row.0)) + .map(|row| row.map(|row| MultiBufferRow(row.0))) + } + + pub fn max_buffer_row(&self) -> MultiBufferRow { + self.buffer_snapshot.max_buffer_row() + } + + pub fn prev_line_boundary(&self, mut point: MultiBufferPoint) -> (Point, DisplayPoint) { + loop { + let mut inlay_point = self.inlay_snapshot.to_inlay_point(point); + let mut fold_point = self.fold_snapshot.to_fold_point(inlay_point, Bias::Left); + fold_point.0.column = 0; + inlay_point = fold_point.to_inlay_point(&self.fold_snapshot); + point = self.inlay_snapshot.to_buffer_point(inlay_point); + + let mut display_point = self.point_to_display_point(point, Bias::Left); + *display_point.column_mut() = 0; + let next_point = self.display_point_to_point(display_point, Bias::Left); + if next_point == point { + return (point, display_point); + } + point = next_point; + } + } + + pub fn next_line_boundary(&self, mut point: MultiBufferPoint) -> (Point, DisplayPoint) { + loop { + let mut inlay_point = self.inlay_snapshot.to_inlay_point(point); + let mut 
fold_point = self.fold_snapshot.to_fold_point(inlay_point, Bias::Right); + fold_point.0.column = self.fold_snapshot.line_len(fold_point.row()); + inlay_point = fold_point.to_inlay_point(&self.fold_snapshot); + point = self.inlay_snapshot.to_buffer_point(inlay_point); + + let mut display_point = self.point_to_display_point(point, Bias::Right); + *display_point.column_mut() = self.line_len(display_point.row()); + let next_point = self.display_point_to_point(display_point, Bias::Right); + if next_point == point { + return (point, display_point); + } + point = next_point; + } + } + + // used by line_mode selections and tries to match vim behaviour + pub fn expand_to_line(&self, range: Range) -> Range { + let new_start = if range.start.row == 0 { + MultiBufferPoint::new(0, 0) + } else if range.start.row == self.max_buffer_row().0 + || (range.end.column > 0 && range.end.row == self.max_buffer_row().0) + { + MultiBufferPoint::new( + range.start.row - 1, + self.buffer_snapshot + .line_len(MultiBufferRow(range.start.row - 1)), + ) + } else { + self.prev_line_boundary(range.start).0 + }; + + let new_end = if range.end.column == 0 { + range.end + } else if range.end.row < self.max_buffer_row().0 { + self.buffer_snapshot + .clip_point(MultiBufferPoint::new(range.end.row + 1, 0), Bias::Left) + } else { + self.buffer_snapshot.max_point() + }; + + new_start..new_end + } + + fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint { + let inlay_point = self.inlay_snapshot.to_inlay_point(point); + let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias); + let tab_point = self.tab_snapshot.to_tab_point(fold_point); + let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point); + let block_point = self.block_snapshot.to_block_point(wrap_point); + DisplayPoint(block_point) + } + + fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point { + self.inlay_snapshot + .to_buffer_point(self.display_point_to_inlay_point(point, bias)) + } + + pub fn display_point_to_inlay_offset(&self, point: DisplayPoint, bias: Bias) -> InlayOffset { + self.inlay_snapshot + .to_offset(self.display_point_to_inlay_point(point, bias)) + } + + pub fn anchor_to_inlay_offset(&self, anchor: Anchor) -> InlayOffset { + self.inlay_snapshot + .to_inlay_offset(anchor.to_offset(&self.buffer_snapshot)) + } + + pub fn display_point_to_anchor(&self, point: DisplayPoint, bias: Bias) -> Anchor { + self.buffer_snapshot + .anchor_at(point.to_offset(&self, bias), bias) + } + + fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint { + let block_point = point.0; + let wrap_point = self.block_snapshot.to_wrap_point(block_point); + let tab_point = self.wrap_snapshot.to_tab_point(wrap_point); + let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0; + fold_point.to_inlay_point(&self.fold_snapshot) + } + + pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint { + let block_point = point.0; + let wrap_point = self.block_snapshot.to_wrap_point(block_point); + let tab_point = self.wrap_snapshot.to_tab_point(wrap_point); + self.tab_snapshot.to_fold_point(tab_point, bias).0 + } + + pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint { + let tab_point = self.tab_snapshot.to_tab_point(fold_point); + let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point); + let block_point = self.block_snapshot.to_block_point(wrap_point); + DisplayPoint(block_point) + } + + pub fn 
max_point(&self) -> DisplayPoint { + DisplayPoint(self.block_snapshot.max_point()) + } + + /// Returns text chunks starting at the given display row until the end of the file + pub fn text_chunks(&self, display_row: DisplayRow) -> impl Iterator { + self.block_snapshot + .chunks( + display_row.0..self.max_point().row().next_row().0, + false, + Highlights::default(), + ) + .map(|h| h.text) + } + + /// Returns text chunks starting at the end of the given display row in reverse until the start of the file + pub fn reverse_text_chunks(&self, display_row: DisplayRow) -> impl Iterator { + (0..=display_row.0).rev().flat_map(|row| { + self.block_snapshot + .chunks(row..row + 1, false, Highlights::default()) + .map(|h| h.text) + .collect::>() + .into_iter() + .rev() + }) + } + + pub fn chunks( + &self, + display_rows: Range, + language_aware: bool, + highlight_styles: HighlightStyles, + ) -> DisplayChunks<'_> { + self.block_snapshot.chunks( + display_rows.start.0..display_rows.end.0, + language_aware, + Highlights { + text_highlights: Some(&self.text_highlights), + inlay_highlights: Some(&self.inlay_highlights), + styles: highlight_styles, + }, + ) + } + + pub fn highlighted_chunks<'a>( + &'a self, + display_rows: Range, + language_aware: bool, + editor_style: &'a EditorStyle, + ) -> impl Iterator> { + self.chunks( + display_rows, + language_aware, + HighlightStyles { + inlay_hint: Some(editor_style.inlay_hints_style), + suggestion: Some(editor_style.suggestions_style), + }, + ) + .map(|chunk| { + let mut highlight_style = chunk + .syntax_highlight_id + .and_then(|id| id.style(&editor_style.syntax)); + + if let Some(chunk_highlight) = chunk.highlight_style { + if let Some(highlight_style) = highlight_style.as_mut() { + highlight_style.highlight(chunk_highlight); + } else { + highlight_style = Some(chunk_highlight); + } + } + + let mut diagnostic_highlight = HighlightStyle::default(); + + if chunk.is_unnecessary { + diagnostic_highlight.fade_out = Some(UNNECESSARY_CODE_FADE); + } + + if let Some(severity) = chunk.diagnostic_severity { + // Omit underlines for HINT/INFO diagnostics on 'unnecessary' code. 
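+ // (`lsp::DiagnosticSeverity` orders ERROR < WARNING < INFO < HINT, so the check
+ // below always underlines errors and warnings, and underlines INFO/HINT
+ // diagnostics only when the chunk is not marked as unnecessary.)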
+ if severity <= DiagnosticSeverity::WARNING || !chunk.is_unnecessary { + let diagnostic_color = + super::diagnostic_style(severity, true, &editor_style.status); + diagnostic_highlight.underline = Some(UnderlineStyle { + color: Some(diagnostic_color), + thickness: 1.0.into(), + wavy: true, + }); + } + } + + if let Some(highlight_style) = highlight_style.as_mut() { + highlight_style.highlight(diagnostic_highlight); + } else { + highlight_style = Some(diagnostic_highlight); + } + + HighlightedChunk { + chunk: chunk.text, + style: highlight_style, + is_tab: chunk.is_tab, + } + }) + } + + pub fn layout_row( + &self, + display_row: DisplayRow, + TextLayoutDetails { + text_system, + editor_style, + rem_size, + scroll_anchor: _, + visible_rows: _, + vertical_scroll_margin: _, + }: &TextLayoutDetails, + ) -> Arc { + let mut runs = Vec::new(); + let mut line = String::new(); + + let range = display_row..display_row.next_row(); + for chunk in self.highlighted_chunks(range, false, &editor_style) { + line.push_str(chunk.chunk); + + let text_style = if let Some(style) = chunk.style { + Cow::Owned(editor_style.text.clone().highlight(style)) + } else { + Cow::Borrowed(&editor_style.text) + }; + + runs.push(text_style.to_run(chunk.chunk.len())) + } + + if line.ends_with('\n') { + line.pop(); + if let Some(last_run) = runs.last_mut() { + last_run.len -= 1; + if last_run.len == 0 { + runs.pop(); + } + } + } + + let font_size = editor_style.text.font_size.to_pixels(*rem_size); + text_system + .layout_line(&line, font_size, &runs) + .expect("we expect the font to be loaded because it's rendered by the editor") + } + + pub fn x_for_display_point( + &self, + display_point: DisplayPoint, + text_layout_details: &TextLayoutDetails, + ) -> Pixels { + let line = self.layout_row(display_point.row(), text_layout_details); + line.x_for_index(display_point.column() as usize) + } + + pub fn display_column_for_x( + &self, + display_row: DisplayRow, + x: Pixels, + details: &TextLayoutDetails, + ) -> u32 { + let layout_line = self.layout_row(display_row, details); + layout_line.closest_index_for_x(x) as u32 + } + + pub fn display_chars_at( + &self, + mut point: DisplayPoint, + ) -> impl Iterator + '_ { + point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left)); + self.text_chunks(point.row()) + .flat_map(str::chars) + .skip_while({ + let mut column = 0; + move |char| { + let at_point = column >= point.column(); + column += char.len_utf8() as u32; + !at_point + } + }) + .map(move |ch| { + let result = (ch, point); + if ch == '\n' { + *point.row_mut() += 1; + *point.column_mut() = 0; + } else { + *point.column_mut() += ch.len_utf8() as u32; + } + result + }) + } + + pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator + '_ { + self.buffer_snapshot.chars_at(offset).map(move |ch| { + let ret = (ch, offset); + offset += ch.len_utf8(); + ret + }) + } + + pub fn reverse_buffer_chars_at( + &self, + mut offset: usize, + ) -> impl Iterator + '_ { + self.buffer_snapshot + .reversed_chars_at(offset) + .map(move |ch| { + offset -= ch.len_utf8(); + (ch, offset) + }) + } + + pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint { + let mut clipped = self.block_snapshot.clip_point(point.0, bias); + if self.clip_at_line_ends { + clipped = self.clip_at_line_end(DisplayPoint(clipped)).0 + } + DisplayPoint(clipped) + } + + pub fn clip_ignoring_line_ends(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint { + DisplayPoint(self.block_snapshot.clip_point(point.0, bias)) + } + + pub fn 
clip_at_line_end(&self, point: DisplayPoint) -> DisplayPoint { + let mut point = point.0; + if point.column == self.line_len(DisplayRow(point.row)) { + point.column = point.column.saturating_sub(1); + point = self.block_snapshot.clip_point(point, Bias::Left); + } + DisplayPoint(point) + } + + pub fn folds_in_range(&self, range: Range) -> impl Iterator + where + T: ToOffset, + { + self.fold_snapshot.folds_in_range(range) + } + + pub fn blocks_in_range( + &self, + rows: Range, + ) -> impl Iterator { + self.block_snapshot + .blocks_in_range(rows.start.0..rows.end.0) + .map(|(row, block)| (DisplayRow(row), block)) + } + + pub fn intersects_fold(&self, offset: T) -> bool { + self.fold_snapshot.intersects_fold(offset) + } + + pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool { + self.fold_snapshot.is_line_folded(buffer_row) + } + + pub fn is_block_line(&self, display_row: DisplayRow) -> bool { + self.block_snapshot.is_block_line(BlockRow(display_row.0)) + } + + pub fn soft_wrap_indent(&self, display_row: DisplayRow) -> Option { + let wrap_row = self + .block_snapshot + .to_wrap_point(BlockPoint::new(display_row.0, 0)) + .row(); + self.wrap_snapshot.soft_wrap_indent(wrap_row) + } + + pub fn text(&self) -> String { + self.text_chunks(DisplayRow(0)).collect() + } + + pub fn line(&self, display_row: DisplayRow) -> String { + let mut result = String::new(); + for chunk in self.text_chunks(display_row) { + if let Some(ix) = chunk.find('\n') { + result.push_str(&chunk[0..ix]); + break; + } else { + result.push_str(chunk); + } + } + result + } + + pub fn line_indent_for_buffer_row(&self, buffer_row: MultiBufferRow) -> (u32, bool) { + let (buffer, range) = self + .buffer_snapshot + .buffer_line_for_row(buffer_row) + .unwrap(); + + let mut indent_size = 0; + let mut is_blank = false; + for c in buffer.chars_at(Point::new(range.start.row, 0)) { + if c == ' ' || c == '\t' { + indent_size += 1; + } else { + if c == '\n' { + is_blank = true; + } + break; + } + } + + (indent_size, is_blank) + } + + pub fn line_len(&self, row: DisplayRow) -> u32 { + self.block_snapshot.line_len(BlockRow(row.0)) + } + + pub fn longest_row(&self) -> DisplayRow { + DisplayRow(self.block_snapshot.longest_row()) + } + + pub fn fold_for_line(&self, buffer_row: MultiBufferRow) -> Option { + if self.is_line_folded(buffer_row) { + Some(FoldStatus::Folded) + } else if self.is_foldable(buffer_row) { + Some(FoldStatus::Foldable) + } else { + None + } + } + + pub fn is_foldable(&self, buffer_row: MultiBufferRow) -> bool { + let max_row = self.buffer_snapshot.max_buffer_row(); + if buffer_row >= max_row { + return false; + } + + let (indent_size, is_blank) = self.line_indent_for_buffer_row(buffer_row); + if is_blank { + return false; + } + + for next_row in (buffer_row.0 + 1)..=max_row.0 { + let (next_indent_size, next_line_is_blank) = + self.line_indent_for_buffer_row(MultiBufferRow(next_row)); + if next_indent_size > indent_size { + return true; + } else if !next_line_is_blank { + break; + } + } + + false + } + + pub fn foldable_range(&self, buffer_row: MultiBufferRow) -> Option> { + let start = MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot.line_len(buffer_row)); + if self.is_foldable(MultiBufferRow(start.row)) + && !self.is_line_folded(MultiBufferRow(start.row)) + { + let (start_indent, _) = self.line_indent_for_buffer_row(buffer_row); + let max_point = self.buffer_snapshot.max_point(); + let mut end = None; + + for row in (buffer_row.0 + 1)..=max_point.row { + let (indent, is_blank) = 
self.line_indent_for_buffer_row(MultiBufferRow(row)); + if !is_blank && indent <= start_indent { + let prev_row = row - 1; + end = Some(Point::new( + prev_row, + self.buffer_snapshot.line_len(MultiBufferRow(prev_row)), + )); + break; + } + } + let end = end.unwrap_or(max_point); + Some(start..end) + } else { + None + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn text_highlight_ranges( + &self, + ) -> Option>)>> { + let type_id = TypeId::of::(); + self.text_highlights.get(&Some(type_id)).cloned() + } + + #[allow(unused)] + #[cfg(any(test, feature = "test-support"))] + pub(crate) fn inlay_highlights( + &self, + ) -> Option<&TreeMap> { + let type_id = TypeId::of::(); + self.inlay_highlights.get(&type_id) + } +} + +#[derive(Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct DisplayPoint(BlockPoint); + +impl Debug for DisplayPoint { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!( + "DisplayPoint({}, {})", + self.row().0, + self.column() + )) + } +} + +#[derive(Debug, Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq, Deserialize, Hash)] +#[serde(transparent)] +pub struct DisplayRow(pub u32); + +impl DisplayPoint { + pub fn new(row: DisplayRow, column: u32) -> Self { + Self(BlockPoint(Point::new(row.0, column))) + } + + pub fn zero() -> Self { + Self::new(DisplayRow(0), 0) + } + + pub fn is_zero(&self) -> bool { + self.0.is_zero() + } + + pub fn row(self) -> DisplayRow { + DisplayRow(self.0.row) + } + + pub fn column(self) -> u32 { + self.0.column + } + + pub fn row_mut(&mut self) -> &mut u32 { + &mut self.0.row + } + + pub fn column_mut(&mut self) -> &mut u32 { + &mut self.0.column + } + + pub fn to_point(self, map: &DisplaySnapshot) -> Point { + map.display_point_to_point(self, Bias::Left) + } + + pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize { + let wrap_point = map.block_snapshot.to_wrap_point(self.0); + let tab_point = map.wrap_snapshot.to_tab_point(wrap_point); + let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0; + let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot); + map.inlay_snapshot + .to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point)) + } +} + +impl ToDisplayPoint for usize { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { + map.point_to_display_point(self.to_point(&map.buffer_snapshot), Bias::Left) + } +} + +impl ToDisplayPoint for OffsetUtf16 { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { + self.to_offset(&map.buffer_snapshot).to_display_point(map) + } +} + +impl ToDisplayPoint for Point { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { + map.point_to_display_point(*self, Bias::Left) + } +} + +impl ToDisplayPoint for Anchor { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { + self.to_point(&map.buffer_snapshot).to_display_point(map) + } +} + +#[cfg(test)] +pub mod tests { + use super::*; + use crate::{ + movement, + test::{editor_test_context::EditorTestContext, marked_display_snapshot}, + }; + use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla}; + use language::{ + language_settings::{AllLanguageSettings, AllLanguageSettingsContent}, + Buffer, Language, LanguageConfig, LanguageMatcher, SelectionGoal, + }; + use project::Project; + use rand::{prelude::*, Rng}; + use settings::SettingsStore; + use smol::stream::StreamExt; + use std::{env, sync::Arc}; + use theme::{LoadThemes, SyntaxTheme}; + use 
util::test::{marked_text_ranges, sample_text}; + use Bias::*; + + #[gpui::test(iterations = 100)] + async fn test_random_display_map(cx: &mut gpui::TestAppContext, mut rng: StdRng) { + cx.background_executor.set_block_on_ticks(0..=50); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let mut tab_size = rng.gen_range(1..=4); + let buffer_start_excerpt_header_height = rng.gen_range(1..=5); + let excerpt_header_height = rng.gen_range(1..=5); + let font_size = px(14.0); + let max_wrap_width = 300.0; + let mut wrap_width = if rng.gen_bool(0.1) { + None + } else { + Some(px(rng.gen_range(0.0..=max_wrap_width))) + }; + + log::info!("tab size: {}", tab_size); + log::info!("wrap width: {:?}", wrap_width); + + cx.update(|cx| { + init_test(cx, |s| s.defaults.tab_size = NonZeroU32::new(tab_size)); + }); + + let buffer = cx.update(|cx| { + if rng.gen() { + let len = rng.gen_range(0..10); + let text = util::RandomCharIter::new(&mut rng) + .take(len) + .collect::(); + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(&mut rng, cx) + } + }); + + let map = cx.new_model(|cx| { + DisplayMap::new( + buffer.clone(), + font("Helvetica"), + font_size, + wrap_width, + buffer_start_excerpt_header_height, + excerpt_header_height, + cx, + ) + }); + let mut notifications = observe(&map, cx); + let mut fold_count = 0; + let mut blocks = Vec::new(); + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); + log::info!("fold text: {:?}", snapshot.fold_snapshot.text()); + log::info!("tab text: {:?}", snapshot.tab_snapshot.text()); + log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text()); + log::info!("block text: {:?}", snapshot.block_snapshot.text()); + log::info!("display text: {:?}", snapshot.text()); + + for _i in 0..operations { + match rng.gen_range(0..100) { + 0..=19 => { + wrap_width = if rng.gen_bool(0.2) { + None + } else { + Some(px(rng.gen_range(0.0..=max_wrap_width))) + }; + log::info!("setting wrap width to {:?}", wrap_width); + map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); + } + 20..=29 => { + let mut tab_sizes = vec![1, 2, 3, 4]; + tab_sizes.remove((tab_size - 1) as usize); + tab_size = *tab_sizes.choose(&mut rng).unwrap(); + log::info!("setting tab size to {:?}", tab_size); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |s| { + s.defaults.tab_size = NonZeroU32::new(tab_size); + }); + }); + }); + } + 30..=44 => { + map.update(cx, |map, cx| { + if rng.gen() || blocks.is_empty() { + let buffer = map.snapshot(cx).buffer_snapshot; + let block_properties = (0..rng.gen_range(1..=1)) + .map(|_| { + let position = + buffer.anchor_after(buffer.clip_offset( + rng.gen_range(0..=buffer.len()), + Bias::Left, + )); + + let disposition = if rng.gen() { + BlockDisposition::Above + } else { + BlockDisposition::Below + }; + let height = rng.gen_range(1..5); + log::info!( + "inserting block {:?} {:?} with height {}", + disposition, + position.to_point(&buffer), + height + ); + BlockProperties { + style: BlockStyle::Fixed, + position, + height, + disposition, + render: Box::new(|_| div().into_any()), + } + }) + .collect::>(); + blocks.extend(map.insert_blocks(block_properties, cx)); + } else { + blocks.shuffle(&mut rng); + let remove_count = rng.gen_range(1..=4.min(blocks.len())); + let block_ids_to_remove = (0..remove_count) + .map(|_| blocks.remove(rng.gen_range(0..blocks.len()))) + 
.collect(); + log::info!("removing block ids {:?}", block_ids_to_remove); + map.remove_blocks(block_ids_to_remove, cx); + } + }); + } + 45..=79 => { + let mut ranges = Vec::new(); + for _ in 0..rng.gen_range(1..=3) { + buffer.read_with(cx, |buffer, cx| { + let buffer = buffer.read(cx); + let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); + let start = buffer.clip_offset(rng.gen_range(0..=end), Left); + ranges.push(start..end); + }); + } + + if rng.gen() && fold_count > 0 { + log::info!("unfolding ranges: {:?}", ranges); + map.update(cx, |map, cx| { + map.unfold(ranges, true, cx); + }); + } else { + log::info!("folding ranges: {:?}", ranges); + map.update(cx, |map, cx| { + map.fold(ranges, cx); + }); + } + } + _ => { + buffer.update(cx, |buffer, cx| buffer.randomly_mutate(&mut rng, 5, cx)); + } + } + + if map.read_with(cx, |map, cx| map.is_rewrapping(cx)) { + notifications.next().await.unwrap(); + } + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + fold_count = snapshot.fold_count(); + log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); + log::info!("fold text: {:?}", snapshot.fold_snapshot.text()); + log::info!("tab text: {:?}", snapshot.tab_snapshot.text()); + log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text()); + log::info!("block text: {:?}", snapshot.block_snapshot.text()); + log::info!("display text: {:?}", snapshot.text()); + + // Line boundaries + let buffer = &snapshot.buffer_snapshot; + for _ in 0..5 { + let row = rng.gen_range(0..=buffer.max_point().row); + let column = rng.gen_range(0..=buffer.line_len(MultiBufferRow(row))); + let point = buffer.clip_point(Point::new(row, column), Left); + + let (prev_buffer_bound, prev_display_bound) = snapshot.prev_line_boundary(point); + let (next_buffer_bound, next_display_bound) = snapshot.next_line_boundary(point); + + assert!(prev_buffer_bound <= point); + assert!(next_buffer_bound >= point); + assert_eq!(prev_buffer_bound.column, 0); + assert_eq!(prev_display_bound.column(), 0); + if next_buffer_bound < buffer.max_point() { + assert_eq!(buffer.chars_at(next_buffer_bound).next(), Some('\n')); + } + + assert_eq!( + prev_display_bound, + prev_buffer_bound.to_display_point(&snapshot), + "row boundary before {:?}. reported buffer row boundary: {:?}", + point, + prev_buffer_bound + ); + assert_eq!( + next_display_bound, + next_buffer_bound.to_display_point(&snapshot), + "display row boundary after {:?}. reported buffer row boundary: {:?}", + point, + next_buffer_bound + ); + assert_eq!( + prev_buffer_bound, + prev_display_bound.to_point(&snapshot), + "row boundary before {:?}. reported display row boundary: {:?}", + point, + prev_display_bound + ); + assert_eq!( + next_buffer_bound, + next_display_bound.to_point(&snapshot), + "row boundary after {:?}. 
reported display row boundary: {:?}", + point, + next_display_bound + ); + } + + // Movement + let min_point = snapshot.clip_point(DisplayPoint::new(DisplayRow(0), 0), Left); + let max_point = snapshot.clip_point(snapshot.max_point(), Right); + for _ in 0..5 { + let row = rng.gen_range(0..=snapshot.max_point().row().0); + let column = rng.gen_range(0..=snapshot.line_len(DisplayRow(row))); + let point = snapshot.clip_point(DisplayPoint::new(DisplayRow(row), column), Left); + + log::info!("Moving from point {:?}", point); + + let moved_right = movement::right(&snapshot, point); + log::info!("Right {:?}", moved_right); + if point < max_point { + assert!(moved_right > point); + if point.column() == snapshot.line_len(point.row()) + || snapshot.soft_wrap_indent(point.row()).is_some() + && point.column() == snapshot.line_len(point.row()) - 1 + { + assert!(moved_right.row() > point.row()); + } + } else { + assert_eq!(moved_right, point); + } + + let moved_left = movement::left(&snapshot, point); + log::info!("Left {:?}", moved_left); + if point > min_point { + assert!(moved_left < point); + if point.column() == 0 { + assert!(moved_left.row() < point.row()); + } + } else { + assert_eq!(moved_left, point); + } + } + } + } + + #[gpui::test(retries = 5)] + async fn test_soft_wraps(cx: &mut gpui::TestAppContext) { + cx.background_executor + .set_block_on_ticks(usize::MAX..=usize::MAX); + cx.update(|cx| { + init_test(cx, |_| {}); + }); + + let mut cx = EditorTestContext::new(cx).await; + let editor = cx.editor.clone(); + let window = cx.window; + + _ = cx.update_window(window, |_, cx| { + let text_layout_details = + editor.update(cx, |editor, cx| editor.text_layout_details(cx)); + + let font_size = px(12.0); + let wrap_width = Some(px(64.)); + + let text = "one two three four five\nsix seven eight"; + let buffer = MultiBuffer::build_simple(text, cx); + let map = cx.new_model(|cx| { + DisplayMap::new( + buffer.clone(), + font("Helvetica"), + font_size, + wrap_width, + 1, + 1, + cx, + ) + }); + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!( + snapshot.text_chunks(DisplayRow(0)).collect::(), + "one two \nthree four \nfive\nsix seven \neight" + ); + assert_eq!( + snapshot.clip_point(DisplayPoint::new(DisplayRow(0), 8), Bias::Left), + DisplayPoint::new(DisplayRow(0), 7) + ); + assert_eq!( + snapshot.clip_point(DisplayPoint::new(DisplayRow(0), 8), Bias::Right), + DisplayPoint::new(DisplayRow(1), 0) + ); + assert_eq!( + movement::right(&snapshot, DisplayPoint::new(DisplayRow(0), 7)), + DisplayPoint::new(DisplayRow(1), 0) + ); + assert_eq!( + movement::left(&snapshot, DisplayPoint::new(DisplayRow(1), 0)), + DisplayPoint::new(DisplayRow(0), 7) + ); + + let x = snapshot + .x_for_display_point(DisplayPoint::new(DisplayRow(1), 10), &text_layout_details); + assert_eq!( + movement::up( + &snapshot, + DisplayPoint::new(DisplayRow(1), 10), + SelectionGoal::None, + false, + &text_layout_details, + ), + ( + DisplayPoint::new(DisplayRow(0), 7), + SelectionGoal::HorizontalPosition(x.0) + ) + ); + assert_eq!( + movement::down( + &snapshot, + DisplayPoint::new(DisplayRow(0), 7), + SelectionGoal::HorizontalPosition(x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(1), 10), + SelectionGoal::HorizontalPosition(x.0) + ) + ); + assert_eq!( + movement::down( + &snapshot, + DisplayPoint::new(DisplayRow(1), 10), + SelectionGoal::HorizontalPosition(x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(2), 4), + SelectionGoal::HorizontalPosition(x.0) + ) + 
); + + let ix = snapshot.buffer_snapshot.text().find("seven").unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.edit([(ix..ix, "and ")], None, cx); + }); + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!( + snapshot.text_chunks(DisplayRow(1)).collect::(), + "three four \nfive\nsix and \nseven eight" + ); + + // Re-wrap on font size changes + map.update(cx, |map, cx| { + map.set_font(font("Helvetica"), px(font_size.0 + 3.), cx) + }); + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!( + snapshot.text_chunks(DisplayRow(1)).collect::(), + "three \nfour five\nsix and \nseven \neight" + ) + }); + } + + #[gpui::test] + fn test_text_chunks(cx: &mut gpui::AppContext) { + init_test(cx, |_| {}); + + let text = sample_text(6, 6, 'a'); + let buffer = MultiBuffer::build_simple(&text, cx); + + let font_size = px(14.0); + let map = cx.new_model(|cx| { + DisplayMap::new(buffer.clone(), font("Helvetica"), font_size, None, 1, 1, cx) + }); + + buffer.update(cx, |buffer, cx| { + buffer.edit( + vec![ + ( + MultiBufferPoint::new(1, 0)..MultiBufferPoint::new(1, 0), + "\t", + ), + ( + MultiBufferPoint::new(1, 1)..MultiBufferPoint::new(1, 1), + "\t", + ), + ( + MultiBufferPoint::new(2, 1)..MultiBufferPoint::new(2, 1), + "\t", + ), + ], + None, + cx, + ) + }); + + assert_eq!( + map.update(cx, |map, cx| map.snapshot(cx)) + .text_chunks(DisplayRow(1)) + .collect::() + .lines() + .next(), + Some(" b bbbbb") + ); + assert_eq!( + map.update(cx, |map, cx| map.snapshot(cx)) + .text_chunks(DisplayRow(2)) + .collect::() + .lines() + .next(), + Some("c ccccc") + ); + } + + #[gpui::test] + async fn test_chunks(cx: &mut gpui::TestAppContext) { + use unindent::Unindent as _; + + let text = r#" + fn outer() {} + + mod module { + fn inner() {} + }"# + .unindent(); + + let theme = + SyntaxTheme::new_test(vec![("mod.body", Hsla::red()), ("fn.name", Hsla::blue())]); + let language = Arc::new( + Language::new( + LanguageConfig { + name: "Test".into(), + matcher: LanguageMatcher { + path_suffixes: vec![".test".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_highlights_query( + r#" + (mod_item name: (identifier) body: _ @mod.body) + (function_item name: (identifier) @fn.name) + "#, + ) + .unwrap(), + ); + language.set_theme(&theme); + + cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap()))); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + cx.condition(&buffer, |buf, _| !buf.is_parsing()).await; + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + + let font_size = px(14.0); + + let map = cx + .new_model(|cx| DisplayMap::new(buffer, font("Helvetica"), font_size, None, 1, 1, cx)); + assert_eq!( + cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(5), &map, &theme, cx)), + vec![ + ("fn ".to_string(), None), + ("outer".to_string(), Some(Hsla::blue())), + ("() {}\n\nmod module ".to_string(), None), + ("{\n fn ".to_string(), Some(Hsla::red())), + ("inner".to_string(), Some(Hsla::blue())), + ("() {}\n}".to_string(), Some(Hsla::red())), + ] + ); + assert_eq!( + cx.update(|cx| syntax_chunks(DisplayRow(3)..DisplayRow(5), &map, &theme, cx)), + vec![ + (" fn ".to_string(), Some(Hsla::red())), + ("inner".to_string(), Some(Hsla::blue())), + ("() {}\n}".to_string(), Some(Hsla::red())), + ] + ); + + map.update(cx, |map, cx| { + map.fold( + vec![MultiBufferPoint::new(0, 6)..MultiBufferPoint::new(3, 2)], + cx, + ) + }); + assert_eq!( 
+ cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(2), &map, &theme, cx)), + vec![ + ("fn ".to_string(), None), + ("out".to_string(), Some(Hsla::blue())), + ("⋯".to_string(), None), + (" fn ".to_string(), Some(Hsla::red())), + ("inner".to_string(), Some(Hsla::blue())), + ("() {}\n}".to_string(), Some(Hsla::red())), + ] + ); + } + + #[gpui::test] + async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) { + use unindent::Unindent as _; + + cx.background_executor + .set_block_on_ticks(usize::MAX..=usize::MAX); + + let text = r#" + fn outer() {} + + mod module { + fn inner() {} + }"# + .unindent(); + + let theme = + SyntaxTheme::new_test(vec![("mod.body", Hsla::red()), ("fn.name", Hsla::blue())]); + let language = Arc::new( + Language::new( + LanguageConfig { + name: "Test".into(), + matcher: LanguageMatcher { + path_suffixes: vec![".test".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_highlights_query( + r#" + (mod_item name: (identifier) body: _ @mod.body) + (function_item name: (identifier) @fn.name) + "#, + ) + .unwrap(), + ); + language.set_theme(&theme); + + cx.update(|cx| init_test(cx, |_| {})); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + cx.condition(&buffer, |buf, _| !buf.is_parsing()).await; + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + + let font_size = px(16.0); + + let map = cx.new_model(|cx| { + DisplayMap::new(buffer, font("Courier"), font_size, Some(px(40.0)), 1, 1, cx) + }); + assert_eq!( + cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(5), &map, &theme, cx)), + [ + ("fn \n".to_string(), None), + ("oute\nr".to_string(), Some(Hsla::blue())), + ("() \n{}\n\n".to_string(), None), + ] + ); + assert_eq!( + cx.update(|cx| syntax_chunks(DisplayRow(3)..DisplayRow(5), &map, &theme, cx)), + [("{}\n\n".to_string(), None)] + ); + + map.update(cx, |map, cx| { + map.fold( + vec![MultiBufferPoint::new(0, 6)..MultiBufferPoint::new(3, 2)], + cx, + ) + }); + assert_eq!( + cx.update(|cx| syntax_chunks(DisplayRow(1)..DisplayRow(4), &map, &theme, cx)), + [ + ("out".to_string(), Some(Hsla::blue())), + ("⋯\n".to_string(), None), + (" \nfn ".to_string(), Some(Hsla::red())), + ("i\n".to_string(), Some(Hsla::blue())) + ] + ); + } + + #[gpui::test] + async fn test_chunks_with_text_highlights(cx: &mut gpui::TestAppContext) { + cx.update(|cx| init_test(cx, |_| {})); + + let theme = + SyntaxTheme::new_test(vec![("operator", Hsla::red()), ("string", Hsla::green())]); + let language = Arc::new( + Language::new( + LanguageConfig { + name: "Test".into(), + matcher: LanguageMatcher { + path_suffixes: vec![".test".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_highlights_query( + r#" + ":" @operator + (string_literal) @string + "#, + ) + .unwrap(), + ); + language.set_theme(&theme); + + let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + cx.condition(&buffer, |buf, _| !buf.is_parsing()).await; + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + + let font_size = px(16.0); + let map = + cx.new_model(|cx| DisplayMap::new(buffer, font("Courier"), font_size, None, 1, 1, cx)); + + enum MyType {} + + let style = HighlightStyle { + color: 
Some(Hsla::blue()), + ..Default::default() + }; + + map.update(cx, |map, _cx| { + map.highlight_text( + TypeId::of::(), + highlighted_ranges + .into_iter() + .map(|range| { + buffer_snapshot.anchor_before(range.start) + ..buffer_snapshot.anchor_before(range.end) + }) + .collect(), + style, + ); + }); + + assert_eq!( + cx.update(|cx| chunks(DisplayRow(0)..DisplayRow(10), &map, &theme, cx)), + [ + ("const ".to_string(), None, None), + ("a".to_string(), None, Some(Hsla::blue())), + (":".to_string(), Some(Hsla::red()), None), + (" B = ".to_string(), None, None), + ("\"c ".to_string(), Some(Hsla::green()), None), + ("d".to_string(), Some(Hsla::green()), Some(Hsla::blue())), + ("\"".to_string(), Some(Hsla::green()), None), + ] + ); + } + + #[gpui::test] + fn test_clip_point(cx: &mut gpui::AppContext) { + init_test(cx, |_| {}); + + fn assert(text: &str, shift_right: bool, bias: Bias, cx: &mut gpui::AppContext) { + let (unmarked_snapshot, mut markers) = marked_display_snapshot(text, cx); + + match bias { + Bias::Left => { + if shift_right { + *markers[1].column_mut() += 1; + } + + assert_eq!(unmarked_snapshot.clip_point(markers[1], bias), markers[0]) + } + Bias::Right => { + if shift_right { + *markers[0].column_mut() += 1; + } + + assert_eq!(unmarked_snapshot.clip_point(markers[0], bias), markers[1]) + } + }; + } + + use Bias::{Left, Right}; + assert("ˇˇα", false, Left, cx); + assert("ˇˇα", true, Left, cx); + assert("ˇˇα", false, Right, cx); + assert("ˇαˇ", true, Right, cx); + assert("ˇˇ✋", false, Left, cx); + assert("ˇˇ✋", true, Left, cx); + assert("ˇˇ✋", false, Right, cx); + assert("ˇ✋ˇ", true, Right, cx); + assert("ˇˇ🍐", false, Left, cx); + assert("ˇˇ🍐", true, Left, cx); + assert("ˇˇ🍐", false, Right, cx); + assert("ˇ🍐ˇ", true, Right, cx); + assert("ˇˇ\t", false, Left, cx); + assert("ˇˇ\t", true, Left, cx); + assert("ˇˇ\t", false, Right, cx); + assert("ˇ\tˇ", true, Right, cx); + assert(" ˇˇ\t", false, Left, cx); + assert(" ˇˇ\t", true, Left, cx); + assert(" ˇˇ\t", false, Right, cx); + assert(" ˇ\tˇ", true, Right, cx); + assert(" ˇˇ\t", false, Left, cx); + assert(" ˇˇ\t", false, Right, cx); + } + + #[gpui::test] + fn test_clip_at_line_ends(cx: &mut gpui::AppContext) { + init_test(cx, |_| {}); + + fn assert(text: &str, cx: &mut gpui::AppContext) { + let (mut unmarked_snapshot, markers) = marked_display_snapshot(text, cx); + unmarked_snapshot.clip_at_line_ends = true; + assert_eq!( + unmarked_snapshot.clip_point(markers[1], Bias::Left), + markers[0] + ); + } + + assert("ˇˇ", cx); + assert("ˇaˇ", cx); + assert("aˇbˇ", cx); + assert("aˇαˇ", cx); + } + + #[gpui::test] + fn test_tabs_with_multibyte_chars(cx: &mut gpui::AppContext) { + init_test(cx, |_| {}); + + let text = "✅\t\tα\nβ\t\n🏀β\t\tγ"; + let buffer = MultiBuffer::build_simple(text, cx); + let font_size = px(14.0); + + let map = cx.new_model(|cx| { + DisplayMap::new(buffer.clone(), font("Helvetica"), font_size, None, 1, 1, cx) + }); + let map = map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!(map.text(), "✅ α\nβ \n🏀β γ"); + assert_eq!( + map.text_chunks(DisplayRow(0)).collect::(), + "✅ α\nβ \n🏀β γ" + ); + assert_eq!( + map.text_chunks(DisplayRow(1)).collect::(), + "β \n🏀β γ" + ); + assert_eq!( + map.text_chunks(DisplayRow(2)).collect::(), + "🏀β γ" + ); + + let point = MultiBufferPoint::new(0, "✅\t\t".len() as u32); + let display_point = DisplayPoint::new(DisplayRow(0), "✅ ".len() as u32); + assert_eq!(point.to_display_point(&map), display_point); + assert_eq!(display_point.to_point(&map), point); + + let point = 
MultiBufferPoint::new(1, "β\t".len() as u32); + let display_point = DisplayPoint::new(DisplayRow(1), "β ".len() as u32); + assert_eq!(point.to_display_point(&map), display_point); + assert_eq!(display_point.to_point(&map), point,); + + let point = MultiBufferPoint::new(2, "🏀β\t\t".len() as u32); + let display_point = DisplayPoint::new(DisplayRow(2), "🏀β ".len() as u32); + assert_eq!(point.to_display_point(&map), display_point); + assert_eq!(display_point.to_point(&map), point,); + + // Display points inside of expanded tabs + assert_eq!( + DisplayPoint::new(DisplayRow(0), "✅ ".len() as u32).to_point(&map), + MultiBufferPoint::new(0, "✅\t".len() as u32), + ); + assert_eq!( + DisplayPoint::new(DisplayRow(0), "✅ ".len() as u32).to_point(&map), + MultiBufferPoint::new(0, "✅".len() as u32), + ); + + // Clipping display points inside of multi-byte characters + assert_eq!( + map.clip_point( + DisplayPoint::new(DisplayRow(0), "✅".len() as u32 - 1), + Left + ), + DisplayPoint::new(DisplayRow(0), 0) + ); + assert_eq!( + map.clip_point( + DisplayPoint::new(DisplayRow(0), "✅".len() as u32 - 1), + Bias::Right + ), + DisplayPoint::new(DisplayRow(0), "✅".len() as u32) + ); + } + + #[gpui::test] + fn test_max_point(cx: &mut gpui::AppContext) { + init_test(cx, |_| {}); + + let buffer = MultiBuffer::build_simple("aaa\n\t\tbbb", cx); + let font_size = px(14.0); + let map = cx.new_model(|cx| { + DisplayMap::new(buffer.clone(), font("Helvetica"), font_size, None, 1, 1, cx) + }); + assert_eq!( + map.update(cx, |map, cx| map.snapshot(cx)).max_point(), + DisplayPoint::new(DisplayRow(1), 11) + ) + } + + fn syntax_chunks( + rows: Range, + map: &Model, + theme: &SyntaxTheme, + cx: &mut AppContext, + ) -> Vec<(String, Option)> { + chunks(rows, map, theme, cx) + .into_iter() + .map(|(text, color, _)| (text, color)) + .collect() + } + + fn chunks( + rows: Range, + map: &Model, + theme: &SyntaxTheme, + cx: &mut AppContext, + ) -> Vec<(String, Option, Option)> { + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + let mut chunks: Vec<(String, Option, Option)> = Vec::new(); + for chunk in snapshot.chunks(rows, true, HighlightStyles::default()) { + let syntax_color = chunk + .syntax_highlight_id + .and_then(|id| id.style(theme)?.color); + let highlight_color = chunk.highlight_style.and_then(|style| style.color); + if let Some((last_chunk, last_syntax_color, last_highlight_color)) = chunks.last_mut() { + if syntax_color == *last_syntax_color && highlight_color == *last_highlight_color { + last_chunk.push_str(chunk.text); + continue; + } + } + chunks.push((chunk.text.to_string(), syntax_color, highlight_color)); + } + chunks + } + + fn init_test(cx: &mut AppContext, f: impl Fn(&mut AllLanguageSettingsContent)) { + let settings = SettingsStore::test(cx); + cx.set_global(settings); + language::init(cx); + crate::init(cx); + Project::init_settings(cx); + theme::init(LoadThemes::JustBase, cx); + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, f); + }); + } +} diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs new file mode 100644 index 0000000..2ec868f --- /dev/null +++ b/crates/editor/src/display_map/block_map.rs @@ -0,0 +1,1666 @@ +use super::{ + wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot}, + Highlights, +}; +use crate::{EditorStyle, GutterDimensions}; +use collections::{Bound, HashMap, HashSet}; +use gpui::{AnyElement, Pixels, WindowContext}; +use language::{BufferSnapshot, Chunk, Patch, Point}; +use multi_buffer::{Anchor, ExcerptId, 
ExcerptRange, MultiBufferRow, ToPoint as _}; +use parking_lot::Mutex; +use std::{ + cell::RefCell, + cmp::{self, Ordering}, + fmt::Debug, + ops::{Deref, DerefMut, Range}, + sync::{ + atomic::{AtomicUsize, Ordering::SeqCst}, + Arc, + }, +}; +use sum_tree::{Bias, SumTree}; +use text::Edit; + +const NEWLINES: &[u8] = &[b'\n'; u8::MAX as usize]; + +/// Tracks custom blocks such as diagnostics that should be displayed within buffer. +/// +/// See the [`display_map` module documentation](crate::display_map) for more information. +pub struct BlockMap { + next_block_id: AtomicUsize, + wrap_snapshot: RefCell, + blocks: Vec>, + transforms: RefCell>, + buffer_header_height: u8, + excerpt_header_height: u8, +} + +pub struct BlockMapWriter<'a>(&'a mut BlockMap); + +#[derive(Clone)] +pub struct BlockSnapshot { + wrap_snapshot: WrapSnapshot, + transforms: SumTree, +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct BlockId(usize); + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct BlockPoint(pub Point); + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct BlockRow(pub(super) u32); + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +struct WrapRow(u32); + +pub type RenderBlock = Box AnyElement>; + +pub struct Block { + id: BlockId, + position: Anchor, + height: u8, + style: BlockStyle, + render: Mutex, + disposition: BlockDisposition, +} + +pub struct BlockProperties
<P>
{ + pub position: P, + pub height: u8, + pub style: BlockStyle, + pub render: Box AnyElement>, + pub disposition: BlockDisposition, +} + +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] +pub enum BlockStyle { + Fixed, + Flex, + Sticky, +} + +pub struct BlockContext<'a, 'b> { + pub context: &'b mut WindowContext<'a>, + pub anchor_x: Pixels, + pub max_width: Pixels, + pub gutter_dimensions: &'b GutterDimensions, + pub em_width: Pixels, + pub line_height: Pixels, + pub block_id: usize, + pub editor_style: &'b EditorStyle, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum BlockDisposition { + Above, + Below, +} + +#[derive(Clone, Debug)] +struct Transform { + summary: TransformSummary, + block: Option, +} + +#[allow(clippy::large_enum_variant)] +#[derive(Clone)] +pub enum TransformBlock { + Custom(Arc), + ExcerptHeader { + id: ExcerptId, + buffer: BufferSnapshot, + range: ExcerptRange, + height: u8, + starts_new_buffer: bool, + }, +} + +impl TransformBlock { + fn disposition(&self) -> BlockDisposition { + match self { + TransformBlock::Custom(block) => block.disposition, + TransformBlock::ExcerptHeader { .. } => BlockDisposition::Above, + } + } + + pub fn height(&self) -> u8 { + match self { + TransformBlock::Custom(block) => block.height, + TransformBlock::ExcerptHeader { height, .. } => *height, + } + } +} + +impl Debug for TransformBlock { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Custom(block) => f.debug_struct("Custom").field("block", block).finish(), + Self::ExcerptHeader { buffer, .. } => f + .debug_struct("ExcerptHeader") + .field("path", &buffer.file().map(|f| f.path())) + .finish(), + } + } +} + +#[derive(Clone, Debug, Default)] +struct TransformSummary { + input_rows: u32, + output_rows: u32, +} + +pub struct BlockChunks<'a> { + transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>, + input_chunks: wrap_map::WrapChunks<'a>, + input_chunk: Chunk<'a>, + output_row: u32, + max_output_row: u32, +} + +#[derive(Clone)] +pub struct BlockBufferRows<'a> { + transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>, + input_buffer_rows: wrap_map::WrapBufferRows<'a>, + output_row: BlockRow, + started: bool, +} + +impl BlockMap { + pub fn new( + wrap_snapshot: WrapSnapshot, + buffer_header_height: u8, + excerpt_header_height: u8, + ) -> Self { + let row_count = wrap_snapshot.max_point().row() + 1; + let map = Self { + next_block_id: AtomicUsize::new(0), + blocks: Vec::new(), + transforms: RefCell::new(SumTree::from_item(Transform::isomorphic(row_count), &())), + wrap_snapshot: RefCell::new(wrap_snapshot.clone()), + buffer_header_height, + excerpt_header_height, + }; + map.sync( + &wrap_snapshot, + Patch::new(vec![Edit { + old: 0..row_count, + new: 0..row_count, + }]), + ); + map + } + + pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch) -> BlockSnapshot { + self.sync(&wrap_snapshot, edits); + *self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone(); + BlockSnapshot { + wrap_snapshot, + transforms: self.transforms.borrow().clone(), + } + } + + pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch) -> BlockMapWriter { + self.sync(&wrap_snapshot, edits); + *self.wrap_snapshot.borrow_mut() = wrap_snapshot; + BlockMapWriter(self) + } + + fn sync(&self, wrap_snapshot: &WrapSnapshot, mut edits: Patch) { + let buffer = wrap_snapshot.buffer_snapshot(); + + // Handle changing the last excerpt if it is empty. 
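+ // When only the trailing excerpt changed, the incoming edits may not cover it, so we
+ // synthesize an edit whose old and new ranges both span from the last soft-wrap row
+ // boundary to the end; composing it in forces the trailing transforms (and any
+ // trailing excerpt header) to be rebuilt below.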
+ if buffer.trailing_excerpt_update_count() + != self + .wrap_snapshot + .borrow() + .buffer_snapshot() + .trailing_excerpt_update_count() + { + let max_point = wrap_snapshot.max_point(); + let edit_start = wrap_snapshot.prev_row_boundary(max_point); + let edit_end = max_point.row() + 1; + edits = edits.compose([WrapEdit { + old: edit_start..edit_end, + new: edit_start..edit_end, + }]); + } + + let edits = edits.into_inner(); + if edits.is_empty() { + return; + } + + let mut transforms = self.transforms.borrow_mut(); + let mut new_transforms = SumTree::new(); + let old_row_count = transforms.summary().input_rows; + let new_row_count = wrap_snapshot.max_point().row() + 1; + let mut cursor = transforms.cursor::(); + let mut last_block_ix = 0; + let mut blocks_in_edit = Vec::new(); + let mut edits = edits.into_iter().peekable(); + + while let Some(edit) = edits.next() { + // Preserve any old transforms that precede this edit. + let old_start = WrapRow(edit.old.start); + let new_start = WrapRow(edit.new.start); + new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &()); + if let Some(transform) = cursor.item() { + if transform.is_isomorphic() && old_start == cursor.end(&()) { + new_transforms.push(transform.clone(), &()); + cursor.next(&()); + while let Some(transform) = cursor.item() { + if transform + .block + .as_ref() + .map_or(false, |b| b.disposition().is_below()) + { + new_transforms.push(transform.clone(), &()); + cursor.next(&()); + } else { + break; + } + } + } + } + + // Preserve any portion of an old transform that precedes this edit. + let extent_before_edit = old_start.0 - cursor.start().0; + push_isomorphic(&mut new_transforms, extent_before_edit); + + // Skip over any old transforms that intersect this edit. + let mut old_end = WrapRow(edit.old.end); + let mut new_end = WrapRow(edit.new.end); + cursor.seek(&old_end, Bias::Left, &()); + cursor.next(&()); + if old_end == *cursor.start() { + while let Some(transform) = cursor.item() { + if transform + .block + .as_ref() + .map_or(false, |b| b.disposition().is_below()) + { + cursor.next(&()); + } else { + break; + } + } + } + + // Combine this edit with any subsequent edits that intersect the same transform. + while let Some(next_edit) = edits.peek() { + if next_edit.old.start <= cursor.start().0 { + old_end = WrapRow(next_edit.old.end); + new_end = WrapRow(next_edit.new.end); + cursor.seek(&old_end, Bias::Left, &()); + cursor.next(&()); + if old_end == *cursor.start() { + while let Some(transform) = cursor.item() { + if transform + .block + .as_ref() + .map_or(false, |b| b.disposition().is_below()) + { + cursor.next(&()); + } else { + break; + } + } + } + edits.next(); + } else { + break; + } + } + + // Find the blocks within this edited region. 
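+ // The comparator never returns `Equal` (note the `.then(Ordering::Greater)`), so
+ // `binary_search_by` always yields the insertion index of the first block positioned
+ // at or after `new_buffer_start`; blocks are kept sorted by anchor position, which
+ // makes this the start of the slice of blocks overlapping the edited rows.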
+ let new_buffer_start = + wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left); + let start_bound = Bound::Included(new_buffer_start); + let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| { + probe + .position + .to_point(buffer) + .cmp(&new_buffer_start) + .then(Ordering::Greater) + }) { + Ok(ix) | Err(ix) => last_block_ix + ix, + }; + + let end_bound; + let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() { + end_bound = Bound::Unbounded; + self.blocks.len() + } else { + let new_buffer_end = + wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left); + end_bound = Bound::Excluded(new_buffer_end); + match self.blocks[start_block_ix..].binary_search_by(|probe| { + probe + .position + .to_point(buffer) + .cmp(&new_buffer_end) + .then(Ordering::Greater) + }) { + Ok(ix) | Err(ix) => start_block_ix + ix, + } + }; + last_block_ix = end_block_ix; + + debug_assert!(blocks_in_edit.is_empty()); + blocks_in_edit.extend( + self.blocks[start_block_ix..end_block_ix] + .iter() + .map(|block| { + let mut position = block.position.to_point(buffer); + match block.disposition { + BlockDisposition::Above => position.column = 0, + BlockDisposition::Below => { + position.column = buffer.line_len(MultiBufferRow(position.row)) + } + } + let position = wrap_snapshot.make_wrap_point(position, Bias::Left); + (position.row(), TransformBlock::Custom(block.clone())) + }), + ); + if buffer.show_headers() { + blocks_in_edit.extend( + buffer + .excerpt_boundaries_in_range((start_bound, end_bound)) + .map(|excerpt_boundary| { + ( + wrap_snapshot + .make_wrap_point( + Point::new(excerpt_boundary.row.0, 0), + Bias::Left, + ) + .row(), + TransformBlock::ExcerptHeader { + id: excerpt_boundary.id, + buffer: excerpt_boundary.buffer, + range: excerpt_boundary.range, + height: if excerpt_boundary.starts_new_buffer { + self.buffer_header_height + } else { + self.excerpt_header_height + }, + starts_new_buffer: excerpt_boundary.starts_new_buffer, + }, + ) + }), + ); + } + + // Place excerpt headers above custom blocks on the same row. + blocks_in_edit.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| { + row_a.cmp(row_b).then_with(|| match (block_a, block_b) { + ( + TransformBlock::ExcerptHeader { .. }, + TransformBlock::ExcerptHeader { .. }, + ) => Ordering::Equal, + (TransformBlock::ExcerptHeader { .. }, _) => Ordering::Less, + (_, TransformBlock::ExcerptHeader { .. }) => Ordering::Greater, + (TransformBlock::Custom(block_a), TransformBlock::Custom(block_b)) => block_a + .disposition + .cmp(&block_b.disposition) + .then_with(|| block_a.id.cmp(&block_b.id)), + }) + }); + + // For each of these blocks, insert a new isomorphic transform preceding the block, + // and then insert the block itself. + for (block_row, block) in blocks_in_edit.drain(..) { + let insertion_row = match block.disposition() { + BlockDisposition::Above => block_row, + BlockDisposition::Below => block_row + 1, + }; + let extent_before_block = insertion_row - new_transforms.summary().input_rows; + push_isomorphic(&mut new_transforms, extent_before_block); + new_transforms.push(Transform::block(block), &()); + } + + old_end = WrapRow(old_end.0.min(old_row_count)); + new_end = WrapRow(new_end.0.min(new_row_count)); + + // Insert an isomorphic transform after the final block. + let extent_after_last_block = new_end.0 - new_transforms.summary().input_rows; + push_isomorphic(&mut new_transforms, extent_after_last_block); + + // Preserve any portion of the old transform after this edit. 
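(The same-row ordering used by the sort above can be restated in isolation. In this hypothetical sketch — `DemoBlock` and `same_row_order` are illustration-only names — excerpt headers sort ahead of custom blocks on the same row, and custom blocks sort by disposition and then by insertion id:)

    use std::cmp::Ordering;

    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
    enum Disposition { Above, Below }

    #[derive(Debug, PartialEq, Clone, Copy)]
    enum DemoBlock {
        ExcerptHeader,
        Custom { disposition: Disposition, id: u32 },
    }

    fn same_row_order(a: &DemoBlock, b: &DemoBlock) -> Ordering {
        match (a, b) {
            (DemoBlock::ExcerptHeader, DemoBlock::ExcerptHeader) => Ordering::Equal,
            (DemoBlock::ExcerptHeader, _) => Ordering::Less,
            (_, DemoBlock::ExcerptHeader) => Ordering::Greater,
            (
                DemoBlock::Custom { disposition: da, id: ia },
                DemoBlock::Custom { disposition: db, id: ib },
            ) => da.cmp(db).then_with(|| ia.cmp(ib)),
        }
    }

    fn main() {
        let mut row = vec![
            DemoBlock::Custom { disposition: Disposition::Below, id: 7 },
            DemoBlock::ExcerptHeader,
            DemoBlock::Custom { disposition: Disposition::Above, id: 3 },
        ];
        row.sort_by(same_row_order);
        assert_eq!(
            row,
            vec![
                DemoBlock::ExcerptHeader,
                DemoBlock::Custom { disposition: Disposition::Above, id: 3 },
                DemoBlock::Custom { disposition: Disposition::Below, id: 7 },
            ]
        );
    }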
+ let extent_after_edit = cursor.start().0 - old_end.0; + push_isomorphic(&mut new_transforms, extent_after_edit); + } + + new_transforms.append(cursor.suffix(&()), &()); + debug_assert_eq!( + new_transforms.summary().input_rows, + wrap_snapshot.max_point().row() + 1 + ); + + drop(cursor); + *transforms = new_transforms; + } + + pub fn replace(&mut self, mut renderers: HashMap) { + for block in &self.blocks { + if let Some(render) = renderers.remove(&block.id) { + *block.render.lock() = render; + } + } + } +} + +fn push_isomorphic(tree: &mut SumTree, rows: u32) { + if rows == 0 { + return; + } + + let mut extent = Some(rows); + tree.update_last( + |last_transform| { + if last_transform.is_isomorphic() { + let extent = extent.take().unwrap(); + last_transform.summary.input_rows += extent; + last_transform.summary.output_rows += extent; + } + }, + &(), + ); + if let Some(extent) = extent { + tree.push(Transform::isomorphic(extent), &()); + } +} + +impl BlockPoint { + pub fn new(row: u32, column: u32) -> Self { + Self(Point::new(row, column)) + } +} + +impl Deref for BlockPoint { + type Target = Point; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl std::ops::DerefMut for BlockPoint { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl<'a> BlockMapWriter<'a> { + pub fn insert( + &mut self, + blocks: impl IntoIterator>, + ) -> Vec { + let mut ids = Vec::new(); + let mut edits = Patch::default(); + let wrap_snapshot = &*self.0.wrap_snapshot.borrow(); + let buffer = wrap_snapshot.buffer_snapshot(); + + for block in blocks { + let id = BlockId(self.0.next_block_id.fetch_add(1, SeqCst)); + ids.push(id); + + let position = block.position; + let point = position.to_point(buffer); + let wrap_row = wrap_snapshot + .make_wrap_point(Point::new(point.row, 0), Bias::Left) + .row(); + let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); + let end_row = wrap_snapshot + .next_row_boundary(WrapPoint::new(wrap_row, 0)) + .unwrap_or(wrap_snapshot.max_point().row() + 1); + + let block_ix = match self + .0 + .blocks + .binary_search_by(|probe| probe.position.cmp(&position, buffer)) + { + Ok(ix) | Err(ix) => ix, + }; + self.0.blocks.insert( + block_ix, + Arc::new(Block { + id, + position, + height: block.height, + render: Mutex::new(block.render), + disposition: block.disposition, + style: block.style, + }), + ); + + edits = edits.compose([Edit { + old: start_row..end_row, + new: start_row..end_row, + }]); + } + + self.0.sync(wrap_snapshot, edits); + ids + } + + pub fn remove(&mut self, block_ids: HashSet) { + let wrap_snapshot = &*self.0.wrap_snapshot.borrow(); + let buffer = wrap_snapshot.buffer_snapshot(); + let mut edits = Patch::default(); + let mut last_block_buffer_row = None; + self.0.blocks.retain(|block| { + if block_ids.contains(&block.id) { + let buffer_row = block.position.to_point(buffer).row; + if last_block_buffer_row != Some(buffer_row) { + last_block_buffer_row = Some(buffer_row); + let wrap_row = wrap_snapshot + .make_wrap_point(Point::new(buffer_row, 0), Bias::Left) + .row(); + let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); + let end_row = wrap_snapshot + .next_row_boundary(WrapPoint::new(wrap_row, 0)) + .unwrap_or(wrap_snapshot.max_point().row() + 1); + edits.push(Edit { + old: start_row..end_row, + new: start_row..end_row, + }) + } + false + } else { + true + } + }); + self.0.sync(wrap_snapshot, edits); + } +} + +impl BlockSnapshot { + #[cfg(test)] + pub fn text(&self) -> String { + self.chunks( 
+ 0..self.transforms.summary().output_rows, + false, + Highlights::default(), + ) + .map(|chunk| chunk.text) + .collect() + } + + pub(crate) fn chunks<'a>( + &'a self, + rows: Range, + language_aware: bool, + highlights: Highlights<'a>, + ) -> BlockChunks<'a> { + let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let input_end = { + cursor.seek(&BlockRow(rows.end), Bias::Right, &()); + let overshoot = if cursor + .item() + .map_or(false, |transform| transform.is_isomorphic()) + { + rows.end - cursor.start().0 .0 + } else { + 0 + }; + cursor.start().1 .0 + overshoot + }; + let input_start = { + cursor.seek(&BlockRow(rows.start), Bias::Right, &()); + let overshoot = if cursor + .item() + .map_or(false, |transform| transform.is_isomorphic()) + { + rows.start - cursor.start().0 .0 + } else { + 0 + }; + cursor.start().1 .0 + overshoot + }; + BlockChunks { + input_chunks: self.wrap_snapshot.chunks( + input_start..input_end, + language_aware, + highlights, + ), + input_chunk: Default::default(), + transforms: cursor, + output_row: rows.start, + max_output_row, + } + } + + pub(super) fn buffer_rows(&self, start_row: BlockRow) -> BlockBufferRows { + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + cursor.seek(&start_row, Bias::Right, &()); + let (output_start, input_start) = cursor.start(); + let overshoot = if cursor.item().map_or(false, |t| t.is_isomorphic()) { + start_row.0 - output_start.0 + } else { + 0 + }; + let input_start_row = input_start.0 + overshoot; + BlockBufferRows { + transforms: cursor, + input_buffer_rows: self.wrap_snapshot.buffer_rows(input_start_row), + output_row: start_row, + started: false, + } + } + + pub fn blocks_in_range( + &self, + rows: Range, + ) -> impl Iterator { + let mut cursor = self.transforms.cursor::(); + cursor.seek(&BlockRow(rows.start), Bias::Right, &()); + std::iter::from_fn(move || { + while let Some(transform) = cursor.item() { + let start_row = cursor.start().0; + if start_row >= rows.end { + break; + } + if let Some(block) = &transform.block { + cursor.next(&()); + return Some((start_row, block)); + } else { + cursor.next(&()); + } + } + None + }) + } + + pub fn max_point(&self) -> BlockPoint { + let row = self.transforms.summary().output_rows - 1; + BlockPoint::new(row, self.line_len(BlockRow(row))) + } + + pub fn longest_row(&self) -> u32 { + let input_row = self.wrap_snapshot.longest_row(); + self.to_block_point(WrapPoint::new(input_row, 0)).row + } + + pub(super) fn line_len(&self, row: BlockRow) -> u32 { + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + cursor.seek(&BlockRow(row.0), Bias::Right, &()); + if let Some(transform) = cursor.item() { + let (output_start, input_start) = cursor.start(); + let overshoot = row.0 - output_start.0; + if transform.block.is_some() { + 0 + } else { + self.wrap_snapshot.line_len(input_start.0 + overshoot) + } + } else { + panic!("row out of range"); + } + } + + pub(super) fn is_block_line(&self, row: BlockRow) -> bool { + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + cursor.seek(&row, Bias::Right, &()); + cursor.item().map_or(false, |t| t.block.is_some()) + } + + pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + cursor.seek(&BlockRow(point.row), Bias::Right, &()); + + let max_input_row = WrapRow(self.transforms.summary().input_rows); + let mut search_left = + (bias 
== Bias::Left && cursor.start().1 .0 > 0) || cursor.end(&()).1 == max_input_row; + let mut reversed = false; + + loop { + if let Some(transform) = cursor.item() { + if transform.is_isomorphic() { + let (output_start_row, input_start_row) = cursor.start(); + let (output_end_row, input_end_row) = cursor.end(&()); + let output_start = Point::new(output_start_row.0, 0); + let input_start = Point::new(input_start_row.0, 0); + let input_end = Point::new(input_end_row.0, 0); + let input_point = if point.row >= output_end_row.0 { + let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1); + self.wrap_snapshot + .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias) + } else { + let output_overshoot = point.0.saturating_sub(output_start); + self.wrap_snapshot + .clip_point(WrapPoint(input_start + output_overshoot), bias) + }; + + if (input_start..input_end).contains(&input_point.0) { + let input_overshoot = input_point.0.saturating_sub(input_start); + return BlockPoint(output_start + input_overshoot); + } + } + + if search_left { + cursor.prev(&()); + } else { + cursor.next(&()); + } + } else if reversed { + return self.max_point(); + } else { + reversed = true; + search_left = !search_left; + cursor.seek(&BlockRow(point.row), Bias::Right, &()); + } + } + } + + pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); + if let Some(transform) = cursor.item() { + debug_assert!(transform.is_isomorphic()); + } else { + return self.max_point(); + } + + let (input_start_row, output_start_row) = cursor.start(); + let input_start = Point::new(input_start_row.0, 0); + let output_start = Point::new(output_start_row.0, 0); + let input_overshoot = wrap_point.0 - input_start; + BlockPoint(output_start + input_overshoot) + } + + pub fn to_wrap_point(&self, block_point: BlockPoint) -> WrapPoint { + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); + if let Some(transform) = cursor.item() { + match transform.block.as_ref().map(|b| b.disposition()) { + Some(BlockDisposition::Above) => WrapPoint::new(cursor.start().1 .0, 0), + Some(BlockDisposition::Below) => { + let wrap_row = cursor.start().1 .0 - 1; + WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) + } + None => { + let overshoot = block_point.row - cursor.start().0 .0; + let wrap_row = cursor.start().1 .0 + overshoot; + WrapPoint::new(wrap_row, block_point.column) + } + } + } else { + self.wrap_snapshot.max_point() + } + } +} + +impl Transform { + fn isomorphic(rows: u32) -> Self { + Self { + summary: TransformSummary { + input_rows: rows, + output_rows: rows, + }, + block: None, + } + } + + fn block(block: TransformBlock) -> Self { + Self { + summary: TransformSummary { + input_rows: 0, + output_rows: block.height() as u32, + }, + block: Some(block), + } + } + + fn is_isomorphic(&self) -> bool { + self.block.is_none() + } +} + +impl<'a> Iterator for BlockChunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> Option { + if self.output_row >= self.max_output_row { + return None; + } + + let transform = self.transforms.item()?; + if transform.block.is_some() { + let block_start = self.transforms.start().0 .0; + let mut block_end = self.transforms.end(&()).0 .0; + self.transforms.next(&()); + if self.transforms.item().is_none() { + block_end -= 1; + } + + let start_in_block = self.output_row - block_start; + 
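// (note) A block's rows render as blank lines: the chunk returned a few lines below is
// just a run of newline characters, sliced out of a preallocated NEWLINES byte buffer
// so no per-row allocation is needed.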
let end_in_block = cmp::min(self.max_output_row, block_end) - block_start; + let line_count = end_in_block - start_in_block; + self.output_row += line_count; + + return Some(Chunk { + text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..line_count as usize]) }, + ..Default::default() + }); + } + + if self.input_chunk.text.is_empty() { + if let Some(input_chunk) = self.input_chunks.next() { + self.input_chunk = input_chunk; + } else { + self.output_row += 1; + if self.output_row < self.max_output_row { + self.transforms.next(&()); + return Some(Chunk { + text: "\n", + ..Default::default() + }); + } else { + return None; + } + } + } + + let transform_end = self.transforms.end(&()).0 .0; + let (prefix_rows, prefix_bytes) = + offset_for_row(self.input_chunk.text, transform_end - self.output_row); + self.output_row += prefix_rows; + let (prefix, suffix) = self.input_chunk.text.split_at(prefix_bytes); + self.input_chunk.text = suffix; + if self.output_row == transform_end { + self.transforms.next(&()); + } + + Some(Chunk { + text: prefix, + ..self.input_chunk + }) + } +} + +impl<'a> Iterator for BlockBufferRows<'a> { + type Item = Option; + + fn next(&mut self) -> Option { + if self.started { + self.output_row.0 += 1; + } else { + self.started = true; + } + + if self.output_row.0 >= self.transforms.end(&()).0 .0 { + self.transforms.next(&()); + } + + let transform = self.transforms.item()?; + if transform.block.is_some() { + Some(None) + } else { + Some(self.input_buffer_rows.next().unwrap().map(BlockRow)) + } + } +} + +impl sum_tree::Item for Transform { + type Summary = TransformSummary; + + fn summary(&self) -> Self::Summary { + self.summary.clone() + } +} + +impl sum_tree::Summary for TransformSummary { + type Context = (); + + fn add_summary(&mut self, summary: &Self, _: &()) { + self.input_rows += summary.input_rows; + self.output_rows += summary.output_rows; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapRow { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += summary.input_rows; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for BlockRow { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += summary.output_rows; + } +} + +impl BlockDisposition { + fn is_below(&self) -> bool { + matches!(self, BlockDisposition::Below) + } +} + +impl<'a> Deref for BlockContext<'a, '_> { + type Target = WindowContext<'a>; + + fn deref(&self) -> &Self::Target { + self.context + } +} + +impl DerefMut for BlockContext<'_, '_> { + fn deref_mut(&mut self) -> &mut Self::Target { + self.context + } +} + +impl Block { + pub fn render(&self, cx: &mut BlockContext) -> AnyElement { + self.render.lock()(cx) + } + + pub fn position(&self) -> &Anchor { + &self.position + } + + pub fn style(&self) -> BlockStyle { + self.style + } +} + +impl Debug for Block { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Block") + .field("id", &self.id) + .field("position", &self.position) + .field("disposition", &self.disposition) + .finish() + } +} + +// Count the number of bytes prior to a target point. If the string doesn't contain the target +// point, return its total extent. Otherwise return the target point itself. 
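// In other words, the returned pair is (rows_advanced, byte_offset): the byte offset of
// the start of row `target` within `s`, or `s.len()` when `s` contains fewer than
// `target` newlines.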
+fn offset_for_row(s: &str, target: u32) -> (u32, usize) { + let mut row = 0; + let mut offset = 0; + for (ix, line) in s.split('\n').enumerate() { + if ix > 0 { + row += 1; + offset += 1; + } + if row >= target { + break; + } + offset += line.len(); + } + (row, offset) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::display_map::inlay_map::InlayMap; + use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap}; + use gpui::{div, font, px, Element}; + use multi_buffer::MultiBuffer; + use rand::prelude::*; + use settings::SettingsStore; + use std::env; + use util::RandomCharIter; + + #[gpui::test] + fn test_offset_for_row() { + assert_eq!(offset_for_row("", 0), (0, 0)); + assert_eq!(offset_for_row("", 1), (0, 0)); + assert_eq!(offset_for_row("abcd", 0), (0, 0)); + assert_eq!(offset_for_row("abcd", 1), (0, 4)); + assert_eq!(offset_for_row("\n", 0), (0, 0)); + assert_eq!(offset_for_row("\n", 1), (1, 1)); + assert_eq!(offset_for_row("abc\ndef\nghi", 0), (0, 0)); + assert_eq!(offset_for_row("abc\ndef\nghi", 1), (1, 4)); + assert_eq!(offset_for_row("abc\ndef\nghi", 2), (2, 8)); + assert_eq!(offset_for_row("abc\ndef\nghi", 3), (2, 11)); + } + + #[gpui::test] + fn test_basic_blocks(cx: &mut gpui::TestAppContext) { + cx.update(|cx| init_test(cx)); + + let text = "aaa\nbbb\nccc\nddd"; + + let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx)); + let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap()); + let (wrap_map, wraps_snapshot) = + cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx)); + let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1); + + let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); + let block_ids = writer.insert(vec![ + BlockProperties { + style: BlockStyle::Fixed, + position: buffer_snapshot.anchor_after(Point::new(1, 0)), + height: 1, + disposition: BlockDisposition::Above, + render: Box::new(|_| div().into_any()), + }, + BlockProperties { + style: BlockStyle::Fixed, + position: buffer_snapshot.anchor_after(Point::new(1, 2)), + height: 2, + disposition: BlockDisposition::Above, + render: Box::new(|_| div().into_any()), + }, + BlockProperties { + style: BlockStyle::Fixed, + position: buffer_snapshot.anchor_after(Point::new(3, 3)), + height: 3, + disposition: BlockDisposition::Below, + render: Box::new(|_| div().into_any()), + }, + ]); + + let snapshot = block_map.read(wraps_snapshot, Default::default()); + assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n"); + + let blocks = snapshot + .blocks_in_range(0..8) + .map(|(start_row, block)| { + let block = block.as_custom().unwrap(); + (start_row..start_row + block.height as u32, block.id) + }) + .collect::>(); + + // When multiple blocks are on the same line, the newer blocks appear first. 
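// Concretely: with the snapshot text asserted above ("aaa\n\n\n\nbbb\nccc\nddd\n\n\n"),
// the height-1 and height-2 blocks above buffer row 1 occupy block rows 1..2 and 2..4
// (pushing "bbb" down to block row 4), and the height-3 block below buffer row 3
// occupies block rows 7..10.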
+ assert_eq!( + blocks, + &[ + (1..2, block_ids[0]), + (2..4, block_ids[1]), + (7..10, block_ids[2]), + ] + ); + + assert_eq!( + snapshot.to_block_point(WrapPoint::new(0, 3)), + BlockPoint::new(0, 3) + ); + assert_eq!( + snapshot.to_block_point(WrapPoint::new(1, 0)), + BlockPoint::new(4, 0) + ); + assert_eq!( + snapshot.to_block_point(WrapPoint::new(3, 3)), + BlockPoint::new(6, 3) + ); + + assert_eq!( + snapshot.to_wrap_point(BlockPoint::new(0, 3)), + WrapPoint::new(0, 3) + ); + assert_eq!( + snapshot.to_wrap_point(BlockPoint::new(1, 0)), + WrapPoint::new(1, 0) + ); + assert_eq!( + snapshot.to_wrap_point(BlockPoint::new(3, 0)), + WrapPoint::new(1, 0) + ); + assert_eq!( + snapshot.to_wrap_point(BlockPoint::new(7, 0)), + WrapPoint::new(3, 3) + ); + + assert_eq!( + snapshot.clip_point(BlockPoint::new(1, 0), Bias::Left), + BlockPoint::new(0, 3) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(1, 0), Bias::Right), + BlockPoint::new(4, 0) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(1, 1), Bias::Left), + BlockPoint::new(0, 3) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(1, 1), Bias::Right), + BlockPoint::new(4, 0) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(4, 0), Bias::Left), + BlockPoint::new(4, 0) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(4, 0), Bias::Right), + BlockPoint::new(4, 0) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(6, 3), Bias::Left), + BlockPoint::new(6, 3) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(6, 3), Bias::Right), + BlockPoint::new(6, 3) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(7, 0), Bias::Left), + BlockPoint::new(6, 3) + ); + assert_eq!( + snapshot.clip_point(BlockPoint::new(7, 0), Bias::Right), + BlockPoint::new(6, 3) + ); + + assert_eq!( + snapshot + .buffer_rows(BlockRow(0)) + .map(|row| row.map(|r| r.0)) + .collect::>(), + &[ + Some(0), + None, + None, + None, + Some(1), + Some(2), + Some(3), + None, + None, + None + ] + ); + + // Insert a line break, separating two block decorations into separate lines. 
+ let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "!!!\n")], None, cx); + buffer.snapshot(cx) + }); + + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot, subscription.consume().into_inner()); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tab_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap()); + let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { + wrap_map.sync(tab_snapshot, tab_edits, cx) + }); + let snapshot = block_map.read(wraps_snapshot, wrap_edits); + assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n"); + } + + #[gpui::test] + fn test_blocks_on_wrapped_lines(cx: &mut gpui::TestAppContext) { + cx.update(|cx| init_test(cx)); + + let _font_id = cx.text_system().font_id(&font("Helvetica")).unwrap(); + + let text = "one two three\nfour five six\nseven eight"; + + let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx)); + let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, wraps_snapshot) = cx.update(|cx| { + WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), Some(px(60.)), cx) + }); + let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1); + + let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); + writer.insert(vec![ + BlockProperties { + style: BlockStyle::Fixed, + position: buffer_snapshot.anchor_after(Point::new(1, 12)), + disposition: BlockDisposition::Above, + render: Box::new(|_| div().into_any()), + height: 1, + }, + BlockProperties { + style: BlockStyle::Fixed, + position: buffer_snapshot.anchor_after(Point::new(1, 1)), + disposition: BlockDisposition::Below, + render: Box::new(|_| div().into_any()), + height: 1, + }, + ]); + + // Blocks with an 'above' disposition go above their corresponding buffer line. + // Blocks with a 'below' disposition go below their corresponding buffer line. 
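// With a 60px wrap width each buffer line wraps onto two display rows ("one two " /
// "three", "four five " / "six", "seven " / "eight"). The block anchored at (1, 12) has
// an Above disposition, so it becomes a blank row before the first wrap row of buffer
// line 1; the block anchored at (1, 1) has a Below disposition, so it becomes a blank
// row after the last wrap row of buffer line 1. That is what the expected text below
// encodes.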
+ let snapshot = block_map.read(wraps_snapshot, Default::default()); + assert_eq!( + snapshot.text(), + "one two \nthree\n\nfour five \nsix\n\nseven \neight" + ); + } + + #[gpui::test(iterations = 100)] + fn test_random_blocks(cx: &mut gpui::TestAppContext, mut rng: StdRng) { + cx.update(|cx| init_test(cx)); + + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let wrap_width = if rng.gen_bool(0.2) { + None + } else { + Some(px(rng.gen_range(0.0..=100.0))) + }; + let tab_size = 1.try_into().unwrap(); + let font_size = px(14.0); + let buffer_start_header_height = rng.gen_range(1..=5); + let excerpt_header_height = rng.gen_range(1..=5); + + log::info!("Wrap width: {:?}", wrap_width); + log::info!("Excerpt Header Height: {:?}", excerpt_header_height); + + let buffer = if rng.gen() { + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + log::info!("initial buffer text: {:?}", text); + cx.update(|cx| MultiBuffer::build_simple(&text, cx)) + } else { + cx.update(|cx| MultiBuffer::build_random(&mut rng, cx)) + }; + + let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + let (wrap_map, wraps_snapshot) = cx + .update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx)); + let mut block_map = BlockMap::new( + wraps_snapshot, + buffer_start_header_height, + excerpt_header_height, + ); + let mut custom_blocks = Vec::new(); + + for _ in 0..operations { + let mut buffer_edits = Vec::new(); + match rng.gen_range(0..=100) { + 0..=19 => { + let wrap_width = if rng.gen_bool(0.2) { + None + } else { + Some(px(rng.gen_range(0.0..=100.0))) + }; + log::info!("Setting wrap width to {:?}", wrap_width); + wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); + } + 20..=39 => { + let block_count = rng.gen_range(1..=5); + let block_properties = (0..block_count) + .map(|_| { + let buffer = cx.update(|cx| buffer.read(cx).read(cx).clone()); + let position = buffer.anchor_after( + buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left), + ); + + let disposition = if rng.gen() { + BlockDisposition::Above + } else { + BlockDisposition::Below + }; + let height = rng.gen_range(1..5); + log::info!( + "inserting block {:?} {:?} with height {}", + disposition, + position.to_point(&buffer), + height + ); + BlockProperties { + style: BlockStyle::Fixed, + position, + height, + disposition, + render: Box::new(|_| div().into_any()), + } + }) + .collect::>(); + + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot.clone(), vec![]); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tab_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { + wrap_map.sync(tab_snapshot, tab_edits, cx) + }); + let mut block_map = block_map.write(wraps_snapshot, wrap_edits); + let block_ids = + block_map.insert(block_properties.iter().map(|props| BlockProperties { + position: props.position, + height: props.height, + style: props.style, + render: Box::new(|_| div().into_any()), + disposition: props.disposition, + })); + for (block_id, props) in block_ids.into_iter().zip(block_properties) { + 
custom_blocks.push((block_id, props)); + } + } + 40..=59 if !custom_blocks.is_empty() => { + let block_count = rng.gen_range(1..=4.min(custom_blocks.len())); + let block_ids_to_remove = (0..block_count) + .map(|_| { + custom_blocks + .remove(rng.gen_range(0..custom_blocks.len())) + .0 + }) + .collect(); + + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot.clone(), vec![]); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tab_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { + wrap_map.sync(tab_snapshot, tab_edits, cx) + }); + let mut block_map = block_map.write(wraps_snapshot, wrap_edits); + block_map.remove(block_ids_to_remove); + } + _ => { + buffer.update(cx, |buffer, cx| { + let mutation_count = rng.gen_range(1..=5); + let subscription = buffer.subscribe(); + buffer.randomly_mutate(&mut rng, mutation_count, cx); + buffer_snapshot = buffer.snapshot(cx); + buffer_edits.extend(subscription.consume()); + log::info!("buffer text: {:?}", buffer_snapshot.text()); + }); + } + } + + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot.clone(), buffer_edits); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { + wrap_map.sync(tab_snapshot, tab_edits, cx) + }); + let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); + assert_eq!( + blocks_snapshot.transforms.summary().input_rows, + wraps_snapshot.max_point().row() + 1 + ); + log::info!("blocks text: {:?}", blocks_snapshot.text()); + + let mut expected_blocks = Vec::new(); + expected_blocks.extend(custom_blocks.iter().map(|(id, block)| { + let mut position = block.position.to_point(&buffer_snapshot); + match block.disposition { + BlockDisposition::Above => { + position.column = 0; + } + BlockDisposition::Below => { + position.column = buffer_snapshot.line_len(MultiBufferRow(position.row)); + } + }; + let row = wraps_snapshot.make_wrap_point(position, Bias::Left).row(); + ( + row, + ExpectedBlock::Custom { + disposition: block.disposition, + id: *id, + height: block.height, + }, + ) + })); + expected_blocks.extend(buffer_snapshot.excerpt_boundaries_in_range(0..).map( + |boundary| { + let position = + wraps_snapshot.make_wrap_point(Point::new(boundary.row.0, 0), Bias::Left); + ( + position.row(), + ExpectedBlock::ExcerptHeader { + height: if boundary.starts_new_buffer { + buffer_start_header_height + } else { + excerpt_header_height + }, + starts_new_buffer: boundary.starts_new_buffer, + }, + ) + }, + )); + expected_blocks.sort_unstable(); + let mut sorted_blocks_iter = expected_blocks.into_iter().peekable(); + + let input_buffer_rows = buffer_snapshot + .buffer_rows(MultiBufferRow(0)) + .collect::>(); + let mut expected_buffer_rows = Vec::new(); + let mut expected_text = String::new(); + let mut expected_block_positions = Vec::new(); + let input_text = wraps_snapshot.text(); + for (row, input_line) in input_text.split('\n').enumerate() { + let row = row as u32; + if row > 0 { + expected_text.push('\n'); + } + + let buffer_row = input_buffer_rows[wraps_snapshot + .to_point(WrapPoint::new(row, 0), Bias::Left) + .row as usize]; + + while let Some((block_row, block)) = sorted_blocks_iter.peek() { + if *block_row == row && block.disposition() == BlockDisposition::Above { + let 
(_, block) = sorted_blocks_iter.next().unwrap(); + let height = block.height() as usize; + expected_block_positions + .push((expected_text.matches('\n').count() as u32, block)); + let text = "\n".repeat(height); + expected_text.push_str(&text); + for _ in 0..height { + expected_buffer_rows.push(None); + } + } else { + break; + } + } + + let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0; + expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row }); + expected_text.push_str(input_line); + + while let Some((block_row, block)) = sorted_blocks_iter.peek() { + if *block_row == row && block.disposition() == BlockDisposition::Below { + let (_, block) = sorted_blocks_iter.next().unwrap(); + let height = block.height() as usize; + expected_block_positions + .push((expected_text.matches('\n').count() as u32 + 1, block)); + let text = "\n".repeat(height); + expected_text.push_str(&text); + for _ in 0..height { + expected_buffer_rows.push(None); + } + } else { + break; + } + } + } + + let expected_lines = expected_text.split('\n').collect::>(); + let expected_row_count = expected_lines.len(); + for start_row in 0..expected_row_count { + let expected_text = expected_lines[start_row..].join("\n"); + let actual_text = blocks_snapshot + .chunks( + start_row as u32..blocks_snapshot.max_point().row + 1, + false, + Highlights::default(), + ) + .map(|chunk| chunk.text) + .collect::(); + assert_eq!( + actual_text, expected_text, + "incorrect text starting from row {}", + start_row + ); + assert_eq!( + blocks_snapshot + .buffer_rows(BlockRow(start_row as u32)) + .map(|row| row.map(|r| r.0)) + .collect::>(), + &expected_buffer_rows[start_row..] + ); + } + + assert_eq!( + blocks_snapshot + .blocks_in_range(0..(expected_row_count as u32)) + .map(|(row, block)| (row, block.clone().into())) + .collect::>(), + expected_block_positions + ); + + let mut expected_longest_rows = Vec::new(); + let mut longest_line_len = -1_isize; + for (row, line) in expected_lines.iter().enumerate() { + let row = row as u32; + + assert_eq!( + blocks_snapshot.line_len(BlockRow(row)), + line.len() as u32, + "invalid line len for row {}", + row + ); + + let line_char_count = line.chars().count() as isize; + match line_char_count.cmp(&longest_line_len) { + Ordering::Less => {} + Ordering::Equal => expected_longest_rows.push(row), + Ordering::Greater => { + longest_line_len = line_char_count; + expected_longest_rows.clear(); + expected_longest_rows.push(row); + } + } + } + + let longest_row = blocks_snapshot.longest_row(); + assert!( + expected_longest_rows.contains(&longest_row), + "incorrect longest row {}. 
expected {:?} with length {}", + longest_row, + expected_longest_rows, + longest_line_len, + ); + + for row in 0..=blocks_snapshot.wrap_snapshot.max_point().row() { + let wrap_point = WrapPoint::new(row, 0); + let block_point = blocks_snapshot.to_block_point(wrap_point); + assert_eq!(blocks_snapshot.to_wrap_point(block_point), wrap_point); + } + + let mut block_point = BlockPoint::new(0, 0); + for c in expected_text.chars() { + let left_point = blocks_snapshot.clip_point(block_point, Bias::Left); + let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left); + assert_eq!( + blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)), + left_point + ); + assert_eq!( + left_buffer_point, + buffer_snapshot.clip_point(left_buffer_point, Bias::Right), + "{:?} is not valid in buffer coordinates", + left_point + ); + + let right_point = blocks_snapshot.clip_point(block_point, Bias::Right); + let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right); + assert_eq!( + blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)), + right_point + ); + assert_eq!( + right_buffer_point, + buffer_snapshot.clip_point(right_buffer_point, Bias::Left), + "{:?} is not valid in buffer coordinates", + right_point + ); + + if c == '\n' { + block_point.0 += Point::new(1, 0); + } else { + block_point.column += c.len_utf8() as u32; + } + } + } + + #[derive(Debug, Eq, PartialEq, Ord, PartialOrd)] + enum ExpectedBlock { + ExcerptHeader { + height: u8, + starts_new_buffer: bool, + }, + Custom { + disposition: BlockDisposition, + id: BlockId, + height: u8, + }, + } + + impl ExpectedBlock { + fn height(&self) -> u8 { + match self { + ExpectedBlock::ExcerptHeader { height, .. } => *height, + ExpectedBlock::Custom { height, .. } => *height, + } + } + + fn disposition(&self) -> BlockDisposition { + match self { + ExpectedBlock::ExcerptHeader { .. } => BlockDisposition::Above, + ExpectedBlock::Custom { disposition, .. } => *disposition, + } + } + } + + impl From for ExpectedBlock { + fn from(block: TransformBlock) -> Self { + match block { + TransformBlock::Custom(block) => ExpectedBlock::Custom { + id: block.id, + disposition: block.disposition, + height: block.height, + }, + TransformBlock::ExcerptHeader { + height, + starts_new_buffer, + .. + } => ExpectedBlock::ExcerptHeader { + height, + starts_new_buffer, + }, + } + } + } + } + + fn init_test(cx: &mut gpui::AppContext) { + let settings = SettingsStore::test(cx); + cx.set_global(settings); + theme::init(theme::LoadThemes::JustBase, cx); + } + + impl TransformBlock { + fn as_custom(&self) -> Option<&Block> { + match self { + TransformBlock::Custom(block) => Some(block), + TransformBlock::ExcerptHeader { .. 
} => None, + } + } + } + + impl BlockSnapshot { + fn to_point(&self, point: BlockPoint, bias: Bias) -> Point { + self.wrap_snapshot.to_point(self.to_wrap_point(point), bias) + } + } +} diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs new file mode 100644 index 0000000..337395b --- /dev/null +++ b/crates/editor/src/display_map/fold_map.rs @@ -0,0 +1,1726 @@ +use super::{ + inlay_map::{InlayBufferRows, InlayChunks, InlayEdit, InlayOffset, InlayPoint, InlaySnapshot}, + Highlights, +}; +use gpui::{ElementId, HighlightStyle, Hsla}; +use language::{Chunk, Edit, Point, TextSummary}; +use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToOffset}; +use std::{ + cmp::{self, Ordering}, + iter, + ops::{Add, AddAssign, Deref, DerefMut, Range, Sub}, +}; +use sum_tree::{Bias, Cursor, FilterCursor, SumTree}; +use util::post_inc; + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct FoldPoint(pub Point); + +impl FoldPoint { + pub fn new(row: u32, column: u32) -> Self { + Self(Point::new(row, column)) + } + + pub fn row(self) -> u32 { + self.0.row + } + + pub fn column(self) -> u32 { + self.0.column + } + + pub fn row_mut(&mut self) -> &mut u32 { + &mut self.0.row + } + + #[cfg(test)] + pub fn column_mut(&mut self) -> &mut u32 { + &mut self.0.column + } + + pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { + let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(); + cursor.seek(&self, Bias::Right, &()); + let overshoot = self.0 - cursor.start().0 .0; + InlayPoint(cursor.start().1 .0 + overshoot) + } + + pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset { + let mut cursor = snapshot + .transforms + .cursor::<(FoldPoint, TransformSummary)>(); + cursor.seek(&self, Bias::Right, &()); + let overshoot = self.0 - cursor.start().1.output.lines; + let mut offset = cursor.start().1.output.len; + if !overshoot.is_zero() { + let transform = cursor.item().expect("display point out of range"); + assert!(transform.output_text.is_none()); + let end_inlay_offset = snapshot + .inlay_snapshot + .to_offset(InlayPoint(cursor.start().1.input.lines + overshoot)); + offset += end_inlay_offset.0 - cursor.start().1.input.len; + } + FoldOffset(offset) + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += &summary.output.lines; + } +} + +pub(crate) struct FoldMapWriter<'a>(&'a mut FoldMap); + +impl<'a> FoldMapWriter<'a> { + pub(crate) fn fold( + &mut self, + ranges: impl IntoIterator>, + ) -> (FoldSnapshot, Vec) { + let mut edits = Vec::new(); + let mut folds = Vec::new(); + let snapshot = self.0.snapshot.inlay_snapshot.clone(); + for range in ranges.into_iter() { + let buffer = &snapshot.buffer; + let range = range.start.to_offset(&buffer)..range.end.to_offset(&buffer); + + // Ignore any empty ranges. + if range.start == range.end { + continue; + } + + // For now, ignore any ranges that span an excerpt boundary. 
+ let fold_range = + FoldRange(buffer.anchor_after(range.start)..buffer.anchor_before(range.end)); + if fold_range.0.start.excerpt_id != fold_range.0.end.excerpt_id { + continue; + } + + folds.push(Fold { + id: FoldId(post_inc(&mut self.0.next_fold_id.0)), + range: fold_range, + }); + + let inlay_range = + snapshot.to_inlay_offset(range.start)..snapshot.to_inlay_offset(range.end); + edits.push(InlayEdit { + old: inlay_range.clone(), + new: inlay_range, + }); + } + + let buffer = &snapshot.buffer; + folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(&a.range, &b.range, buffer)); + + self.0.snapshot.folds = { + let mut new_tree = SumTree::new(); + let mut cursor = self.0.snapshot.folds.cursor::(); + for fold in folds { + new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer); + new_tree.push(fold, buffer); + } + new_tree.append(cursor.suffix(buffer), buffer); + new_tree + }; + + consolidate_inlay_edits(&mut edits); + let edits = self.0.sync(snapshot.clone(), edits); + (self.0.snapshot.clone(), edits) + } + + pub(crate) fn unfold( + &mut self, + ranges: impl IntoIterator>, + inclusive: bool, + ) -> (FoldSnapshot, Vec) { + let mut edits = Vec::new(); + let mut fold_ixs_to_delete = Vec::new(); + let snapshot = self.0.snapshot.inlay_snapshot.clone(); + let buffer = &snapshot.buffer; + for range in ranges.into_iter() { + // Remove intersecting folds and add their ranges to edits that are passed to sync. + let mut folds_cursor = + intersecting_folds(&snapshot, &self.0.snapshot.folds, range, inclusive); + while let Some(fold) = folds_cursor.item() { + let offset_range = + fold.range.start.to_offset(buffer)..fold.range.end.to_offset(buffer); + if offset_range.end > offset_range.start { + let inlay_range = snapshot.to_inlay_offset(offset_range.start) + ..snapshot.to_inlay_offset(offset_range.end); + edits.push(InlayEdit { + old: inlay_range.clone(), + new: inlay_range, + }); + } + fold_ixs_to_delete.push(*folds_cursor.start()); + folds_cursor.next(buffer); + } + } + + fold_ixs_to_delete.sort_unstable(); + fold_ixs_to_delete.dedup(); + + self.0.snapshot.folds = { + let mut cursor = self.0.snapshot.folds.cursor::(); + let mut folds = SumTree::new(); + for fold_ix in fold_ixs_to_delete { + folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); + cursor.next(buffer); + } + folds.append(cursor.suffix(buffer), buffer); + folds + }; + + consolidate_inlay_edits(&mut edits); + let edits = self.0.sync(snapshot.clone(), edits); + (self.0.snapshot.clone(), edits) + } +} + +/// Decides where the fold indicators should be; also tracks parts of a source file that are currently folded. +/// +/// See the [`display_map` module documentation](crate::display_map) for more information. 
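(For orientation, the effect of the fold transforms can be pictured with a small standalone helper — `display_text` is a hypothetical name, not part of this crate. The real snapshot never materializes a `String`; it stores input/output `TextSummary` pairs per transform in a SumTree. The sketch assumes non-overlapping, in-bounds byte ranges on char boundaries:)

    use std::ops::Range;

    // Render `text` with each folded byte range replaced by the ellipsis character.
    fn display_text(text: &str, mut folds: Vec<Range<usize>>) -> String {
        folds.sort_by_key(|fold| fold.start);
        let mut out = String::new();
        let mut offset = 0;
        for fold in folds {
            out.push_str(&text[offset..fold.start]);
            out.push('⋯');
            offset = fold.end;
        }
        out.push_str(&text[offset..]);
        out
    }

    fn main() {
        assert_eq!(display_text("fn main() { body }", vec![11..17]), "fn main() {⋯}");
    }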
+pub(crate) struct FoldMap { + snapshot: FoldSnapshot, + ellipses_color: Option, + next_fold_id: FoldId, +} + +impl FoldMap { + pub(crate) fn new(inlay_snapshot: InlaySnapshot) -> (Self, FoldSnapshot) { + let this = Self { + snapshot: FoldSnapshot { + folds: Default::default(), + transforms: SumTree::from_item( + Transform { + summary: TransformSummary { + input: inlay_snapshot.text_summary(), + output: inlay_snapshot.text_summary(), + }, + output_text: None, + }, + &(), + ), + inlay_snapshot: inlay_snapshot.clone(), + version: 0, + ellipses_color: None, + }, + ellipses_color: None, + next_fold_id: FoldId::default(), + }; + let snapshot = this.snapshot.clone(); + (this, snapshot) + } + + pub fn read( + &mut self, + inlay_snapshot: InlaySnapshot, + edits: Vec, + ) -> (FoldSnapshot, Vec) { + let edits = self.sync(inlay_snapshot, edits); + self.check_invariants(); + (self.snapshot.clone(), edits) + } + + pub fn write( + &mut self, + inlay_snapshot: InlaySnapshot, + edits: Vec, + ) -> (FoldMapWriter, FoldSnapshot, Vec) { + let (snapshot, edits) = self.read(inlay_snapshot, edits); + (FoldMapWriter(self), snapshot, edits) + } + + pub fn set_ellipses_color(&mut self, color: Hsla) -> bool { + if self.ellipses_color == Some(color) { + false + } else { + self.ellipses_color = Some(color); + true + } + } + + fn check_invariants(&self) { + if cfg!(test) { + assert_eq!( + self.snapshot.transforms.summary().input.len, + self.snapshot.inlay_snapshot.len().0, + "transform tree does not match inlay snapshot's length" + ); + + let mut folds = self.snapshot.folds.iter().peekable(); + while let Some(fold) = folds.next() { + if let Some(next_fold) = folds.peek() { + let comparison = fold + .range + .cmp(&next_fold.range, &self.snapshot.inlay_snapshot.buffer); + assert!(comparison.is_le()); + } + } + } + } + + fn sync( + &mut self, + inlay_snapshot: InlaySnapshot, + inlay_edits: Vec, + ) -> Vec { + if inlay_edits.is_empty() { + if self.snapshot.inlay_snapshot.version != inlay_snapshot.version { + self.snapshot.version += 1; + } + self.snapshot.inlay_snapshot = inlay_snapshot; + Vec::new() + } else { + let mut inlay_edits_iter = inlay_edits.iter().cloned().peekable(); + + let mut new_transforms = SumTree::new(); + let mut cursor = self.snapshot.transforms.cursor::(); + cursor.seek(&InlayOffset(0), Bias::Right, &()); + + while let Some(mut edit) = inlay_edits_iter.next() { + new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &()); + edit.new.start -= edit.old.start - *cursor.start(); + edit.old.start = *cursor.start(); + + cursor.seek(&edit.old.end, Bias::Right, &()); + cursor.next(&()); + + let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize; + loop { + edit.old.end = *cursor.start(); + + if let Some(next_edit) = inlay_edits_iter.peek() { + if next_edit.old.start > edit.old.end { + break; + } + + let next_edit = inlay_edits_iter.next().unwrap(); + delta += next_edit.new_len().0 as isize - next_edit.old_len().0 as isize; + + if next_edit.old.end >= edit.old.end { + edit.old.end = next_edit.old.end; + cursor.seek(&edit.old.end, Bias::Right, &()); + cursor.next(&()); + } + } else { + break; + } + } + + edit.new.end = + InlayOffset(((edit.new.start + edit.old_len()).0 as isize + delta) as usize); + + let anchor = inlay_snapshot + .buffer + .anchor_before(inlay_snapshot.to_buffer_offset(edit.new.start)); + let mut folds_cursor = self.snapshot.folds.cursor::(); + folds_cursor.seek( + &FoldRange(anchor..Anchor::max()), + Bias::Left, + &inlay_snapshot.buffer, + ); + + let mut folds = 
iter::from_fn({ + let inlay_snapshot = &inlay_snapshot; + move || { + let item = folds_cursor.item().map(|f| { + let buffer_start = f.range.start.to_offset(&inlay_snapshot.buffer); + let buffer_end = f.range.end.to_offset(&inlay_snapshot.buffer); + inlay_snapshot.to_inlay_offset(buffer_start) + ..inlay_snapshot.to_inlay_offset(buffer_end) + }); + folds_cursor.next(&inlay_snapshot.buffer); + item + } + }) + .peekable(); + + while folds.peek().map_or(false, |fold| fold.start < edit.new.end) { + let mut fold = folds.next().unwrap(); + let sum = new_transforms.summary(); + + assert!(fold.start.0 >= sum.input.len); + + while folds + .peek() + .map_or(false, |next_fold| next_fold.start <= fold.end) + { + let next_fold = folds.next().unwrap(); + if next_fold.end > fold.end { + fold.end = next_fold.end; + } + } + + if fold.start.0 > sum.input.len { + let text_summary = inlay_snapshot + .text_summary_for_range(InlayOffset(sum.input.len)..fold.start); + new_transforms.push( + Transform { + summary: TransformSummary { + output: text_summary.clone(), + input: text_summary, + }, + output_text: None, + }, + &(), + ); + } + + if fold.end > fold.start { + let output_text = "⋯"; + new_transforms.push( + Transform { + summary: TransformSummary { + output: TextSummary::from(output_text), + input: inlay_snapshot + .text_summary_for_range(fold.start..fold.end), + }, + output_text: Some(output_text), + }, + &(), + ); + } + } + + let sum = new_transforms.summary(); + if sum.input.len < edit.new.end.0 { + let text_summary = inlay_snapshot + .text_summary_for_range(InlayOffset(sum.input.len)..edit.new.end); + new_transforms.push( + Transform { + summary: TransformSummary { + output: text_summary.clone(), + input: text_summary, + }, + output_text: None, + }, + &(), + ); + } + } + + new_transforms.append(cursor.suffix(&()), &()); + if new_transforms.is_empty() { + let text_summary = inlay_snapshot.text_summary(); + new_transforms.push( + Transform { + summary: TransformSummary { + output: text_summary.clone(), + input: text_summary, + }, + output_text: None, + }, + &(), + ); + } + + drop(cursor); + + let mut fold_edits = Vec::with_capacity(inlay_edits.len()); + { + let mut old_transforms = self + .snapshot + .transforms + .cursor::<(InlayOffset, FoldOffset)>(); + let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(); + + for mut edit in inlay_edits { + old_transforms.seek(&edit.old.start, Bias::Left, &()); + if old_transforms.item().map_or(false, |t| t.is_fold()) { + edit.old.start = old_transforms.start().0; + } + let old_start = + old_transforms.start().1 .0 + (edit.old.start - old_transforms.start().0).0; + + old_transforms.seek_forward(&edit.old.end, Bias::Right, &()); + if old_transforms.item().map_or(false, |t| t.is_fold()) { + old_transforms.next(&()); + edit.old.end = old_transforms.start().0; + } + let old_end = + old_transforms.start().1 .0 + (edit.old.end - old_transforms.start().0).0; + + new_transforms.seek(&edit.new.start, Bias::Left, &()); + if new_transforms.item().map_or(false, |t| t.is_fold()) { + edit.new.start = new_transforms.start().0; + } + let new_start = + new_transforms.start().1 .0 + (edit.new.start - new_transforms.start().0).0; + + new_transforms.seek_forward(&edit.new.end, Bias::Right, &()); + if new_transforms.item().map_or(false, |t| t.is_fold()) { + new_transforms.next(&()); + edit.new.end = new_transforms.start().0; + } + let new_end = + new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0).0; + + fold_edits.push(FoldEdit { + old: 
FoldOffset(old_start)..FoldOffset(old_end), + new: FoldOffset(new_start)..FoldOffset(new_end), + }); + } + + consolidate_fold_edits(&mut fold_edits); + } + + self.snapshot.transforms = new_transforms; + self.snapshot.inlay_snapshot = inlay_snapshot; + self.snapshot.version += 1; + fold_edits + } + } +} + +#[derive(Clone)] +pub struct FoldSnapshot { + transforms: SumTree, + folds: SumTree, + pub inlay_snapshot: InlaySnapshot, + pub version: usize, + pub ellipses_color: Option, +} + +impl FoldSnapshot { + #[cfg(test)] + pub fn text(&self) -> String { + self.chunks(FoldOffset(0)..self.len(), false, Highlights::default()) + .map(|c| c.text) + .collect() + } + + #[cfg(test)] + pub fn fold_count(&self) -> usize { + self.folds.items(&self.inlay_snapshot.buffer).len() + } + + pub fn text_summary_for_range(&self, range: Range) -> TextSummary { + let mut summary = TextSummary::default(); + + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + cursor.seek(&range.start, Bias::Right, &()); + if let Some(transform) = cursor.item() { + let start_in_transform = range.start.0 - cursor.start().0 .0; + let end_in_transform = cmp::min(range.end, cursor.end(&()).0).0 - cursor.start().0 .0; + if let Some(output_text) = transform.output_text { + summary = TextSummary::from( + &output_text + [start_in_transform.column as usize..end_in_transform.column as usize], + ); + } else { + let inlay_start = self + .inlay_snapshot + .to_offset(InlayPoint(cursor.start().1 .0 + start_in_transform)); + let inlay_end = self + .inlay_snapshot + .to_offset(InlayPoint(cursor.start().1 .0 + end_in_transform)); + summary = self + .inlay_snapshot + .text_summary_for_range(inlay_start..inlay_end); + } + } + + if range.end > cursor.end(&()).0 { + cursor.next(&()); + summary += &cursor + .summary::<_, TransformSummary>(&range.end, Bias::Right, &()) + .output; + if let Some(transform) = cursor.item() { + let end_in_transform = range.end.0 - cursor.start().0 .0; + if let Some(output_text) = transform.output_text { + summary += TextSummary::from(&output_text[..end_in_transform.column as usize]); + } else { + let inlay_start = self.inlay_snapshot.to_offset(cursor.start().1); + let inlay_end = self + .inlay_snapshot + .to_offset(InlayPoint(cursor.start().1 .0 + end_in_transform)); + summary += self + .inlay_snapshot + .text_summary_for_range(inlay_start..inlay_end); + } + } + } + + summary + } + + pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { + let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(); + cursor.seek(&point, Bias::Right, &()); + if cursor.item().map_or(false, |t| t.is_fold()) { + if bias == Bias::Left || point == cursor.start().0 { + cursor.start().1 + } else { + cursor.end(&()).1 + } + } else { + let overshoot = point.0 - cursor.start().0 .0; + FoldPoint(cmp::min( + cursor.start().1 .0 + overshoot, + cursor.end(&()).1 .0, + )) + } + } + + pub fn len(&self) -> FoldOffset { + FoldOffset(self.transforms.summary().output.len) + } + + pub fn line_len(&self, row: u32) -> u32 { + let line_start = FoldPoint::new(row, 0).to_offset(self).0; + let line_end = if row >= self.max_point().row() { + self.len().0 + } else { + FoldPoint::new(row + 1, 0).to_offset(self).0 - 1 + }; + (line_end - line_start) as u32 + } + + pub fn buffer_rows(&self, start_row: u32) -> FoldBufferRows { + if start_row > self.transforms.summary().output.lines.row { + panic!("invalid display row {}", start_row); + } + + let fold_point = FoldPoint::new(start_row, 0); + let mut cursor = 
self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + cursor.seek(&fold_point, Bias::Left, &()); + + let overshoot = fold_point.0 - cursor.start().0 .0; + let inlay_point = InlayPoint(cursor.start().1 .0 + overshoot); + let input_buffer_rows = self.inlay_snapshot.buffer_rows(inlay_point.row()); + + FoldBufferRows { + fold_point, + input_buffer_rows, + cursor, + } + } + + pub fn max_point(&self) -> FoldPoint { + FoldPoint(self.transforms.summary().output.lines) + } + + #[cfg(test)] + pub fn longest_row(&self) -> u32 { + self.transforms.summary().output.longest_row + } + + pub fn folds_in_range(&self, range: Range) -> impl Iterator + where + T: ToOffset, + { + let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false); + iter::from_fn(move || { + let item = folds.item(); + folds.next(&self.inlay_snapshot.buffer); + item + }) + } + + pub fn intersects_fold(&self, offset: T) -> bool + where + T: ToOffset, + { + let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); + let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); + let mut cursor = self.transforms.cursor::(); + cursor.seek(&inlay_offset, Bias::Right, &()); + cursor.item().map_or(false, |t| t.output_text.is_some()) + } + + pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool { + let mut inlay_point = self + .inlay_snapshot + .to_inlay_point(Point::new(buffer_row.0, 0)); + let mut cursor = self.transforms.cursor::(); + cursor.seek(&inlay_point, Bias::Right, &()); + loop { + match cursor.item() { + Some(transform) => { + let buffer_point = self.inlay_snapshot.to_buffer_point(inlay_point); + if buffer_point.row != buffer_row.0 { + return false; + } else if transform.output_text.is_some() { + return true; + } + } + None => return false, + } + + if cursor.end(&()).row() == inlay_point.row() { + cursor.next(&()); + } else { + inlay_point.0 += Point::new(1, 0); + cursor.seek(&inlay_point, Bias::Right, &()); + } + } + } + + pub(crate) fn chunks<'a>( + &'a self, + range: Range, + language_aware: bool, + highlights: Highlights<'a>, + ) -> FoldChunks<'a> { + let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(); + + let inlay_end = { + transform_cursor.seek(&range.end, Bias::Right, &()); + let overshoot = range.end.0 - transform_cursor.start().0 .0; + transform_cursor.start().1 + InlayOffset(overshoot) + }; + + let inlay_start = { + transform_cursor.seek(&range.start, Bias::Right, &()); + let overshoot = range.start.0 - transform_cursor.start().0 .0; + transform_cursor.start().1 + InlayOffset(overshoot) + }; + + FoldChunks { + transform_cursor, + inlay_chunks: self.inlay_snapshot.chunks( + inlay_start..inlay_end, + language_aware, + highlights, + ), + inlay_chunk: None, + inlay_offset: inlay_start, + output_offset: range.start.0, + max_output_offset: range.end.0, + ellipses_color: self.ellipses_color, + } + } + + pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator { + self.chunks( + start.to_offset(self)..self.len(), + false, + Highlights::default(), + ) + .flat_map(|chunk| chunk.text.chars()) + } + + #[cfg(test)] + pub fn clip_offset(&self, offset: FoldOffset, bias: Bias) -> FoldOffset { + if offset > self.len() { + self.len() + } else { + self.clip_point(offset.to_point(self), bias).to_offset(self) + } + } + + pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + cursor.seek(&point, Bias::Right, &()); + if let Some(transform) = cursor.item() { + let 
transform_start = cursor.start().0 .0; + if transform.output_text.is_some() { + if point.0 == transform_start || matches!(bias, Bias::Left) { + FoldPoint(transform_start) + } else { + FoldPoint(cursor.end(&()).0 .0) + } + } else { + let overshoot = InlayPoint(point.0 - transform_start); + let inlay_point = cursor.start().1 + overshoot; + let clipped_inlay_point = self.inlay_snapshot.clip_point(inlay_point, bias); + FoldPoint(cursor.start().0 .0 + (clipped_inlay_point - cursor.start().1).0) + } + } else { + FoldPoint(self.transforms.summary().output.lines) + } + } +} + +fn intersecting_folds<'a, T>( + inlay_snapshot: &'a InlaySnapshot, + folds: &'a SumTree, + range: Range, + inclusive: bool, +) -> FilterCursor<'a, impl 'a + FnMut(&FoldSummary) -> bool, Fold, usize> +where + T: ToOffset, +{ + let buffer = &inlay_snapshot.buffer; + let start = buffer.anchor_before(range.start.to_offset(buffer)); + let end = buffer.anchor_after(range.end.to_offset(buffer)); + let mut cursor = folds.filter::<_, usize>(move |summary| { + let start_cmp = start.cmp(&summary.max_end, buffer); + let end_cmp = end.cmp(&summary.min_start, buffer); + + if inclusive { + start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal + } else { + start_cmp == Ordering::Less && end_cmp == Ordering::Greater + } + }); + cursor.next(buffer); + cursor +} + +fn consolidate_inlay_edits(edits: &mut Vec) { + edits.sort_unstable_by(|a, b| { + a.old + .start + .cmp(&b.old.start) + .then_with(|| b.old.end.cmp(&a.old.end)) + }); + + let mut i = 1; + while i < edits.len() { + let edit = edits[i].clone(); + let prev_edit = &mut edits[i - 1]; + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = prev_edit.old.end.max(edit.old.end); + prev_edit.new.start = prev_edit.new.start.min(edit.new.start); + prev_edit.new.end = prev_edit.new.end.max(edit.new.end); + edits.remove(i); + continue; + } + i += 1; + } +} + +fn consolidate_fold_edits(edits: &mut Vec) { + edits.sort_unstable_by(|a, b| { + a.old + .start + .cmp(&b.old.start) + .then_with(|| b.old.end.cmp(&a.old.end)) + }); + + let mut i = 1; + while i < edits.len() { + let edit = edits[i].clone(); + let prev_edit = &mut edits[i - 1]; + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = prev_edit.old.end.max(edit.old.end); + prev_edit.new.start = prev_edit.new.start.min(edit.new.start); + prev_edit.new.end = prev_edit.new.end.max(edit.new.end); + edits.remove(i); + continue; + } + i += 1; + } +} + +#[derive(Clone, Debug, Default, Eq, PartialEq)] +struct Transform { + summary: TransformSummary, + output_text: Option<&'static str>, +} + +impl Transform { + fn is_fold(&self) -> bool { + self.output_text.is_some() + } +} + +#[derive(Clone, Debug, Default, Eq, PartialEq)] +struct TransformSummary { + output: TextSummary, + input: TextSummary, +} + +impl sum_tree::Item for Transform { + type Summary = TransformSummary; + + fn summary(&self) -> Self::Summary { + self.summary.clone() + } +} + +impl sum_tree::Summary for TransformSummary { + type Context = (); + + fn add_summary(&mut self, other: &Self, _: &()) { + self.input += &other.input; + self.output += &other.output; + } +} + +#[derive(Copy, Clone, Eq, PartialEq, Debug, Default)] +pub struct FoldId(usize); + +impl Into for FoldId { + fn into(self) -> ElementId { + ElementId::Integer(self.0) + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Fold { + pub id: FoldId, + pub range: FoldRange, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct FoldRange(Range); + +impl Deref for FoldRange { + type Target = Range; 
+ + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for FoldRange { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Default for FoldRange { + fn default() -> Self { + Self(Anchor::min()..Anchor::max()) + } +} + +impl sum_tree::Item for Fold { + type Summary = FoldSummary; + + fn summary(&self) -> Self::Summary { + FoldSummary { + start: self.range.start, + end: self.range.end, + min_start: self.range.start, + max_end: self.range.end, + count: 1, + } + } +} + +#[derive(Clone, Debug)] +pub struct FoldSummary { + start: Anchor, + end: Anchor, + min_start: Anchor, + max_end: Anchor, + count: usize, +} + +impl Default for FoldSummary { + fn default() -> Self { + Self { + start: Anchor::min(), + end: Anchor::max(), + min_start: Anchor::max(), + max_end: Anchor::min(), + count: 0, + } + } +} + +impl sum_tree::Summary for FoldSummary { + type Context = MultiBufferSnapshot; + + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { + if other.min_start.cmp(&self.min_start, buffer) == Ordering::Less { + self.min_start = other.min_start; + } + if other.max_end.cmp(&self.max_end, buffer) == Ordering::Greater { + self.max_end = other.max_end; + } + + #[cfg(debug_assertions)] + { + let start_comparison = self.start.cmp(&other.start, buffer); + assert!(start_comparison <= Ordering::Equal); + if start_comparison == Ordering::Equal { + assert!(self.end.cmp(&other.end, buffer) >= Ordering::Equal); + } + } + + self.start = other.start; + self.end = other.end; + self.count += other.count; + } +} + +impl<'a> sum_tree::Dimension<'a, FoldSummary> for FoldRange { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { + self.0.start = summary.start; + self.0.end = summary.end; + } +} + +impl<'a> sum_tree::SeekTarget<'a, FoldSummary, FoldRange> for FoldRange { + fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering { + self.0.cmp(&other.0, buffer) + } +} + +impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { + *self += summary.count; + } +} + +#[derive(Clone)] +pub struct FoldBufferRows<'a> { + cursor: Cursor<'a, Transform, (FoldPoint, InlayPoint)>, + input_buffer_rows: InlayBufferRows<'a>, + fold_point: FoldPoint, +} + +impl<'a> Iterator for FoldBufferRows<'a> { + type Item = Option; + + fn next(&mut self) -> Option { + let mut traversed_fold = false; + while self.fold_point > self.cursor.end(&()).0 { + self.cursor.next(&()); + traversed_fold = true; + if self.cursor.item().is_none() { + break; + } + } + + if self.cursor.item().is_some() { + if traversed_fold { + self.input_buffer_rows.seek(self.cursor.start().1.row()); + self.input_buffer_rows.next(); + } + *self.fold_point.row_mut() += 1; + self.input_buffer_rows.next() + } else { + None + } + } +} + +pub struct FoldChunks<'a> { + transform_cursor: Cursor<'a, Transform, (FoldOffset, InlayOffset)>, + inlay_chunks: InlayChunks<'a>, + inlay_chunk: Option<(InlayOffset, Chunk<'a>)>, + inlay_offset: InlayOffset, + output_offset: usize, + max_output_offset: usize, + ellipses_color: Option, +} + +impl<'a> Iterator for FoldChunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> Option { + if self.output_offset >= self.max_output_offset { + return None; + } + + let transform = self.transform_cursor.item()?; + + // If we're in a fold, then return the fold's display text and + // advance the transform and buffer cursors to the end of the fold. 
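+ // The fold placeholder (e.g. "⋯") is emitted as a single chunk; when `ellipses_color` is set, it is used as that chunk's highlight color.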
+ if let Some(output_text) = transform.output_text { + self.inlay_chunk.take(); + self.inlay_offset += InlayOffset(transform.summary.input.len); + self.inlay_chunks.seek(self.inlay_offset); + + while self.inlay_offset >= self.transform_cursor.end(&()).1 + && self.transform_cursor.item().is_some() + { + self.transform_cursor.next(&()); + } + + self.output_offset += output_text.len(); + return Some(Chunk { + text: output_text, + highlight_style: self.ellipses_color.map(|color| HighlightStyle { + color: Some(color), + ..Default::default() + }), + ..Default::default() + }); + } + + // Retrieve a chunk from the current location in the buffer. + if self.inlay_chunk.is_none() { + let chunk_offset = self.inlay_chunks.offset(); + self.inlay_chunk = self.inlay_chunks.next().map(|chunk| (chunk_offset, chunk)); + } + + // Otherwise, take a chunk from the buffer's text. + if let Some((buffer_chunk_start, mut chunk)) = self.inlay_chunk { + let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len()); + let transform_end = self.transform_cursor.end(&()).1; + let chunk_end = buffer_chunk_end.min(transform_end); + + chunk.text = &chunk.text + [(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0]; + + if chunk_end == transform_end { + self.transform_cursor.next(&()); + } else if chunk_end == buffer_chunk_end { + self.inlay_chunk.take(); + } + + self.inlay_offset = chunk_end; + self.output_offset += chunk.text.len(); + return Some(chunk); + } + + None + } +} + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct FoldOffset(pub usize); + +impl FoldOffset { + pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint { + let mut cursor = snapshot + .transforms + .cursor::<(FoldOffset, TransformSummary)>(); + cursor.seek(&self, Bias::Right, &()); + let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { + Point::new(0, (self.0 - cursor.start().0 .0) as u32) + } else { + let inlay_offset = cursor.start().1.input.len + self.0 - cursor.start().0 .0; + let inlay_point = snapshot.inlay_snapshot.to_point(InlayOffset(inlay_offset)); + inlay_point.0 - cursor.start().1.input.lines + }; + FoldPoint(cursor.start().1.output.lines + overshoot) + } + + #[cfg(test)] + pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { + let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(); + cursor.seek(&self, Bias::Right, &()); + let overshoot = self.0 - cursor.start().0 .0; + InlayOffset(cursor.start().1 .0 + overshoot) + } +} + +impl Add for FoldOffset { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl AddAssign for FoldOffset { + fn add_assign(&mut self, rhs: Self) { + self.0 += rhs.0; + } +} + +impl Sub for FoldOffset { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += &summary.output.len; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += &summary.input.lines; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += &summary.input.len; + } +} + +pub type FoldEdit = Edit; + +#[cfg(test)] +mod tests { + use super::*; + use 
crate::{display_map::inlay_map::InlayMap, MultiBuffer, ToPoint}; + use collections::HashSet; + use rand::prelude::*; + use settings::SettingsStore; + use std::{env, mem}; + use text::Patch; + use util::test::sample_text; + use util::RandomCharIter; + use Bias::{Left, Right}; + + #[gpui::test] + fn test_basic_folds(cx: &mut gpui::AppContext) { + init_test(cx); + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut writer, _, _) = map.write(inlay_snapshot, vec![]); + let (snapshot2, edits) = writer.fold(vec![ + Point::new(0, 2)..Point::new(2, 2), + Point::new(2, 4)..Point::new(4, 1), + ]); + assert_eq!(snapshot2.text(), "aa⋯cc⋯eeeee"); + assert_eq!( + edits, + &[ + FoldEdit { + old: FoldOffset(2)..FoldOffset(16), + new: FoldOffset(2)..FoldOffset(5), + }, + FoldEdit { + old: FoldOffset(18)..FoldOffset(29), + new: FoldOffset(7)..FoldOffset(10) + }, + ] + ); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit( + vec![ + (Point::new(0, 0)..Point::new(0, 1), "123"), + (Point::new(2, 3)..Point::new(2, 3), "123"), + ], + None, + cx, + ); + buffer.snapshot(cx) + }); + + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot, subscription.consume().into_inner()); + let (snapshot3, edits) = map.read(inlay_snapshot, inlay_edits); + assert_eq!(snapshot3.text(), "123a⋯c123c⋯eeeee"); + assert_eq!( + edits, + &[ + FoldEdit { + old: FoldOffset(0)..FoldOffset(1), + new: FoldOffset(0)..FoldOffset(3), + }, + FoldEdit { + old: FoldOffset(6)..FoldOffset(6), + new: FoldOffset(8)..FoldOffset(11), + }, + ] + ); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(2, 6)..Point::new(4, 3), "456")], None, cx); + buffer.snapshot(cx) + }); + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot, subscription.consume().into_inner()); + let (snapshot4, _) = map.read(inlay_snapshot.clone(), inlay_edits); + assert_eq!(snapshot4.text(), "123a⋯c123456eee"); + + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.unfold(Some(Point::new(0, 4)..Point::new(0, 4)), false); + let (snapshot5, _) = map.read(inlay_snapshot.clone(), vec![]); + assert_eq!(snapshot5.text(), "123a⋯c123456eee"); + + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.unfold(Some(Point::new(0, 4)..Point::new(0, 4)), true); + let (snapshot6, _) = map.read(inlay_snapshot, vec![]); + assert_eq!(snapshot6.text(), "123aaaaa\nbbbbbb\nccc123456eee"); + } + + #[gpui::test] + fn test_adjacent_folds(cx: &mut gpui::AppContext) { + init_test(cx); + let buffer = MultiBuffer::build_simple("abcdefghijkl", cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + + { + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![5..8]); + let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + assert_eq!(snapshot.text(), "abcde⋯ijkl"); + + // Create an fold adjacent to the start of the first fold. 
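+ // (A fold that touches an existing fold is expected to merge with it in the rendered text below.)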
+ let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![0..1, 2..5]); + let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + assert_eq!(snapshot.text(), "⋯b⋯ijkl"); + + // Create an fold adjacent to the end of the first fold. + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![11..11, 8..10]); + let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + assert_eq!(snapshot.text(), "⋯b⋯kl"); + } + + { + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + // Create two adjacent folds. + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![0..2, 2..5]); + let (snapshot, _) = map.read(inlay_snapshot, vec![]); + assert_eq!(snapshot.text(), "⋯fghijkl"); + + // Edit within one of the folds. + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(0..1, "12345")], None, cx); + buffer.snapshot(cx) + }); + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot, subscription.consume().into_inner()); + let (snapshot, _) = map.read(inlay_snapshot, inlay_edits); + assert_eq!(snapshot.text(), "12345⋯fghijkl"); + } + } + + #[gpui::test] + fn test_overlapping_folds(cx: &mut gpui::AppContext) { + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![ + Point::new(0, 2)..Point::new(2, 2), + Point::new(0, 4)..Point::new(1, 0), + Point::new(1, 2)..Point::new(3, 2), + Point::new(3, 1)..Point::new(4, 1), + ]); + let (snapshot, _) = map.read(inlay_snapshot, vec![]); + assert_eq!(snapshot.text(), "aa⋯eeeee"); + } + + #[gpui::test] + fn test_merging_folds_via_edit(cx: &mut gpui::AppContext) { + init_test(cx); + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![ + Point::new(0, 2)..Point::new(2, 2), + Point::new(3, 1)..Point::new(4, 1), + ]); + let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + assert_eq!(snapshot.text(), "aa⋯cccc\nd⋯eeeee"); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(2, 2)..Point::new(3, 1), "")], None, cx); + buffer.snapshot(cx) + }); + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot, subscription.consume().into_inner()); + let (snapshot, _) = map.read(inlay_snapshot, inlay_edits); + assert_eq!(snapshot.text(), "aa⋯eeeee"); + } + + #[gpui::test] + fn test_folds_in_range(cx: &mut gpui::AppContext) { + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![ + Point::new(0, 2)..Point::new(2, 2), + Point::new(0, 4)..Point::new(1, 0), + Point::new(1, 2)..Point::new(3, 2), + Point::new(3, 1)..Point::new(4, 1), + ]); + let (snapshot, _) = 
map.read(inlay_snapshot.clone(), vec![]); + let fold_ranges = snapshot + .folds_in_range(Point::new(1, 0)..Point::new(1, 3)) + .map(|fold| { + fold.range.start.to_point(&buffer_snapshot) + ..fold.range.end.to_point(&buffer_snapshot) + }) + .collect::>(); + assert_eq!( + fold_ranges, + vec![ + Point::new(0, 2)..Point::new(2, 2), + Point::new(1, 2)..Point::new(3, 2) + ] + ); + } + + #[gpui::test(iterations = 100)] + fn test_random_folds(cx: &mut gpui::AppContext, mut rng: StdRng) { + init_test(cx); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + let buffer = if rng.gen() { + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(&mut rng, cx) + }; + let mut buffer_snapshot = buffer.read(cx).snapshot(cx); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut initial_snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); + let mut snapshot_edits = Vec::new(); + + let mut next_inlay_id = 0; + for _ in 0..operations { + log::info!("text: {:?}", buffer_snapshot.text()); + let mut buffer_edits = Vec::new(); + let mut inlay_edits = Vec::new(); + match rng.gen_range(0..=100) { + 0..=39 => { + snapshot_edits.extend(map.randomly_mutate(&mut rng)); + } + 40..=59 => { + let (_, edits) = inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng); + inlay_edits = edits; + } + _ => buffer.update(cx, |buffer, cx| { + let subscription = buffer.subscribe(); + let edit_count = rng.gen_range(1..=5); + buffer.randomly_mutate(&mut rng, edit_count, cx); + buffer_snapshot = buffer.snapshot(cx); + let edits = subscription.consume().into_inner(); + log::info!("editing {:?}", edits); + buffer_edits.extend(edits); + }), + }; + + let (inlay_snapshot, new_inlay_edits) = + inlay_map.sync(buffer_snapshot.clone(), buffer_edits); + log::info!("inlay text {:?}", inlay_snapshot.text()); + + let inlay_edits = Patch::new(inlay_edits) + .compose(new_inlay_edits) + .into_inner(); + let (snapshot, edits) = map.read(inlay_snapshot.clone(), inlay_edits); + snapshot_edits.push((snapshot.clone(), edits)); + + let mut expected_text: String = inlay_snapshot.text().to_string(); + for fold_range in map.merged_fold_ranges().into_iter().rev() { + let fold_inlay_start = inlay_snapshot.to_inlay_offset(fold_range.start); + let fold_inlay_end = inlay_snapshot.to_inlay_offset(fold_range.end); + expected_text.replace_range(fold_inlay_start.0..fold_inlay_end.0, "⋯"); + } + + assert_eq!(snapshot.text(), expected_text); + log::info!( + "fold text {:?} ({} lines)", + expected_text, + expected_text.matches('\n').count() + 1 + ); + + let mut prev_row = 0; + let mut expected_buffer_rows = Vec::new(); + for fold_range in map.merged_fold_ranges().into_iter() { + let fold_start = inlay_snapshot + .to_point(inlay_snapshot.to_inlay_offset(fold_range.start)) + .row(); + let fold_end = inlay_snapshot + .to_point(inlay_snapshot.to_inlay_offset(fold_range.end)) + .row(); + expected_buffer_rows.extend( + inlay_snapshot + .buffer_rows(prev_row) + .take((1 + fold_start - prev_row) as usize), + ); + prev_row = 1 + fold_end; + } + expected_buffer_rows.extend(inlay_snapshot.buffer_rows(prev_row)); + + assert_eq!( + expected_buffer_rows.len(), + expected_text.matches('\n').count() + 1, + "wrong expected buffer rows {:?}. 
text: {:?}", + expected_buffer_rows, + expected_text + ); + + for (output_row, line) in expected_text.lines().enumerate() { + let line_len = snapshot.line_len(output_row as u32); + assert_eq!(line_len, line.len() as u32); + } + + let longest_row = snapshot.longest_row(); + let longest_char_column = expected_text + .split('\n') + .nth(longest_row as usize) + .unwrap() + .chars() + .count(); + let mut fold_point = FoldPoint::new(0, 0); + let mut fold_offset = FoldOffset(0); + let mut char_column = 0; + for c in expected_text.chars() { + let inlay_point = fold_point.to_inlay_point(&snapshot); + let inlay_offset = fold_offset.to_inlay_offset(&snapshot); + assert_eq!( + snapshot.to_fold_point(inlay_point, Right), + fold_point, + "{:?} -> fold point", + inlay_point, + ); + assert_eq!( + inlay_snapshot.to_offset(inlay_point), + inlay_offset, + "inlay_snapshot.to_offset({:?})", + inlay_point, + ); + assert_eq!( + fold_point.to_offset(&snapshot), + fold_offset, + "fold_point.to_offset({:?})", + fold_point, + ); + + if c == '\n' { + *fold_point.row_mut() += 1; + *fold_point.column_mut() = 0; + char_column = 0; + } else { + *fold_point.column_mut() += c.len_utf8() as u32; + char_column += 1; + } + fold_offset.0 += c.len_utf8(); + if char_column > longest_char_column { + panic!( + "invalid longest row {:?} (chars {}), found row {:?} (chars: {})", + longest_row, + longest_char_column, + fold_point.row(), + char_column + ); + } + } + + for _ in 0..5 { + let mut start = snapshot + .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Left); + let mut end = snapshot + .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Right); + if start > end { + mem::swap(&mut start, &mut end); + } + + let text = &expected_text[start.0..end.0]; + assert_eq!( + snapshot + .chunks(start..end, false, Highlights::default()) + .map(|c| c.text) + .collect::(), + text, + ); + } + + let mut fold_row = 0; + while fold_row < expected_buffer_rows.len() as u32 { + assert_eq!( + snapshot.buffer_rows(fold_row).collect::>(), + expected_buffer_rows[(fold_row as usize)..], + "wrong buffer rows starting at fold row {}", + fold_row, + ); + fold_row += 1; + } + + let folded_buffer_rows = map + .merged_fold_ranges() + .iter() + .flat_map(|range| { + let start_row = range.start.to_point(&buffer_snapshot).row; + let end = range.end.to_point(&buffer_snapshot); + if end.column == 0 { + start_row..end.row + } else { + start_row..end.row + 1 + } + }) + .collect::>(); + for row in 0..=buffer_snapshot.max_point().row { + assert_eq!( + snapshot.is_line_folded(MultiBufferRow(row)), + folded_buffer_rows.contains(&row), + "expected buffer row {}{} to be folded", + row, + if folded_buffer_rows.contains(&row) { + "" + } else { + " not" + } + ); + } + + for _ in 0..5 { + let end = + buffer_snapshot.clip_offset(rng.gen_range(0..=buffer_snapshot.len()), Right); + let start = buffer_snapshot.clip_offset(rng.gen_range(0..=end), Left); + let expected_folds = map + .snapshot + .folds + .items(&buffer_snapshot) + .into_iter() + .filter(|fold| { + let start = buffer_snapshot.anchor_before(start); + let end = buffer_snapshot.anchor_after(end); + start.cmp(&fold.range.end, &buffer_snapshot) == Ordering::Less + && end.cmp(&fold.range.start, &buffer_snapshot) == Ordering::Greater + }) + .collect::>(); + + assert_eq!( + snapshot + .folds_in_range(start..end) + .cloned() + .collect::>(), + expected_folds + ); + } + + let text = snapshot.text(); + for _ in 0..5 { + let start_row = rng.gen_range(0..=snapshot.max_point().row()); + let 
start_column = rng.gen_range(0..=snapshot.line_len(start_row)); + let end_row = rng.gen_range(0..=snapshot.max_point().row()); + let end_column = rng.gen_range(0..=snapshot.line_len(end_row)); + let mut start = + snapshot.clip_point(FoldPoint::new(start_row, start_column), Bias::Left); + let mut end = snapshot.clip_point(FoldPoint::new(end_row, end_column), Bias::Right); + if start > end { + mem::swap(&mut start, &mut end); + } + + let lines = start..end; + let bytes = start.to_offset(&snapshot)..end.to_offset(&snapshot); + assert_eq!( + snapshot.text_summary_for_range(lines), + TextSummary::from(&text[bytes.start.0..bytes.end.0]) + ) + } + + let mut text = initial_snapshot.text(); + for (snapshot, edits) in snapshot_edits.drain(..) { + let new_text = snapshot.text(); + for edit in edits { + let old_bytes = edit.new.start.0..edit.new.start.0 + edit.old_len().0; + let new_bytes = edit.new.start.0..edit.new.end.0; + text.replace_range(old_bytes, &new_text[new_bytes]); + } + + assert_eq!(text, new_text); + initial_snapshot = snapshot; + } + } + } + + #[gpui::test] + fn test_buffer_rows(cx: &mut gpui::AppContext) { + let text = sample_text(6, 6, 'a') + "\n"; + let buffer = MultiBuffer::build_simple(&text, cx); + + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); + let mut map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![ + Point::new(0, 2)..Point::new(2, 2), + Point::new(3, 1)..Point::new(4, 1), + ]); + + let (snapshot, _) = map.read(inlay_snapshot, vec![]); + assert_eq!(snapshot.text(), "aa⋯cccc\nd⋯eeeee\nffffff\n"); + assert_eq!( + snapshot.buffer_rows(0).collect::>(), + [Some(0), Some(3), Some(5), Some(6)] + ); + assert_eq!(snapshot.buffer_rows(3).collect::>(), [Some(6)]); + } + + fn init_test(cx: &mut gpui::AppContext) { + let store = SettingsStore::test(cx); + cx.set_global(store); + } + + impl FoldMap { + fn merged_fold_ranges(&self) -> Vec> { + let inlay_snapshot = self.snapshot.inlay_snapshot.clone(); + let buffer = &inlay_snapshot.buffer; + let mut folds = self.snapshot.folds.items(buffer); + // Ensure sorting doesn't change how folds get merged and displayed. 
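+ // Overlapping or touching ranges are then coalesced into single ranges, and empty ranges are dropped.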
+ folds.sort_by(|a, b| a.range.cmp(&b.range, buffer)); + let mut fold_ranges = folds + .iter() + .map(|fold| fold.range.start.to_offset(buffer)..fold.range.end.to_offset(buffer)) + .peekable(); + + let mut merged_ranges = Vec::new(); + while let Some(mut fold_range) = fold_ranges.next() { + while let Some(next_range) = fold_ranges.peek() { + if fold_range.end >= next_range.start { + if next_range.end > fold_range.end { + fold_range.end = next_range.end; + } + fold_ranges.next(); + } else { + break; + } + } + if fold_range.end > fold_range.start { + merged_ranges.push(fold_range); + } + } + merged_ranges + } + + pub fn randomly_mutate( + &mut self, + rng: &mut impl Rng, + ) -> Vec<(FoldSnapshot, Vec)> { + let mut snapshot_edits = Vec::new(); + match rng.gen_range(0..=100) { + 0..=39 if !self.snapshot.folds.is_empty() => { + let inlay_snapshot = self.snapshot.inlay_snapshot.clone(); + let buffer = &inlay_snapshot.buffer; + let mut to_unfold = Vec::new(); + for _ in 0..rng.gen_range(1..=3) { + let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); + let start = buffer.clip_offset(rng.gen_range(0..=end), Left); + to_unfold.push(start..end); + } + log::info!("unfolding {:?}", to_unfold); + let (mut writer, snapshot, edits) = self.write(inlay_snapshot, vec![]); + snapshot_edits.push((snapshot, edits)); + let (snapshot, edits) = writer.fold(to_unfold); + snapshot_edits.push((snapshot, edits)); + } + _ => { + let inlay_snapshot = self.snapshot.inlay_snapshot.clone(); + let buffer = &inlay_snapshot.buffer; + let mut to_fold = Vec::new(); + for _ in 0..rng.gen_range(1..=2) { + let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); + let start = buffer.clip_offset(rng.gen_range(0..=end), Left); + to_fold.push(start..end); + } + log::info!("folding {:?}", to_fold); + let (mut writer, snapshot, edits) = self.write(inlay_snapshot, vec![]); + snapshot_edits.push((snapshot, edits)); + let (snapshot, edits) = writer.fold(to_fold); + snapshot_edits.push((snapshot, edits)); + } + } + snapshot_edits + } + } +} diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs new file mode 100644 index 0000000..214b7aa --- /dev/null +++ b/crates/editor/src/display_map/inlay_map.rs @@ -0,0 +1,1900 @@ +use crate::{HighlightStyles, InlayId}; +use collections::{BTreeMap, BTreeSet}; +use gpui::HighlightStyle; +use language::{Chunk, Edit, Point, TextSummary}; +use multi_buffer::{ + Anchor, MultiBufferChunks, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, ToOffset, +}; +use std::{ + any::TypeId, + cmp, + iter::Peekable, + ops::{Add, AddAssign, Range, Sub, SubAssign}, + sync::Arc, + vec, +}; +use sum_tree::{Bias, Cursor, SumTree, TreeMap}; +use text::{Patch, Rope}; + +use super::Highlights; + +/// Decides where the [`Inlay`]s should be displayed. +/// +/// See the [`display_map` module documentation](crate::display_map) for more information. 
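+/// +/// The `inlays` vector is kept sorted by anchor position, so `sync` and `splice` can locate the affected inlays with a binary search.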
+pub struct InlayMap { + snapshot: InlaySnapshot, + inlays: Vec, +} + +#[derive(Clone)] +pub struct InlaySnapshot { + pub buffer: MultiBufferSnapshot, + transforms: SumTree, + pub version: usize, +} + +#[derive(Clone, Debug)] +enum Transform { + Isomorphic(TextSummary), + Inlay(Inlay), +} + +#[derive(Debug, Clone)] +pub(crate) struct Inlay { + pub(crate) id: InlayId, + pub position: Anchor, + pub text: text::Rope, +} + +impl Inlay { + pub fn hint(id: usize, position: Anchor, hint: &project::InlayHint) -> Self { + let mut text = hint.text(); + if hint.padding_right && !text.ends_with(' ') { + text.push(' '); + } + if hint.padding_left && !text.starts_with(' ') { + text.insert(0, ' '); + } + Self { + id: InlayId::Hint(id), + position, + text: text.into(), + } + } + + pub fn suggestion>(id: usize, position: Anchor, text: T) -> Self { + Self { + id: InlayId::Suggestion(id), + position, + text: text.into(), + } + } +} + +impl sum_tree::Item for Transform { + type Summary = TransformSummary; + + fn summary(&self) -> Self::Summary { + match self { + Transform::Isomorphic(summary) => TransformSummary { + input: summary.clone(), + output: summary.clone(), + }, + Transform::Inlay(inlay) => TransformSummary { + input: TextSummary::default(), + output: inlay.text.summary(), + }, + } + } +} + +#[derive(Clone, Debug, Default)] +struct TransformSummary { + input: TextSummary, + output: TextSummary, +} + +impl sum_tree::Summary for TransformSummary { + type Context = (); + + fn add_summary(&mut self, other: &Self, _: &()) { + self.input += &other.input; + self.output += &other.output; + } +} + +pub type InlayEdit = Edit; + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct InlayOffset(pub usize); + +impl Add for InlayOffset { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl Sub for InlayOffset { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +impl AddAssign for InlayOffset { + fn add_assign(&mut self, rhs: Self) { + self.0 += rhs.0; + } +} + +impl SubAssign for InlayOffset { + fn sub_assign(&mut self, rhs: Self) { + self.0 -= rhs.0; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += &summary.output.len; + } +} + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct InlayPoint(pub Point); + +impl Add for InlayPoint { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl Sub for InlayPoint { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += &summary.output.lines; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + *self += &summary.input.len; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for Point { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + *self += &summary.input.lines; + } +} + +#[derive(Clone)] +pub struct InlayBufferRows<'a> { + transforms: Cursor<'a, Transform, (InlayPoint, Point)>, + buffer_rows: MultiBufferRows<'a>, + inlay_row: u32, + max_buffer_row: MultiBufferRow, +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +struct HighlightEndpoint 
{ + offset: InlayOffset, + is_start: bool, + tag: Option, + style: HighlightStyle, +} + +impl PartialOrd for HighlightEndpoint { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for HighlightEndpoint { + fn cmp(&self, other: &Self) -> cmp::Ordering { + self.offset + .cmp(&other.offset) + .then_with(|| other.is_start.cmp(&self.is_start)) + } +} + +pub struct InlayChunks<'a> { + transforms: Cursor<'a, Transform, (InlayOffset, usize)>, + buffer_chunks: MultiBufferChunks<'a>, + buffer_chunk: Option>, + inlay_chunks: Option>, + inlay_chunk: Option<&'a str>, + output_offset: InlayOffset, + max_output_offset: InlayOffset, + highlight_styles: HighlightStyles, + highlight_endpoints: Peekable>, + active_highlights: BTreeMap, HighlightStyle>, + highlights: Highlights<'a>, + snapshot: &'a InlaySnapshot, +} + +impl<'a> InlayChunks<'a> { + pub fn seek(&mut self, offset: InlayOffset) { + self.transforms.seek(&offset, Bias::Right, &()); + + let buffer_offset = self.snapshot.to_buffer_offset(offset); + self.buffer_chunks.seek(buffer_offset); + self.inlay_chunks = None; + self.buffer_chunk = None; + self.output_offset = offset; + } + + pub fn offset(&self) -> InlayOffset { + self.output_offset + } +} + +impl<'a> Iterator for InlayChunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> Option { + if self.output_offset == self.max_output_offset { + return None; + } + + let mut next_highlight_endpoint = InlayOffset(usize::MAX); + while let Some(endpoint) = self.highlight_endpoints.peek().copied() { + if endpoint.offset <= self.output_offset { + if endpoint.is_start { + self.active_highlights.insert(endpoint.tag, endpoint.style); + } else { + self.active_highlights.remove(&endpoint.tag); + } + self.highlight_endpoints.next(); + } else { + next_highlight_endpoint = endpoint.offset; + break; + } + } + + let chunk = match self.transforms.item()? 
{ + Transform::Isomorphic(_) => { + let chunk = self + .buffer_chunk + .get_or_insert_with(|| self.buffer_chunks.next().unwrap()); + if chunk.text.is_empty() { + *chunk = self.buffer_chunks.next().unwrap(); + } + + let (prefix, suffix) = chunk.text.split_at( + chunk + .text + .len() + .min(self.transforms.end(&()).0 .0 - self.output_offset.0) + .min(next_highlight_endpoint.0 - self.output_offset.0), + ); + + chunk.text = suffix; + self.output_offset.0 += prefix.len(); + let mut prefix = Chunk { + text: prefix, + ..*chunk + }; + if !self.active_highlights.is_empty() { + let mut highlight_style = HighlightStyle::default(); + for active_highlight in self.active_highlights.values() { + highlight_style.highlight(*active_highlight); + } + prefix.highlight_style = Some(highlight_style); + } + prefix + } + Transform::Inlay(inlay) => { + let mut inlay_style_and_highlight = None; + if let Some(inlay_highlights) = self.highlights.inlay_highlights { + for (_, inlay_id_to_data) in inlay_highlights.iter() { + let style_and_highlight = inlay_id_to_data.get(&inlay.id); + if style_and_highlight.is_some() { + inlay_style_and_highlight = style_and_highlight; + break; + } + } + } + + let mut highlight_style = match inlay.id { + InlayId::Suggestion(_) => self.highlight_styles.suggestion, + InlayId::Hint(_) => self.highlight_styles.inlay_hint, + }; + let next_inlay_highlight_endpoint; + let offset_in_inlay = self.output_offset - self.transforms.start().0; + if let Some((style, highlight)) = inlay_style_and_highlight { + let range = &highlight.range; + if offset_in_inlay.0 < range.start { + next_inlay_highlight_endpoint = range.start - offset_in_inlay.0; + } else if offset_in_inlay.0 >= range.end { + next_inlay_highlight_endpoint = usize::MAX; + } else { + next_inlay_highlight_endpoint = range.end - offset_in_inlay.0; + highlight_style + .get_or_insert_with(|| Default::default()) + .highlight(*style); + } + } else { + next_inlay_highlight_endpoint = usize::MAX; + } + + let inlay_chunks = self.inlay_chunks.get_or_insert_with(|| { + let start = offset_in_inlay; + let end = cmp::min(self.max_output_offset, self.transforms.end(&()).0) + - self.transforms.start().0; + inlay.text.chunks_in_range(start.0..end.0) + }); + let inlay_chunk = self + .inlay_chunk + .get_or_insert_with(|| inlay_chunks.next().unwrap()); + let (chunk, remainder) = + inlay_chunk.split_at(inlay_chunk.len().min(next_inlay_highlight_endpoint)); + *inlay_chunk = remainder; + if inlay_chunk.is_empty() { + self.inlay_chunk = None; + } + + self.output_offset.0 += chunk.len(); + + if !self.active_highlights.is_empty() { + for active_highlight in self.active_highlights.values() { + highlight_style + .get_or_insert(Default::default()) + .highlight(*active_highlight); + } + } + Chunk { + text: chunk, + highlight_style, + ..Default::default() + } + } + }; + + if self.output_offset == self.transforms.end(&()).0 { + self.inlay_chunks = None; + self.transforms.next(&()); + } + + Some(chunk) + } +} + +impl<'a> InlayBufferRows<'a> { + pub fn seek(&mut self, row: u32) { + let inlay_point = InlayPoint::new(row, 0); + self.transforms.seek(&inlay_point, Bias::Left, &()); + + let mut buffer_point = self.transforms.start().1; + let buffer_row = MultiBufferRow(if row == 0 { + 0 + } else { + match self.transforms.item() { + Some(Transform::Isomorphic(_)) => { + buffer_point += inlay_point.0 - self.transforms.start().0 .0; + buffer_point.row + } + _ => cmp::min(buffer_point.row + 1, self.max_buffer_row.0), + } + }); + self.inlay_row = inlay_point.row(); + 
self.buffer_rows.seek(buffer_row); + } +} + +impl<'a> Iterator for InlayBufferRows<'a> { + type Item = Option; + + fn next(&mut self) -> Option { + let buffer_row = if self.inlay_row == 0 { + self.buffer_rows.next().unwrap() + } else { + match self.transforms.item()? { + Transform::Inlay(_) => None, + Transform::Isomorphic(_) => self.buffer_rows.next().unwrap(), + } + }; + + self.inlay_row += 1; + self.transforms + .seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left, &()); + + Some(buffer_row) + } +} + +impl InlayPoint { + pub fn new(row: u32, column: u32) -> Self { + Self(Point::new(row, column)) + } + + pub fn row(self) -> u32 { + self.0.row + } +} + +impl InlayMap { + pub fn new(buffer: MultiBufferSnapshot) -> (Self, InlaySnapshot) { + let version = 0; + let snapshot = InlaySnapshot { + buffer: buffer.clone(), + transforms: SumTree::from_iter(Some(Transform::Isomorphic(buffer.text_summary())), &()), + version, + }; + + ( + Self { + snapshot: snapshot.clone(), + inlays: Vec::new(), + }, + snapshot, + ) + } + + pub fn sync( + &mut self, + buffer_snapshot: MultiBufferSnapshot, + mut buffer_edits: Vec>, + ) -> (InlaySnapshot, Vec) { + let snapshot = &mut self.snapshot; + + if buffer_edits.is_empty() { + if snapshot.buffer.trailing_excerpt_update_count() + != buffer_snapshot.trailing_excerpt_update_count() + { + buffer_edits.push(Edit { + old: snapshot.buffer.len()..snapshot.buffer.len(), + new: buffer_snapshot.len()..buffer_snapshot.len(), + }); + } + } + + if buffer_edits.is_empty() { + if snapshot.buffer.edit_count() != buffer_snapshot.edit_count() + || snapshot.buffer.parse_count() != buffer_snapshot.parse_count() + || snapshot.buffer.diagnostics_update_count() + != buffer_snapshot.diagnostics_update_count() + || snapshot.buffer.git_diff_update_count() + != buffer_snapshot.git_diff_update_count() + || snapshot.buffer.trailing_excerpt_update_count() + != buffer_snapshot.trailing_excerpt_update_count() + { + snapshot.version += 1; + } + + snapshot.buffer = buffer_snapshot; + (snapshot.clone(), Vec::new()) + } else { + let mut inlay_edits = Patch::default(); + let mut new_transforms = SumTree::new(); + let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(); + let mut buffer_edits_iter = buffer_edits.iter().peekable(); + while let Some(buffer_edit) = buffer_edits_iter.next() { + new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); + if let Some(Transform::Isomorphic(transform)) = cursor.item() { + if cursor.end(&()).0 == buffer_edit.old.start { + push_isomorphic(&mut new_transforms, transform.clone()); + cursor.next(&()); + } + } + + // Remove all the inlays and transforms contained by the edit. + let old_start = + cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0); + cursor.seek(&buffer_edit.old.end, Bias::Right, &()); + let old_end = + cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0); + + // Push the unchanged prefix. + let prefix_start = new_transforms.summary().input.len; + let prefix_end = buffer_edit.new.start; + push_isomorphic( + &mut new_transforms, + buffer_snapshot.text_summary_for_range(prefix_start..prefix_end), + ); + let new_start = InlayOffset(new_transforms.summary().output.len); + + let start_ix = match self.inlays.binary_search_by(|probe| { + probe + .position + .to_offset(&buffer_snapshot) + .cmp(&buffer_edit.new.start) + .then(std::cmp::Ordering::Greater) + }) { + Ok(ix) | Err(ix) => ix, + }; + + for inlay in &self.inlays[start_ix..] 
{ + let buffer_offset = inlay.position.to_offset(&buffer_snapshot); + if buffer_offset > buffer_edit.new.end { + break; + } + + let prefix_start = new_transforms.summary().input.len; + let prefix_end = buffer_offset; + push_isomorphic( + &mut new_transforms, + buffer_snapshot.text_summary_for_range(prefix_start..prefix_end), + ); + + if inlay.position.is_valid(&buffer_snapshot) { + new_transforms.push(Transform::Inlay(inlay.clone()), &()); + } + } + + // Apply the rest of the edit. + let transform_start = new_transforms.summary().input.len; + push_isomorphic( + &mut new_transforms, + buffer_snapshot.text_summary_for_range(transform_start..buffer_edit.new.end), + ); + let new_end = InlayOffset(new_transforms.summary().output.len); + inlay_edits.push(Edit { + old: old_start..old_end, + new: new_start..new_end, + }); + + // If the next edit doesn't intersect the current isomorphic transform, then + // we can push its remainder. + if buffer_edits_iter + .peek() + .map_or(true, |edit| edit.old.start >= cursor.end(&()).0) + { + let transform_start = new_transforms.summary().input.len; + let transform_end = + buffer_edit.new.end + (cursor.end(&()).0 - buffer_edit.old.end); + push_isomorphic( + &mut new_transforms, + buffer_snapshot.text_summary_for_range(transform_start..transform_end), + ); + cursor.next(&()); + } + } + + new_transforms.append(cursor.suffix(&()), &()); + if new_transforms.is_empty() { + new_transforms.push(Transform::Isomorphic(Default::default()), &()); + } + + drop(cursor); + snapshot.transforms = new_transforms; + snapshot.version += 1; + snapshot.buffer = buffer_snapshot; + snapshot.check_invariants(); + + (snapshot.clone(), inlay_edits.into_inner()) + } + } + + pub fn splice( + &mut self, + to_remove: Vec, + to_insert: Vec, + ) -> (InlaySnapshot, Vec) { + let snapshot = &mut self.snapshot; + let mut edits = BTreeSet::new(); + + self.inlays.retain(|inlay| { + let retain = !to_remove.contains(&inlay.id); + if !retain { + let offset = inlay.position.to_offset(&snapshot.buffer); + edits.insert(offset); + } + retain + }); + + for inlay_to_insert in to_insert { + // Avoid inserting empty inlays. 
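+ // An empty inlay would only contribute a zero-length transform, so skip it up front.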
+ if inlay_to_insert.text.is_empty() { + continue; + } + + let offset = inlay_to_insert.position.to_offset(&snapshot.buffer); + match self.inlays.binary_search_by(|probe| { + probe + .position + .cmp(&inlay_to_insert.position, &snapshot.buffer) + }) { + Ok(ix) | Err(ix) => { + self.inlays.insert(ix, inlay_to_insert); + } + } + + edits.insert(offset); + } + + let buffer_edits = edits + .into_iter() + .map(|offset| Edit { + old: offset..offset, + new: offset..offset, + }) + .collect(); + let buffer_snapshot = snapshot.buffer.clone(); + let (snapshot, edits) = self.sync(buffer_snapshot, buffer_edits); + (snapshot, edits) + } + + pub fn current_inlays(&self) -> impl Iterator { + self.inlays.iter() + } + + #[cfg(test)] + pub(crate) fn randomly_mutate( + &mut self, + next_inlay_id: &mut usize, + rng: &mut rand::rngs::StdRng, + ) -> (InlaySnapshot, Vec) { + use rand::prelude::*; + use util::post_inc; + + let mut to_remove = Vec::new(); + let mut to_insert = Vec::new(); + let snapshot = &mut self.snapshot; + for i in 0..rng.gen_range(1..=5) { + if self.inlays.is_empty() || rng.gen() { + let position = snapshot.buffer.random_byte_range(0, rng).start; + let bias = if rng.gen() { Bias::Left } else { Bias::Right }; + let len = if rng.gen_bool(0.01) { + 0 + } else { + rng.gen_range(1..=5) + }; + let text = util::RandomCharIter::new(&mut *rng) + .filter(|ch| *ch != '\r') + .take(len) + .collect::(); + + let inlay_id = if i % 2 == 0 { + InlayId::Hint(post_inc(next_inlay_id)) + } else { + InlayId::Suggestion(post_inc(next_inlay_id)) + }; + log::info!( + "creating inlay {:?} at buffer offset {} with bias {:?} and text {:?}", + inlay_id, + position, + bias, + text + ); + + to_insert.push(Inlay { + id: inlay_id, + position: snapshot.buffer.anchor_at(position, bias), + text: text.into(), + }); + } else { + to_remove.push( + self.inlays + .iter() + .choose(rng) + .map(|inlay| inlay.id) + .unwrap(), + ); + } + } + log::info!("removing inlays: {:?}", to_remove); + + let (snapshot, edits) = self.splice(to_remove, to_insert); + (snapshot, edits) + } +} + +impl InlaySnapshot { + pub fn to_point(&self, offset: InlayOffset) -> InlayPoint { + let mut cursor = self + .transforms + .cursor::<(InlayOffset, (InlayPoint, usize))>(); + cursor.seek(&offset, Bias::Right, &()); + let overshoot = offset.0 - cursor.start().0 .0; + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + let buffer_offset_start = cursor.start().1 .1; + let buffer_offset_end = buffer_offset_start + overshoot; + let buffer_start = self.buffer.offset_to_point(buffer_offset_start); + let buffer_end = self.buffer.offset_to_point(buffer_offset_end); + InlayPoint(cursor.start().1 .0 .0 + (buffer_end - buffer_start)) + } + Some(Transform::Inlay(inlay)) => { + let overshoot = inlay.text.offset_to_point(overshoot); + InlayPoint(cursor.start().1 .0 .0 + overshoot) + } + None => self.max_point(), + } + } + + pub fn len(&self) -> InlayOffset { + InlayOffset(self.transforms.summary().output.len) + } + + pub fn max_point(&self) -> InlayPoint { + InlayPoint(self.transforms.summary().output.lines) + } + + pub fn to_offset(&self, point: InlayPoint) -> InlayOffset { + let mut cursor = self + .transforms + .cursor::<(InlayPoint, (InlayOffset, Point))>(); + cursor.seek(&point, Bias::Right, &()); + let overshoot = point.0 - cursor.start().0 .0; + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + let buffer_point_start = cursor.start().1 .1; + let buffer_point_end = buffer_point_start + overshoot; + let buffer_offset_start = 
self.buffer.point_to_offset(buffer_point_start); + let buffer_offset_end = self.buffer.point_to_offset(buffer_point_end); + InlayOffset(cursor.start().1 .0 .0 + (buffer_offset_end - buffer_offset_start)) + } + Some(Transform::Inlay(inlay)) => { + let overshoot = inlay.text.point_to_offset(overshoot); + InlayOffset(cursor.start().1 .0 .0 + overshoot) + } + None => self.len(), + } + } + + pub fn to_buffer_point(&self, point: InlayPoint) -> Point { + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + cursor.seek(&point, Bias::Right, &()); + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + let overshoot = point.0 - cursor.start().0 .0; + cursor.start().1 + overshoot + } + Some(Transform::Inlay(_)) => cursor.start().1, + None => self.buffer.max_point(), + } + } + + pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + cursor.seek(&offset, Bias::Right, &()); + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + let overshoot = offset - cursor.start().0; + cursor.start().1 + overshoot.0 + } + Some(Transform::Inlay(_)) => cursor.start().1, + None => self.buffer.len(), + } + } + + pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset { + let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(); + cursor.seek(&offset, Bias::Left, &()); + loop { + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + if offset == cursor.end(&()).0 { + while let Some(Transform::Inlay(inlay)) = cursor.next_item() { + if inlay.position.bias() == Bias::Right { + break; + } else { + cursor.next(&()); + } + } + return cursor.end(&()).1; + } else { + let overshoot = offset - cursor.start().0; + return InlayOffset(cursor.start().1 .0 + overshoot); + } + } + Some(Transform::Inlay(inlay)) => { + if inlay.position.bias() == Bias::Left { + cursor.next(&()); + } else { + return cursor.start().1; + } + } + None => { + return self.len(); + } + } + } + } + + pub fn to_inlay_point(&self, point: Point) -> InlayPoint { + let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(); + cursor.seek(&point, Bias::Left, &()); + loop { + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + if point == cursor.end(&()).0 { + while let Some(Transform::Inlay(inlay)) = cursor.next_item() { + if inlay.position.bias() == Bias::Right { + break; + } else { + cursor.next(&()); + } + } + return cursor.end(&()).1; + } else { + let overshoot = point - cursor.start().0; + return InlayPoint(cursor.start().1 .0 + overshoot); + } + } + Some(Transform::Inlay(inlay)) => { + if inlay.position.bias() == Bias::Left { + cursor.next(&()); + } else { + return cursor.start().1; + } + } + None => { + return self.max_point(); + } + } + } + } + + pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + cursor.seek(&point, Bias::Left, &()); + loop { + match cursor.item() { + Some(Transform::Isomorphic(transform)) => { + if cursor.start().0 == point { + if let Some(Transform::Inlay(inlay)) = cursor.prev_item() { + if inlay.position.bias() == Bias::Left { + return point; + } else if bias == Bias::Left { + cursor.prev(&()); + } else if transform.first_line_chars == 0 { + point.0 += Point::new(1, 0); + } else { + point.0 += Point::new(0, 1); + } + } else { + return point; + } + } else if cursor.end(&()).0 == point { + if let Some(Transform::Inlay(inlay)) = cursor.next_item() { + if inlay.position.bias() == Bias::Right { + return 
point; + } else if bias == Bias::Right { + cursor.next(&()); + } else if point.0.column == 0 { + point.0.row -= 1; + point.0.column = self.line_len(point.0.row); + } else { + point.0.column -= 1; + } + } else { + return point; + } + } else { + let overshoot = point.0 - cursor.start().0 .0; + let buffer_point = cursor.start().1 + overshoot; + let clipped_buffer_point = self.buffer.clip_point(buffer_point, bias); + let clipped_overshoot = clipped_buffer_point - cursor.start().1; + let clipped_point = InlayPoint(cursor.start().0 .0 + clipped_overshoot); + if clipped_point == point { + return clipped_point; + } else { + point = clipped_point; + } + } + } + Some(Transform::Inlay(inlay)) => { + if point == cursor.start().0 && inlay.position.bias() == Bias::Right { + match cursor.prev_item() { + Some(Transform::Inlay(inlay)) => { + if inlay.position.bias() == Bias::Left { + return point; + } + } + _ => return point, + } + } else if point == cursor.end(&()).0 && inlay.position.bias() == Bias::Left { + match cursor.next_item() { + Some(Transform::Inlay(inlay)) => { + if inlay.position.bias() == Bias::Right { + return point; + } + } + _ => return point, + } + } + + if bias == Bias::Left { + point = cursor.start().0; + cursor.prev(&()); + } else { + cursor.next(&()); + point = cursor.start().0; + } + } + None => { + bias = bias.invert(); + if bias == Bias::Left { + point = cursor.start().0; + cursor.prev(&()); + } else { + cursor.next(&()); + point = cursor.start().0; + } + } + } + } + } + + pub fn text_summary(&self) -> TextSummary { + self.transforms.summary().output.clone() + } + + pub fn text_summary_for_range(&self, range: Range) -> TextSummary { + let mut summary = TextSummary::default(); + + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + cursor.seek(&range.start, Bias::Right, &()); + + let overshoot = range.start.0 - cursor.start().0 .0; + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + let buffer_start = cursor.start().1; + let suffix_start = buffer_start + overshoot; + let suffix_end = + buffer_start + (cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0 .0); + summary = self.buffer.text_summary_for_range(suffix_start..suffix_end); + cursor.next(&()); + } + Some(Transform::Inlay(inlay)) => { + let suffix_start = overshoot; + let suffix_end = cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0 .0; + summary = inlay.text.cursor(suffix_start).summary(suffix_end); + cursor.next(&()); + } + None => {} + } + + if range.end > cursor.start().0 { + summary += cursor + .summary::<_, TransformSummary>(&range.end, Bias::Right, &()) + .output; + + let overshoot = range.end.0 - cursor.start().0 .0; + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + let prefix_start = cursor.start().1; + let prefix_end = prefix_start + overshoot; + summary += self + .buffer + .text_summary_for_range::(prefix_start..prefix_end); + } + Some(Transform::Inlay(inlay)) => { + let prefix_end = overshoot; + summary += inlay.text.cursor(0).summary::(prefix_end); + } + None => {} + } + } + + summary + } + + pub fn buffer_rows(&self, row: u32) -> InlayBufferRows<'_> { + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let inlay_point = InlayPoint::new(row, 0); + cursor.seek(&inlay_point, Bias::Left, &()); + + let max_buffer_row = MultiBufferRow(self.buffer.max_point().row); + let mut buffer_point = cursor.start().1; + let buffer_row = if row == 0 { + MultiBufferRow(0) + } else { + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + 
buffer_point += inlay_point.0 - cursor.start().0 .0; + MultiBufferRow(buffer_point.row) + } + _ => cmp::min(MultiBufferRow(buffer_point.row + 1), max_buffer_row), + } + }; + + InlayBufferRows { + transforms: cursor, + inlay_row: inlay_point.row(), + buffer_rows: self.buffer.buffer_rows(buffer_row), + max_buffer_row, + } + } + + pub fn line_len(&self, row: u32) -> u32 { + let line_start = self.to_offset(InlayPoint::new(row, 0)).0; + let line_end = if row >= self.max_point().row() { + self.len().0 + } else { + self.to_offset(InlayPoint::new(row + 1, 0)).0 - 1 + }; + (line_end - line_start) as u32 + } + + pub(crate) fn chunks<'a>( + &'a self, + range: Range, + language_aware: bool, + highlights: Highlights<'a>, + ) -> InlayChunks<'a> { + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + cursor.seek(&range.start, Bias::Right, &()); + + let mut highlight_endpoints = Vec::new(); + if let Some(text_highlights) = highlights.text_highlights { + if !text_highlights.is_empty() { + self.apply_text_highlights( + &mut cursor, + &range, + text_highlights, + &mut highlight_endpoints, + ); + cursor.seek(&range.start, Bias::Right, &()); + } + } + highlight_endpoints.sort(); + let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end); + let buffer_chunks = self.buffer.chunks(buffer_range, language_aware); + + InlayChunks { + transforms: cursor, + buffer_chunks, + inlay_chunks: None, + inlay_chunk: None, + buffer_chunk: None, + output_offset: range.start, + max_output_offset: range.end, + highlight_styles: highlights.styles, + highlight_endpoints: highlight_endpoints.into_iter().peekable(), + active_highlights: Default::default(), + highlights, + snapshot: self, + } + } + + fn apply_text_highlights( + &self, + cursor: &mut Cursor<'_, Transform, (InlayOffset, usize)>, + range: &Range, + text_highlights: &TreeMap, Arc<(HighlightStyle, Vec>)>>, + highlight_endpoints: &mut Vec, + ) { + while cursor.start().0 < range.end { + let transform_start = self + .buffer + .anchor_after(self.to_buffer_offset(cmp::max(range.start, cursor.start().0))); + let transform_end = + { + let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0); + self.buffer.anchor_before(self.to_buffer_offset(cmp::min( + cursor.end(&()).0, + cursor.start().0 + overshoot, + ))) + }; + + for (tag, text_highlights) in text_highlights.iter() { + let style = text_highlights.0; + let ranges = &text_highlights.1; + + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe.end.cmp(&transform_start, &self.buffer); + if cmp.is_gt() { + cmp::Ordering::Greater + } else { + cmp::Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + for range in &ranges[start_ix..] 
{ + if range.start.cmp(&transform_end, &self.buffer).is_ge() { + break; + } + + highlight_endpoints.push(HighlightEndpoint { + offset: self.to_inlay_offset(range.start.to_offset(&self.buffer)), + is_start: true, + tag: *tag, + style, + }); + highlight_endpoints.push(HighlightEndpoint { + offset: self.to_inlay_offset(range.end.to_offset(&self.buffer)), + is_start: false, + tag: *tag, + style, + }); + } + } + + cursor.next(&()); + } + } + + #[cfg(test)] + pub fn text(&self) -> String { + self.chunks(Default::default()..self.len(), false, Highlights::default()) + .map(|chunk| chunk.text) + .collect() + } + + fn check_invariants(&self) { + #[cfg(any(debug_assertions, feature = "test-support"))] + { + assert_eq!(self.transforms.summary().input, self.buffer.text_summary()); + let mut transforms = self.transforms.iter().peekable(); + while let Some(transform) = transforms.next() { + let transform_is_isomorphic = matches!(transform, Transform::Isomorphic(_)); + if let Some(next_transform) = transforms.peek() { + let next_transform_is_isomorphic = + matches!(next_transform, Transform::Isomorphic(_)); + assert!( + !transform_is_isomorphic || !next_transform_is_isomorphic, + "two adjacent isomorphic transforms" + ); + } + } + } + } +} + +fn push_isomorphic(sum_tree: &mut SumTree, summary: TextSummary) { + if summary.len == 0 { + return; + } + + let mut summary = Some(summary); + sum_tree.update_last( + |transform| { + if let Transform::Isomorphic(transform) = transform { + *transform += summary.take().unwrap(); + } + }, + &(), + ); + + if let Some(summary) = summary { + sum_tree.push(Transform::Isomorphic(summary), &()); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + display_map::{InlayHighlights, TextHighlights}, + hover_links::InlayHighlight, + InlayId, MultiBuffer, + }; + use gpui::AppContext; + use project::{InlayHint, InlayHintLabel, ResolveState}; + use rand::prelude::*; + use settings::SettingsStore; + use std::{cmp::Reverse, env, sync::Arc}; + use text::Patch; + use util::post_inc; + + #[test] + fn test_inlay_properties_label_padding() { + assert_eq!( + Inlay::hint( + 0, + Anchor::min(), + &InlayHint { + label: InlayHintLabel::String("a".to_string()), + position: text::Anchor::default(), + padding_left: false, + padding_right: false, + tooltip: None, + kind: None, + resolve_state: ResolveState::Resolved, + }, + ) + .text + .to_string(), + "a", + "Should not pad label if not requested" + ); + + assert_eq!( + Inlay::hint( + 0, + Anchor::min(), + &InlayHint { + label: InlayHintLabel::String("a".to_string()), + position: text::Anchor::default(), + padding_left: true, + padding_right: true, + tooltip: None, + kind: None, + resolve_state: ResolveState::Resolved, + }, + ) + .text + .to_string(), + " a ", + "Should pad label for every side requested" + ); + + assert_eq!( + Inlay::hint( + 0, + Anchor::min(), + &InlayHint { + label: InlayHintLabel::String(" a ".to_string()), + position: text::Anchor::default(), + padding_left: false, + padding_right: false, + tooltip: None, + kind: None, + resolve_state: ResolveState::Resolved, + }, + ) + .text + .to_string(), + " a ", + "Should not change already padded label" + ); + + assert_eq!( + Inlay::hint( + 0, + Anchor::min(), + &InlayHint { + label: InlayHintLabel::String(" a ".to_string()), + position: text::Anchor::default(), + padding_left: true, + padding_right: true, + tooltip: None, + kind: None, + resolve_state: ResolveState::Resolved, + }, + ) + .text + .to_string(), + " a ", + "Should not change already padded label" + ); + } + + 
#[gpui::test] + fn test_basic_inlays(cx: &mut AppContext) { + let buffer = MultiBuffer::build_simple("abcdefghi", cx); + let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer.read(cx).snapshot(cx)); + assert_eq!(inlay_snapshot.text(), "abcdefghi"); + let mut next_inlay_id = 0; + + let (inlay_snapshot, _) = inlay_map.splice( + Vec::new(), + vec![Inlay { + id: InlayId::Hint(post_inc(&mut next_inlay_id)), + position: buffer.read(cx).snapshot(cx).anchor_after(3), + text: "|123|".into(), + }], + ); + assert_eq!(inlay_snapshot.text(), "abc|123|defghi"); + assert_eq!( + inlay_snapshot.to_inlay_point(Point::new(0, 0)), + InlayPoint::new(0, 0) + ); + assert_eq!( + inlay_snapshot.to_inlay_point(Point::new(0, 1)), + InlayPoint::new(0, 1) + ); + assert_eq!( + inlay_snapshot.to_inlay_point(Point::new(0, 2)), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.to_inlay_point(Point::new(0, 3)), + InlayPoint::new(0, 3) + ); + assert_eq!( + inlay_snapshot.to_inlay_point(Point::new(0, 4)), + InlayPoint::new(0, 9) + ); + assert_eq!( + inlay_snapshot.to_inlay_point(Point::new(0, 5)), + InlayPoint::new(0, 10) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 0), Bias::Left), + InlayPoint::new(0, 0) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 0), Bias::Right), + InlayPoint::new(0, 0) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 3), Bias::Left), + InlayPoint::new(0, 3) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 3), Bias::Right), + InlayPoint::new(0, 3) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 4), Bias::Left), + InlayPoint::new(0, 3) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 4), Bias::Right), + InlayPoint::new(0, 9) + ); + + // Edits before or after the inlay should not affect it. + buffer.update(cx, |buffer, cx| { + buffer.edit([(2..3, "x"), (3..3, "y"), (4..4, "z")], None, cx) + }); + let (inlay_snapshot, _) = inlay_map.sync( + buffer.read(cx).snapshot(cx), + buffer_edits.consume().into_inner(), + ); + assert_eq!(inlay_snapshot.text(), "abxy|123|dzefghi"); + + // An edit surrounding the inlay should invalidate it. + buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "D")], None, cx)); + let (inlay_snapshot, _) = inlay_map.sync( + buffer.read(cx).snapshot(cx), + buffer_edits.consume().into_inner(), + ); + assert_eq!(inlay_snapshot.text(), "abxyDzefghi"); + + let (inlay_snapshot, _) = inlay_map.splice( + Vec::new(), + vec![ + Inlay { + id: InlayId::Hint(post_inc(&mut next_inlay_id)), + position: buffer.read(cx).snapshot(cx).anchor_before(3), + text: "|123|".into(), + }, + Inlay { + id: InlayId::Suggestion(post_inc(&mut next_inlay_id)), + position: buffer.read(cx).snapshot(cx).anchor_after(3), + text: "|456|".into(), + }, + ], + ); + assert_eq!(inlay_snapshot.text(), "abx|123||456|yDzefghi"); + + // Edits ending where the inlay starts should not move it if it has a left bias. 
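+        // (The `|123|` inlay above was anchored with `anchor_before(3)`, so text inserted at
+        // offset 3 lands after it, while the right-biased `|456|` anchor moves past the insertion.)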
+ buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "JKL")], None, cx)); + let (inlay_snapshot, _) = inlay_map.sync( + buffer.read(cx).snapshot(cx), + buffer_edits.consume().into_inner(), + ); + assert_eq!(inlay_snapshot.text(), "abx|123|JKL|456|yDzefghi"); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 0), Bias::Left), + InlayPoint::new(0, 0) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 0), Bias::Right), + InlayPoint::new(0, 0) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 1), Bias::Left), + InlayPoint::new(0, 1) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 1), Bias::Right), + InlayPoint::new(0, 1) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 2), Bias::Left), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 2), Bias::Right), + InlayPoint::new(0, 2) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 3), Bias::Left), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 3), Bias::Right), + InlayPoint::new(0, 8) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 4), Bias::Left), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 4), Bias::Right), + InlayPoint::new(0, 8) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 5), Bias::Left), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 5), Bias::Right), + InlayPoint::new(0, 8) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 6), Bias::Left), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 6), Bias::Right), + InlayPoint::new(0, 8) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 7), Bias::Left), + InlayPoint::new(0, 2) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 7), Bias::Right), + InlayPoint::new(0, 8) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 8), Bias::Left), + InlayPoint::new(0, 8) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 8), Bias::Right), + InlayPoint::new(0, 8) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 9), Bias::Left), + InlayPoint::new(0, 9) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 9), Bias::Right), + InlayPoint::new(0, 9) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 10), Bias::Left), + InlayPoint::new(0, 10) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 10), Bias::Right), + InlayPoint::new(0, 10) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 11), Bias::Left), + InlayPoint::new(0, 11) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 11), Bias::Right), + InlayPoint::new(0, 11) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 12), Bias::Left), + InlayPoint::new(0, 11) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 12), Bias::Right), + InlayPoint::new(0, 17) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 13), Bias::Left), + InlayPoint::new(0, 11) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 13), Bias::Right), + InlayPoint::new(0, 17) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 14), Bias::Left), + InlayPoint::new(0, 11) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 14), Bias::Right), + InlayPoint::new(0, 17) + ); + + assert_eq!( + 
inlay_snapshot.clip_point(InlayPoint::new(0, 15), Bias::Left), + InlayPoint::new(0, 11) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 15), Bias::Right), + InlayPoint::new(0, 17) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 16), Bias::Left), + InlayPoint::new(0, 11) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 16), Bias::Right), + InlayPoint::new(0, 17) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 17), Bias::Left), + InlayPoint::new(0, 17) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 17), Bias::Right), + InlayPoint::new(0, 17) + ); + + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 18), Bias::Left), + InlayPoint::new(0, 18) + ); + assert_eq!( + inlay_snapshot.clip_point(InlayPoint::new(0, 18), Bias::Right), + InlayPoint::new(0, 18) + ); + + // The inlays can be manually removed. + let (inlay_snapshot, _) = inlay_map.splice( + inlay_map.inlays.iter().map(|inlay| inlay.id).collect(), + Vec::new(), + ); + assert_eq!(inlay_snapshot.text(), "abxJKLyDzefghi"); + } + + #[gpui::test] + fn test_inlay_buffer_rows(cx: &mut AppContext) { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi", cx); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer.read(cx).snapshot(cx)); + assert_eq!(inlay_snapshot.text(), "abc\ndef\nghi"); + let mut next_inlay_id = 0; + + let (inlay_snapshot, _) = inlay_map.splice( + Vec::new(), + vec![ + Inlay { + id: InlayId::Hint(post_inc(&mut next_inlay_id)), + position: buffer.read(cx).snapshot(cx).anchor_before(0), + text: "|123|\n".into(), + }, + Inlay { + id: InlayId::Hint(post_inc(&mut next_inlay_id)), + position: buffer.read(cx).snapshot(cx).anchor_before(4), + text: "|456|".into(), + }, + Inlay { + id: InlayId::Suggestion(post_inc(&mut next_inlay_id)), + position: buffer.read(cx).snapshot(cx).anchor_before(7), + text: "\n|567|\n".into(), + }, + ], + ); + assert_eq!(inlay_snapshot.text(), "|123|\nabc\n|456|def\n|567|\n\nghi"); + assert_eq!( + inlay_snapshot.buffer_rows(0).collect::>(), + vec![Some(0), None, Some(1), None, None, Some(2)] + ); + } + + #[gpui::test(iterations = 100)] + fn test_random_inlays(cx: &mut AppContext, mut rng: StdRng) { + init_test(cx); + + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let len = rng.gen_range(0..30); + let buffer = if rng.gen() { + let text = util::RandomCharIter::new(&mut rng) + .take(len) + .collect::(); + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(&mut rng, cx) + }; + let mut buffer_snapshot = buffer.read(cx).snapshot(cx); + let mut next_inlay_id = 0; + log::info!("buffer text: {:?}", buffer_snapshot.text()); + let (mut inlay_map, mut inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + for _ in 0..operations { + let mut inlay_edits = Patch::default(); + + let mut prev_inlay_text = inlay_snapshot.text(); + let mut buffer_edits = Vec::new(); + match rng.gen_range(0..=100) { + 0..=50 => { + let (snapshot, edits) = inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng); + log::info!("mutated text: {:?}", snapshot.text()); + inlay_edits = Patch::new(edits); + } + _ => buffer.update(cx, |buffer, cx| { + let subscription = buffer.subscribe(); + let edit_count = rng.gen_range(1..=5); + buffer.randomly_mutate(&mut rng, edit_count, cx); + buffer_snapshot = buffer.snapshot(cx); + let edits = subscription.consume().into_inner(); + log::info!("editing {:?}", edits); + buffer_edits.extend(edits); + 
}), + }; + + let (new_inlay_snapshot, new_inlay_edits) = + inlay_map.sync(buffer_snapshot.clone(), buffer_edits); + inlay_snapshot = new_inlay_snapshot; + inlay_edits = inlay_edits.compose(new_inlay_edits); + + log::info!("buffer text: {:?}", buffer_snapshot.text()); + log::info!("inlay text: {:?}", inlay_snapshot.text()); + + let inlays = inlay_map + .inlays + .iter() + .filter(|inlay| inlay.position.is_valid(&buffer_snapshot)) + .map(|inlay| { + let offset = inlay.position.to_offset(&buffer_snapshot); + (offset, inlay.clone()) + }) + .collect::>(); + let mut expected_text = Rope::from(buffer_snapshot.text()); + for (offset, inlay) in inlays.iter().rev() { + expected_text.replace(*offset..*offset, &inlay.text.to_string()); + } + assert_eq!(inlay_snapshot.text(), expected_text.to_string()); + + let expected_buffer_rows = inlay_snapshot.buffer_rows(0).collect::>(); + assert_eq!( + expected_buffer_rows.len() as u32, + expected_text.max_point().row + 1 + ); + for row_start in 0..expected_buffer_rows.len() { + assert_eq!( + inlay_snapshot + .buffer_rows(row_start as u32) + .collect::>(), + &expected_buffer_rows[row_start..], + "incorrect buffer rows starting at {}", + row_start + ); + } + + let mut text_highlights = TextHighlights::default(); + let text_highlight_count = rng.gen_range(0_usize..10); + let mut text_highlight_ranges = (0..text_highlight_count) + .map(|_| buffer_snapshot.random_byte_range(0, &mut rng)) + .collect::>(); + text_highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end))); + log::info!("highlighting text ranges {text_highlight_ranges:?}"); + text_highlights.insert( + Some(TypeId::of::<()>()), + Arc::new(( + HighlightStyle::default(), + text_highlight_ranges + .into_iter() + .map(|range| { + buffer_snapshot.anchor_before(range.start) + ..buffer_snapshot.anchor_after(range.end) + }) + .collect(), + )), + ); + + let mut inlay_highlights = InlayHighlights::default(); + if !inlays.is_empty() { + let inlay_highlight_count = rng.gen_range(0..inlays.len()); + let mut inlay_indices = BTreeSet::default(); + while inlay_indices.len() < inlay_highlight_count { + inlay_indices.insert(rng.gen_range(0..inlays.len())); + } + let new_highlights = TreeMap::from_ordered_entries( + inlay_indices + .into_iter() + .filter_map(|i| { + let (_, inlay) = &inlays[i]; + let inlay_text_len = inlay.text.len(); + match inlay_text_len { + 0 => None, + 1 => Some(InlayHighlight { + inlay: inlay.id, + inlay_position: inlay.position, + range: 0..1, + }), + n => { + let inlay_text = inlay.text.to_string(); + let mut highlight_end = rng.gen_range(1..n); + let mut highlight_start = rng.gen_range(0..highlight_end); + while !inlay_text.is_char_boundary(highlight_end) { + highlight_end += 1; + } + while !inlay_text.is_char_boundary(highlight_start) { + highlight_start -= 1; + } + Some(InlayHighlight { + inlay: inlay.id, + inlay_position: inlay.position, + range: highlight_start..highlight_end, + }) + } + } + }) + .map(|highlight| (highlight.inlay, (HighlightStyle::default(), highlight))), + ); + log::info!("highlighting inlay ranges {new_highlights:?}"); + inlay_highlights.insert(TypeId::of::<()>(), new_highlights); + } + + for _ in 0..5 { + let mut end = rng.gen_range(0..=inlay_snapshot.len().0); + end = expected_text.clip_offset(end, Bias::Right); + let mut start = rng.gen_range(0..=end); + start = expected_text.clip_offset(start, Bias::Right); + + let range = InlayOffset(start)..InlayOffset(end); + log::info!("calling inlay_snapshot.chunks({range:?})"); + let actual_text = inlay_snapshot + 
.chunks( + range, + false, + Highlights { + text_highlights: Some(&text_highlights), + inlay_highlights: Some(&inlay_highlights), + ..Highlights::default() + }, + ) + .map(|chunk| chunk.text) + .collect::(); + assert_eq!( + actual_text, + expected_text.slice(start..end).to_string(), + "incorrect text in range {:?}", + start..end + ); + + assert_eq!( + inlay_snapshot.text_summary_for_range(InlayOffset(start)..InlayOffset(end)), + expected_text.slice(start..end).summary() + ); + } + + for edit in inlay_edits { + prev_inlay_text.replace_range( + edit.new.start.0..edit.new.start.0 + edit.old_len().0, + &inlay_snapshot.text()[edit.new.start.0..edit.new.end.0], + ); + } + assert_eq!(prev_inlay_text, inlay_snapshot.text()); + + assert_eq!(expected_text.max_point(), inlay_snapshot.max_point().0); + assert_eq!(expected_text.len(), inlay_snapshot.len().0); + + let mut buffer_point = Point::default(); + let mut inlay_point = inlay_snapshot.to_inlay_point(buffer_point); + let mut buffer_chars = buffer_snapshot.chars_at(0); + loop { + // Ensure conversion from buffer coordinates to inlay coordinates + // is consistent. + let buffer_offset = buffer_snapshot.point_to_offset(buffer_point); + assert_eq!( + inlay_snapshot.to_point(inlay_snapshot.to_inlay_offset(buffer_offset)), + inlay_point + ); + + // No matter which bias we clip an inlay point with, it doesn't move + // because it was constructed from a buffer point. + assert_eq!( + inlay_snapshot.clip_point(inlay_point, Bias::Left), + inlay_point, + "invalid inlay point for buffer point {:?} when clipped left", + buffer_point + ); + assert_eq!( + inlay_snapshot.clip_point(inlay_point, Bias::Right), + inlay_point, + "invalid inlay point for buffer point {:?} when clipped right", + buffer_point + ); + + if let Some(ch) = buffer_chars.next() { + if ch == '\n' { + buffer_point += Point::new(1, 0); + } else { + buffer_point += Point::new(0, ch.len_utf8() as u32); + } + + // Ensure that moving forward in the buffer always moves the inlay point forward as well. + let new_inlay_point = inlay_snapshot.to_inlay_point(buffer_point); + assert!(new_inlay_point > inlay_point); + inlay_point = new_inlay_point; + } else { + break; + } + } + + let mut inlay_point = InlayPoint::default(); + let mut inlay_offset = InlayOffset::default(); + for ch in expected_text.chars() { + assert_eq!( + inlay_snapshot.to_offset(inlay_point), + inlay_offset, + "invalid to_offset({:?})", + inlay_point + ); + assert_eq!( + inlay_snapshot.to_point(inlay_offset), + inlay_point, + "invalid to_point({:?})", + inlay_offset + ); + + let mut bytes = [0; 4]; + for byte in ch.encode_utf8(&mut bytes).as_bytes() { + inlay_offset.0 += 1; + if *byte == b'\n' { + inlay_point.0 += Point::new(1, 0); + } else { + inlay_point.0 += Point::new(0, 1); + } + + let clipped_left_point = inlay_snapshot.clip_point(inlay_point, Bias::Left); + let clipped_right_point = inlay_snapshot.clip_point(inlay_point, Bias::Right); + assert!( + clipped_left_point <= clipped_right_point, + "inlay point {:?} when clipped left is greater than when clipped right ({:?} > {:?})", + inlay_point, + clipped_left_point, + clipped_right_point + ); + + // Ensure the clipped points are at valid text locations. + assert_eq!( + clipped_left_point.0, + expected_text.clip_point(clipped_left_point.0, Bias::Left) + ); + assert_eq!( + clipped_right_point.0, + expected_text.clip_point(clipped_right_point.0, Bias::Right) + ); + + // Ensure the clipped points never overshoot the end of the map. 
+ assert!(clipped_left_point <= inlay_snapshot.max_point()); + assert!(clipped_right_point <= inlay_snapshot.max_point()); + + // Ensure the clipped points are at valid buffer locations. + assert_eq!( + inlay_snapshot + .to_inlay_point(inlay_snapshot.to_buffer_point(clipped_left_point)), + clipped_left_point, + "to_buffer_point({:?}) = {:?}", + clipped_left_point, + inlay_snapshot.to_buffer_point(clipped_left_point), + ); + assert_eq!( + inlay_snapshot + .to_inlay_point(inlay_snapshot.to_buffer_point(clipped_right_point)), + clipped_right_point, + "to_buffer_point({:?}) = {:?}", + clipped_right_point, + inlay_snapshot.to_buffer_point(clipped_right_point), + ); + } + } + } + } + + fn init_test(cx: &mut AppContext) { + let store = SettingsStore::test(cx); + cx.set_global(store); + theme::init(theme::LoadThemes::JustBase, cx); + } +} diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs new file mode 100644 index 0000000..0187973 --- /dev/null +++ b/crates/editor/src/display_map/tab_map.rs @@ -0,0 +1,768 @@ +use super::{ + fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot}, + Highlights, +}; +use language::{Chunk, Point}; +use multi_buffer::MultiBufferSnapshot; +use std::{cmp, mem, num::NonZeroU32, ops::Range}; +use sum_tree::Bias; + +const MAX_EXPANSION_COLUMN: u32 = 256; + +/// Keeps track of hard tabs in a text buffer. +/// +/// See the [`display_map` module documentation](crate::display_map) for more information. +pub struct TabMap(TabSnapshot); + +impl TabMap { + pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) { + let snapshot = TabSnapshot { + fold_snapshot, + tab_size, + max_expansion_column: MAX_EXPANSION_COLUMN, + version: 0, + }; + (Self(snapshot.clone()), snapshot) + } + + #[cfg(test)] + pub fn set_max_expansion_column(&mut self, column: u32) -> TabSnapshot { + self.0.max_expansion_column = column; + self.0.clone() + } + + pub fn sync( + &mut self, + fold_snapshot: FoldSnapshot, + mut fold_edits: Vec, + tab_size: NonZeroU32, + ) -> (TabSnapshot, Vec) { + let old_snapshot = &mut self.0; + let mut new_snapshot = TabSnapshot { + fold_snapshot, + tab_size, + max_expansion_column: old_snapshot.max_expansion_column, + version: old_snapshot.version, + }; + + if old_snapshot.fold_snapshot.version != new_snapshot.fold_snapshot.version { + new_snapshot.version += 1; + } + + let mut tab_edits = Vec::with_capacity(fold_edits.len()); + + if old_snapshot.tab_size == new_snapshot.tab_size { + // Expand each edit to include the next tab on the same line as the edit, + // and any subsequent tabs on that line that moved across the tab expansion + // boundary. 
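+            // A tab's expanded width depends on the column at which it starts, so an edit
+            // earlier on the line can change how every later tab on that line expands, up to
+            // `max_expansion_column`; those tabs therefore have to be folded into the edit range.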
+ for fold_edit in &mut fold_edits { + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let old_end_row_successor_offset = cmp::min( + FoldPoint::new(old_end.row() + 1, 0), + old_snapshot.fold_snapshot.max_point(), + ) + .to_offset(&old_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + + let mut offset_from_edit = 0; + let mut first_tab_offset = None; + let mut last_tab_with_changed_expansion_offset = None; + 'outer: for chunk in old_snapshot.fold_snapshot.chunks( + fold_edit.old.end..old_end_row_successor_offset, + false, + Highlights::default(), + ) { + for (ix, _) in chunk.text.match_indices('\t') { + let offset_from_edit = offset_from_edit + (ix as u32); + if first_tab_offset.is_none() { + first_tab_offset = Some(offset_from_edit); + } + + let old_column = old_end.column() + offset_from_edit; + let new_column = new_end.column() + offset_from_edit; + let was_expanded = old_column < old_snapshot.max_expansion_column; + let is_expanded = new_column < new_snapshot.max_expansion_column; + if was_expanded != is_expanded { + last_tab_with_changed_expansion_offset = Some(offset_from_edit); + } else if !was_expanded && !is_expanded { + break 'outer; + } + } + + offset_from_edit += chunk.text.len() as u32; + if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column + && new_end.column() + offset_from_edit >= new_snapshot.max_expansion_column + { + break; + } + } + + if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) { + fold_edit.old.end.0 += offset as usize + 1; + fold_edit.new.end.0 += offset as usize + 1; + } + } + + // Combine any edits that overlap due to the expansion. + let mut ix = 1; + while ix < fold_edits.len() { + let (prev_edits, next_edits) = fold_edits.split_at_mut(ix); + let prev_edit = prev_edits.last_mut().unwrap(); + let edit = &next_edits[0]; + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = edit.old.end; + prev_edit.new.end = edit.new.end; + fold_edits.remove(ix); + } else { + ix += 1; + } + } + + for fold_edit in fold_edits { + let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot); + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + tab_edits.push(TabEdit { + old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end), + new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end), + }); + } + } else { + new_snapshot.version += 1; + tab_edits.push(TabEdit { + old: TabPoint::zero()..old_snapshot.max_point(), + new: TabPoint::zero()..new_snapshot.max_point(), + }); + } + + *old_snapshot = new_snapshot; + (old_snapshot.clone(), tab_edits) + } +} + +#[derive(Clone)] +pub struct TabSnapshot { + pub fold_snapshot: FoldSnapshot, + pub tab_size: NonZeroU32, + pub max_expansion_column: u32, + pub version: usize, +} + +impl TabSnapshot { + pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { + &self.fold_snapshot.inlay_snapshot.buffer + } + + pub fn line_len(&self, row: u32) -> u32 { + let max_point = self.max_point(); + if row < max_point.row() { + self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row))) + .0 + .column + } else { + max_point.column() + } + } + + pub fn text_summary(&self) -> TextSummary { + self.text_summary_for_range(TabPoint::zero()..self.max_point()) + } + + pub fn 
text_summary_for_range(&self, range: Range) -> TextSummary { + let input_start = self.to_fold_point(range.start, Bias::Left).0; + let input_end = self.to_fold_point(range.end, Bias::Right).0; + let input_summary = self + .fold_snapshot + .text_summary_for_range(input_start..input_end); + + let mut first_line_chars = 0; + let line_end = if range.start.row() == range.end.row() { + range.end + } else { + self.max_point() + }; + for c in self + .chunks(range.start..line_end, false, Highlights::default()) + .flat_map(|chunk| chunk.text.chars()) + { + if c == '\n' { + break; + } + first_line_chars += 1; + } + + let mut last_line_chars = 0; + if range.start.row() == range.end.row() { + last_line_chars = first_line_chars; + } else { + for _ in self + .chunks( + TabPoint::new(range.end.row(), 0)..range.end, + false, + Highlights::default(), + ) + .flat_map(|chunk| chunk.text.chars()) + { + last_line_chars += 1; + } + } + + TextSummary { + lines: range.end.0 - range.start.0, + first_line_chars, + last_line_chars, + longest_row: input_summary.longest_row, + longest_row_chars: input_summary.longest_row_chars, + } + } + + pub fn chunks<'a>( + &'a self, + range: Range, + language_aware: bool, + highlights: Highlights<'a>, + ) -> TabChunks<'a> { + let (input_start, expanded_char_column, to_next_stop) = + self.to_fold_point(range.start, Bias::Left); + let input_column = input_start.column(); + let input_start = input_start.to_offset(&self.fold_snapshot); + let input_end = self + .to_fold_point(range.end, Bias::Right) + .0 + .to_offset(&self.fold_snapshot); + let to_next_stop = if range.start.0 + Point::new(0, to_next_stop) > range.end.0 { + range.end.column() - range.start.column() + } else { + to_next_stop + }; + + TabChunks { + fold_chunks: self.fold_snapshot.chunks( + input_start..input_end, + language_aware, + highlights, + ), + input_column, + column: expanded_char_column, + max_expansion_column: self.max_expansion_column, + output_position: range.start.0, + max_output_position: range.end.0, + tab_size: self.tab_size, + chunk: Chunk { + text: &SPACES[0..(to_next_stop as usize)], + is_tab: true, + ..Default::default() + }, + inside_leading_tab: to_next_stop > 0, + } + } + + pub fn buffer_rows(&self, row: u32) -> fold_map::FoldBufferRows<'_> { + self.fold_snapshot.buffer_rows(row) + } + + #[cfg(test)] + pub fn text(&self) -> String { + self.chunks( + TabPoint::zero()..self.max_point(), + false, + Highlights::default(), + ) + .map(|chunk| chunk.text) + .collect() + } + + pub fn max_point(&self) -> TabPoint { + self.to_tab_point(self.fold_snapshot.max_point()) + } + + pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint { + self.to_tab_point( + self.fold_snapshot + .clip_point(self.to_fold_point(point, bias).0, bias), + ) + } + + pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint { + let chars = self.fold_snapshot.chars_at(FoldPoint::new(input.row(), 0)); + let expanded = self.expand_tabs(chars, input.column()); + TabPoint::new(input.row(), expanded) + } + + pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) { + let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0)); + let expanded = output.column(); + let (collapsed, expanded_char_column, to_next_stop) = + self.collapse_tabs(chars, expanded, bias); + ( + FoldPoint::new(output.row(), collapsed), + expanded_char_column, + to_next_stop, + ) + } + + pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint { + let inlay_point = 
self.fold_snapshot.inlay_snapshot.to_inlay_point(point); + let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias); + self.to_tab_point(fold_point) + } + + pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point { + let fold_point = self.to_fold_point(point, bias).0; + let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot); + self.fold_snapshot + .inlay_snapshot + .to_buffer_point(inlay_point) + } + + fn expand_tabs(&self, chars: impl Iterator, column: u32) -> u32 { + let tab_size = self.tab_size.get(); + + let mut expanded_chars = 0; + let mut expanded_bytes = 0; + let mut collapsed_bytes = 0; + let end_column = column.min(self.max_expansion_column); + for c in chars { + if collapsed_bytes >= end_column { + break; + } + if c == '\t' { + let tab_len = tab_size - expanded_chars % tab_size; + expanded_bytes += tab_len; + expanded_chars += tab_len; + } else { + expanded_bytes += c.len_utf8() as u32; + expanded_chars += 1; + } + collapsed_bytes += c.len_utf8() as u32; + } + expanded_bytes + column.saturating_sub(collapsed_bytes) + } + + fn collapse_tabs( + &self, + chars: impl Iterator, + column: u32, + bias: Bias, + ) -> (u32, u32, u32) { + let tab_size = self.tab_size.get(); + + let mut expanded_bytes = 0; + let mut expanded_chars = 0; + let mut collapsed_bytes = 0; + for c in chars { + if expanded_bytes >= column { + break; + } + if collapsed_bytes >= self.max_expansion_column { + break; + } + + if c == '\t' { + let tab_len = tab_size - (expanded_chars % tab_size); + expanded_chars += tab_len; + expanded_bytes += tab_len; + if expanded_bytes > column { + expanded_chars -= expanded_bytes - column; + return match bias { + Bias::Left => (collapsed_bytes, expanded_chars, expanded_bytes - column), + Bias::Right => (collapsed_bytes + 1, expanded_chars, 0), + }; + } + } else { + expanded_chars += 1; + expanded_bytes += c.len_utf8() as u32; + } + + if expanded_bytes > column && matches!(bias, Bias::Left) { + expanded_chars -= 1; + break; + } + + collapsed_bytes += c.len_utf8() as u32; + } + ( + collapsed_bytes + column.saturating_sub(expanded_bytes), + expanded_chars, + 0, + ) + } +} + +#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] +pub struct TabPoint(pub Point); + +impl TabPoint { + pub fn new(row: u32, column: u32) -> Self { + Self(Point::new(row, column)) + } + + pub fn zero() -> Self { + Self::new(0, 0) + } + + pub fn row(self) -> u32 { + self.0.row + } + + pub fn column(self) -> u32 { + self.0.column + } +} + +impl From for TabPoint { + fn from(point: Point) -> Self { + Self(point) + } +} + +pub type TabEdit = text::Edit; + +#[derive(Clone, Debug, Default, Eq, PartialEq)] +pub struct TextSummary { + pub lines: Point, + pub first_line_chars: u32, + pub last_line_chars: u32, + pub longest_row: u32, + pub longest_row_chars: u32, +} + +impl<'a> From<&'a str> for TextSummary { + fn from(text: &'a str) -> Self { + let sum = text::TextSummary::from(text); + + TextSummary { + lines: sum.lines, + first_line_chars: sum.first_line_chars, + last_line_chars: sum.last_line_chars, + longest_row: sum.longest_row, + longest_row_chars: sum.longest_row_chars, + } + } +} + +impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { + fn add_assign(&mut self, other: &'a Self) { + let joined_chars = self.last_line_chars + other.first_line_chars; + if joined_chars > self.longest_row_chars { + self.longest_row = self.lines.row; + self.longest_row_chars = joined_chars; + } + if other.longest_row_chars > self.longest_row_chars { + self.longest_row = self.lines.row + 
other.longest_row; + self.longest_row_chars = other.longest_row_chars; + } + + if self.lines.row == 0 { + self.first_line_chars += other.first_line_chars; + } + + if other.lines.row == 0 { + self.last_line_chars += other.first_line_chars; + } else { + self.last_line_chars = other.last_line_chars; + } + + self.lines += &other.lines; + } +} + +// Handles a tab width <= 16 +const SPACES: &str = " "; + +pub struct TabChunks<'a> { + fold_chunks: FoldChunks<'a>, + chunk: Chunk<'a>, + column: u32, + max_expansion_column: u32, + output_position: Point, + input_column: u32, + max_output_position: Point, + tab_size: NonZeroU32, + inside_leading_tab: bool, +} + +impl<'a> Iterator for TabChunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> Option { + if self.chunk.text.is_empty() { + if let Some(chunk) = self.fold_chunks.next() { + self.chunk = chunk; + if self.inside_leading_tab { + self.chunk.text = &self.chunk.text[1..]; + self.inside_leading_tab = false; + self.input_column += 1; + } + } else { + return None; + } + } + + for (ix, c) in self.chunk.text.char_indices() { + match c { + '\t' => { + if ix > 0 { + let (prefix, suffix) = self.chunk.text.split_at(ix); + self.chunk.text = suffix; + return Some(Chunk { + text: prefix, + ..self.chunk + }); + } else { + self.chunk.text = &self.chunk.text[1..]; + let tab_size = if self.input_column < self.max_expansion_column { + self.tab_size.get() + } else { + 1 + }; + let mut len = tab_size - self.column % tab_size; + let next_output_position = cmp::min( + self.output_position + Point::new(0, len), + self.max_output_position, + ); + len = next_output_position.column - self.output_position.column; + self.column += len; + self.input_column += 1; + self.output_position = next_output_position; + return Some(Chunk { + text: &SPACES[..len as usize], + is_tab: true, + ..self.chunk + }); + } + } + '\n' => { + self.column = 0; + self.input_column = 0; + self.output_position += Point::new(1, 0); + } + _ => { + self.column += 1; + if !self.inside_leading_tab { + self.input_column += c.len_utf8() as u32; + } + self.output_position.column += c.len_utf8() as u32; + } + } + } + + Some(mem::take(&mut self.chunk)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + display_map::{fold_map::FoldMap, inlay_map::InlayMap}, + MultiBuffer, + }; + use rand::{prelude::StdRng, Rng}; + + #[gpui::test] + fn test_expand_tabs(cx: &mut gpui::AppContext) { + let buffer = MultiBuffer::build_simple("", cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + + assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 0), 0); + assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 1), 4); + assert_eq!(tab_snapshot.expand_tabs("\ta".chars(), 2), 5); + } + + #[gpui::test] + fn test_long_lines(cx: &mut gpui::AppContext) { + let max_expansion_column = 12; + let input = "A\tBC\tDEF\tG\tHI\tJ\tK\tL\tM"; + let output = "A BC DEF G HI J K L M"; + + let buffer = MultiBuffer::build_simple(input, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + + tab_snapshot.max_expansion_column = max_expansion_column; + assert_eq!(tab_snapshot.text(), output); + + for (ix, c) in 
input.char_indices() { + assert_eq!( + tab_snapshot + .chunks( + TabPoint::new(0, ix as u32)..tab_snapshot.max_point(), + false, + Highlights::default(), + ) + .map(|c| c.text) + .collect::(), + &output[ix..], + "text from index {ix}" + ); + + if c != '\t' { + let input_point = Point::new(0, ix as u32); + let output_point = Point::new(0, output.find(c).unwrap() as u32); + assert_eq!( + tab_snapshot.to_tab_point(FoldPoint(input_point)), + TabPoint(output_point), + "to_tab_point({input_point:?})" + ); + assert_eq!( + tab_snapshot + .to_fold_point(TabPoint(output_point), Bias::Left) + .0, + FoldPoint(input_point), + "to_fold_point({output_point:?})" + ); + } + } + } + + #[gpui::test] + fn test_long_lines_with_character_spanning_max_expansion_column(cx: &mut gpui::AppContext) { + let max_expansion_column = 8; + let input = "abcdefg⋯hij"; + + let buffer = MultiBuffer::build_simple(input, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + + tab_snapshot.max_expansion_column = max_expansion_column; + assert_eq!(tab_snapshot.text(), input); + } + + #[gpui::test] + fn test_marking_tabs(cx: &mut gpui::AppContext) { + let input = "\t \thello"; + + let buffer = MultiBuffer::build_simple(&input, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + + assert_eq!( + chunks(&tab_snapshot, TabPoint::zero()), + vec![ + (" ".to_string(), true), + (" ".to_string(), false), + (" ".to_string(), true), + ("hello".to_string(), false), + ] + ); + assert_eq!( + chunks(&tab_snapshot, TabPoint::new(0, 2)), + vec![ + (" ".to_string(), true), + (" ".to_string(), false), + (" ".to_string(), true), + ("hello".to_string(), false), + ] + ); + + fn chunks(snapshot: &TabSnapshot, start: TabPoint) -> Vec<(String, bool)> { + let mut chunks = Vec::new(); + let mut was_tab = false; + let mut text = String::new(); + for chunk in snapshot.chunks(start..snapshot.max_point(), false, Highlights::default()) + { + if chunk.is_tab != was_tab { + if !text.is_empty() { + chunks.push((mem::take(&mut text), was_tab)); + } + was_tab = chunk.is_tab; + } + text.push_str(chunk.text); + } + + if !text.is_empty() { + chunks.push((text, was_tab)); + } + chunks + } + } + + #[gpui::test(iterations = 100)] + fn test_random_tabs(cx: &mut gpui::AppContext, mut rng: StdRng) { + let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); + let len = rng.gen_range(0..30); + let buffer = if rng.gen() { + let text = util::RandomCharIter::new(&mut rng) + .take(len) + .collect::(); + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(&mut rng, cx) + }; + let buffer_snapshot = buffer.read(cx).snapshot(cx); + log::info!("Buffer text: {:?}", buffer_snapshot.text()); + + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + log::info!("InlayMap text: {:?}", inlay_snapshot.text()); + let (mut fold_map, _) = FoldMap::new(inlay_snapshot.clone()); + fold_map.randomly_mutate(&mut rng); + let (fold_snapshot, _) = fold_map.read(inlay_snapshot, vec![]); + log::info!("FoldMap text: {:?}", fold_snapshot.text()); + let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng); + 
log::info!("InlayMap text: {:?}", inlay_snapshot.text()); + + let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size); + let tabs_snapshot = tab_map.set_max_expansion_column(32); + + let text = text::Rope::from(tabs_snapshot.text().as_str()); + log::info!( + "TabMap text (tab size: {}): {:?}", + tab_size, + tabs_snapshot.text(), + ); + + for _ in 0..5 { + let end_row = rng.gen_range(0..=text.max_point().row); + let end_column = rng.gen_range(0..=text.line_len(end_row)); + let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right)); + let start_row = rng.gen_range(0..=text.max_point().row); + let start_column = rng.gen_range(0..=text.line_len(start_row)); + let mut start = + TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left)); + if start > end { + mem::swap(&mut start, &mut end); + } + + let expected_text = text + .chunks_in_range(text.point_to_offset(start.0)..text.point_to_offset(end.0)) + .collect::(); + let expected_summary = TextSummary::from(expected_text.as_str()); + assert_eq!( + tabs_snapshot + .chunks(start..end, false, Highlights::default()) + .map(|c| c.text) + .collect::(), + expected_text, + "chunks({:?}..{:?})", + start, + end + ); + + let mut actual_summary = tabs_snapshot.text_summary_for_range(start..end); + if tab_size.get() > 1 && inlay_snapshot.text().contains('\t') { + actual_summary.longest_row = expected_summary.longest_row; + actual_summary.longest_row_chars = expected_summary.longest_row_chars; + } + assert_eq!(actual_summary, expected_summary); + } + + for row in 0..=text.max_point().row { + assert_eq!( + tabs_snapshot.line_len(row), + text.line_len(row), + "line_len({row})" + ); + } + } +} diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs new file mode 100644 index 0000000..a292f9f --- /dev/null +++ b/crates/editor/src/display_map/wrap_map.rs @@ -0,0 +1,1356 @@ +use super::{ + fold_map::FoldBufferRows, + tab_map::{self, TabEdit, TabPoint, TabSnapshot}, + Highlights, +}; +use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task}; +use language::{Chunk, Point}; +use lazy_static::lazy_static; +use multi_buffer::MultiBufferSnapshot; +use smol::future::yield_now; +use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration}; +use sum_tree::{Bias, Cursor, SumTree}; +use text::Patch; + +pub use super::tab_map::TextSummary; +pub type WrapEdit = text::Edit; + +/// Handles soft wrapping of text. +/// +/// See the [`display_map` module documentation](crate::display_map) for more information. 
+pub struct WrapMap {
+    snapshot: WrapSnapshot,
+    pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
+    interpolated_edits: Patch<u32>,
+    edits_since_sync: Patch<u32>,
+    wrap_width: Option<Pixels>,
+    background_task: Option<Task<()>>,
+    font_with_size: (Font, Pixels),
+}
+
+#[derive(Clone)]
+pub struct WrapSnapshot {
+    tab_snapshot: TabSnapshot,
+    transforms: SumTree<Transform>,
+    interpolated: bool,
+}
+
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+struct Transform {
+    summary: TransformSummary,
+    display_text: Option<&'static str>,
+}
+
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+struct TransformSummary {
+    input: TextSummary,
+    output: TextSummary,
+}
+
+#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
+pub struct WrapPoint(pub Point);
+
+pub struct WrapChunks<'a> {
+    input_chunks: tab_map::TabChunks<'a>,
+    input_chunk: Chunk<'a>,
+    output_position: WrapPoint,
+    max_output_row: u32,
+    transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
+}
+
+#[derive(Clone)]
+pub struct WrapBufferRows<'a> {
+    input_buffer_rows: FoldBufferRows<'a>,
+    input_buffer_row: Option<u32>,
+    output_row: u32,
+    soft_wrapped: bool,
+    max_output_row: u32,
+    transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
+}
+
+impl WrapMap {
+    pub fn new(
+        tab_snapshot: TabSnapshot,
+        font: Font,
+        font_size: Pixels,
+        wrap_width: Option<Pixels>,
+        cx: &mut AppContext,
+    ) -> (Model<Self>, WrapSnapshot) {
+        let handle = cx.new_model(|cx| {
+            let mut this = Self {
+                font_with_size: (font, font_size),
+                wrap_width: None,
+                pending_edits: Default::default(),
+                interpolated_edits: Default::default(),
+                edits_since_sync: Default::default(),
+                snapshot: WrapSnapshot::new(tab_snapshot),
+                background_task: None,
+            };
+            this.set_wrap_width(wrap_width, cx);
+            mem::take(&mut this.edits_since_sync);
+            this
+        });
+        let snapshot = handle.read(cx).snapshot.clone();
+        (handle, snapshot)
+    }
+
+    #[cfg(test)]
+    pub fn is_rewrapping(&self) -> bool {
+        self.background_task.is_some()
+    }
+
+    pub fn sync(
+        &mut self,
+        tab_snapshot: TabSnapshot,
+        edits: Vec<TabEdit>,
+        cx: &mut ModelContext<Self>,
+    ) -> (WrapSnapshot, Patch<u32>) {
+        if self.wrap_width.is_some() {
+            self.pending_edits.push_back((tab_snapshot, edits));
+            self.flush_edits(cx);
+        } else {
+            self.edits_since_sync = self
+                .edits_since_sync
+                .compose(&self.snapshot.interpolate(tab_snapshot, &edits));
+            self.snapshot.interpolated = false;
+        }
+
+        (self.snapshot.clone(), mem::take(&mut self.edits_since_sync))
+    }
+
+    pub fn set_font_with_size(
+        &mut self,
+        font: Font,
+        font_size: Pixels,
+        cx: &mut ModelContext<Self>,
+    ) -> bool {
+        let font_with_size = (font, font_size);
+
+        if font_with_size == self.font_with_size {
+            false
+        } else {
+            self.font_with_size = font_with_size;
+            self.rewrap(cx);
+            true
+        }
+    }
+
+    pub fn set_wrap_width(
+        &mut self,
+        wrap_width: Option<Pixels>,
+        cx: &mut ModelContext<Self>,
+    ) -> bool {
+        if wrap_width == self.wrap_width {
+            return false;
+        }
+
+        self.wrap_width = wrap_width;
+        self.rewrap(cx);
+        true
+    }
+
+    fn rewrap(&mut self, cx: &mut ModelContext<Self>) {
+        self.background_task.take();
+        self.interpolated_edits.clear();
+        self.pending_edits.clear();
+
+        if let Some(wrap_width) = self.wrap_width {
+            let mut new_snapshot = self.snapshot.clone();
+
+            let text_system = cx.text_system().clone();
+            let (font, font_size) = self.font_with_size.clone();
+            let task = cx.background_executor().spawn(async move {
+                let mut line_wrapper = text_system.line_wrapper(font, font_size);
+                let tab_snapshot = new_snapshot.tab_snapshot.clone();
+                let range = TabPoint::zero()..tab_snapshot.max_point();
+                let edits = new_snapshot
+ .update( + tab_snapshot, + &[TabEdit { + old: range.clone(), + new: range.clone(), + }], + wrap_width, + &mut line_wrapper, + ) + .await; + (new_snapshot, edits) + }); + + match cx + .background_executor() + .block_with_timeout(Duration::from_millis(5), task) + { + Ok((snapshot, edits)) => { + self.snapshot = snapshot; + self.edits_since_sync = self.edits_since_sync.compose(&edits); + } + Err(wrap_task) => { + self.background_task = Some(cx.spawn(|this, mut cx| async move { + let (snapshot, edits) = wrap_task.await; + this.update(&mut cx, |this, cx| { + this.snapshot = snapshot; + this.edits_since_sync = this + .edits_since_sync + .compose(mem::take(&mut this.interpolated_edits).invert()) + .compose(&edits); + this.background_task = None; + this.flush_edits(cx); + cx.notify(); + }) + .ok(); + })); + } + } + } else { + let old_rows = self.snapshot.transforms.summary().output.lines.row + 1; + self.snapshot.transforms = SumTree::new(); + let summary = self.snapshot.tab_snapshot.text_summary(); + if !summary.lines.is_zero() { + self.snapshot + .transforms + .push(Transform::isomorphic(summary), &()); + } + let new_rows = self.snapshot.transforms.summary().output.lines.row + 1; + self.snapshot.interpolated = false; + self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![WrapEdit { + old: 0..old_rows, + new: 0..new_rows, + }])); + } + } + + fn flush_edits(&mut self, cx: &mut ModelContext) { + if !self.snapshot.interpolated { + let mut to_remove_len = 0; + for (tab_snapshot, _) in &self.pending_edits { + if tab_snapshot.version <= self.snapshot.tab_snapshot.version { + to_remove_len += 1; + } else { + break; + } + } + self.pending_edits.drain(..to_remove_len); + } + + if self.pending_edits.is_empty() { + return; + } + + if let Some(wrap_width) = self.wrap_width { + if self.background_task.is_none() { + let pending_edits = self.pending_edits.clone(); + let mut snapshot = self.snapshot.clone(); + let text_system = cx.text_system().clone(); + let (font, font_size) = self.font_with_size.clone(); + let update_task = cx.background_executor().spawn(async move { + let mut edits = Patch::default(); + let mut line_wrapper = text_system.line_wrapper(font, font_size); + for (tab_snapshot, tab_edits) in pending_edits { + let wrap_edits = snapshot + .update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper) + .await; + edits = edits.compose(&wrap_edits); + } + (snapshot, edits) + }); + + match cx + .background_executor() + .block_with_timeout(Duration::from_millis(1), update_task) + { + Ok((snapshot, output_edits)) => { + self.snapshot = snapshot; + self.edits_since_sync = self.edits_since_sync.compose(&output_edits); + } + Err(update_task) => { + self.background_task = Some(cx.spawn(|this, mut cx| async move { + let (snapshot, edits) = update_task.await; + this.update(&mut cx, |this, cx| { + this.snapshot = snapshot; + this.edits_since_sync = this + .edits_since_sync + .compose(mem::take(&mut this.interpolated_edits).invert()) + .compose(&edits); + this.background_task = None; + this.flush_edits(cx); + cx.notify(); + }) + .ok(); + })); + } + } + } + } + + let was_interpolated = self.snapshot.interpolated; + let mut to_remove_len = 0; + for (tab_snapshot, edits) in &self.pending_edits { + if tab_snapshot.version <= self.snapshot.tab_snapshot.version { + to_remove_len += 1; + } else { + let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits); + self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits); + self.interpolated_edits = 
self.interpolated_edits.compose(&interpolated_edits); + } + } + + if !was_interpolated { + self.pending_edits.drain(..to_remove_len); + } + } +} + +impl WrapSnapshot { + fn new(tab_snapshot: TabSnapshot) -> Self { + let mut transforms = SumTree::new(); + let extent = tab_snapshot.text_summary(); + if !extent.lines.is_zero() { + transforms.push(Transform::isomorphic(extent), &()); + } + Self { + transforms, + tab_snapshot, + interpolated: true, + } + } + + pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { + self.tab_snapshot.buffer_snapshot() + } + + fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch { + let mut new_transforms; + if tab_edits.is_empty() { + new_transforms = self.transforms.clone(); + } else { + let mut old_cursor = self.transforms.cursor::(); + + let mut tab_edits_iter = tab_edits.iter().peekable(); + new_transforms = + old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &()); + + while let Some(edit) = tab_edits_iter.next() { + if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) { + let summary = new_tab_snapshot.text_summary_for_range( + TabPoint::from(new_transforms.summary().input.lines)..edit.new.start, + ); + new_transforms.push_or_extend(Transform::isomorphic(summary)); + } + + if !edit.new.is_empty() { + new_transforms.push_or_extend(Transform::isomorphic( + new_tab_snapshot.text_summary_for_range(edit.new.clone()), + )); + } + + old_cursor.seek_forward(&edit.old.end, Bias::Right, &()); + if let Some(next_edit) = tab_edits_iter.peek() { + if next_edit.old.start > old_cursor.end(&()) { + if old_cursor.end(&()) > edit.old.end { + let summary = self + .tab_snapshot + .text_summary_for_range(edit.old.end..old_cursor.end(&())); + new_transforms.push_or_extend(Transform::isomorphic(summary)); + } + + old_cursor.next(&()); + new_transforms.append( + old_cursor.slice(&next_edit.old.start, Bias::Right, &()), + &(), + ); + } + } else { + if old_cursor.end(&()) > edit.old.end { + let summary = self + .tab_snapshot + .text_summary_for_range(edit.old.end..old_cursor.end(&())); + new_transforms.push_or_extend(Transform::isomorphic(summary)); + } + old_cursor.next(&()); + new_transforms.append(old_cursor.suffix(&()), &()); + } + } + } + + let old_snapshot = mem::replace( + self, + WrapSnapshot { + tab_snapshot: new_tab_snapshot, + transforms: new_transforms, + interpolated: true, + }, + ); + self.check_invariants(); + old_snapshot.compute_edits(tab_edits, self) + } + + async fn update( + &mut self, + new_tab_snapshot: TabSnapshot, + tab_edits: &[TabEdit], + wrap_width: Pixels, + line_wrapper: &mut LineWrapper, + ) -> Patch { + #[derive(Debug)] + struct RowEdit { + old_rows: Range, + new_rows: Range, + } + + let mut tab_edits_iter = tab_edits.iter().peekable(); + let mut row_edits = Vec::new(); + while let Some(edit) = tab_edits_iter.next() { + let mut row_edit = RowEdit { + old_rows: edit.old.start.row()..edit.old.end.row() + 1, + new_rows: edit.new.start.row()..edit.new.end.row() + 1, + }; + + while let Some(next_edit) = tab_edits_iter.peek() { + if next_edit.old.start.row() <= row_edit.old_rows.end { + row_edit.old_rows.end = next_edit.old.end.row() + 1; + row_edit.new_rows.end = next_edit.new.end.row() + 1; + tab_edits_iter.next(); + } else { + break; + } + } + + row_edits.push(row_edit); + } + + let mut new_transforms; + if row_edits.is_empty() { + new_transforms = self.transforms.clone(); + } else { + let mut row_edits = row_edits.into_iter().peekable(); + let mut old_cursor = 
self.transforms.cursor::(); + + new_transforms = old_cursor.slice( + &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), + Bias::Right, + &(), + ); + + while let Some(edit) = row_edits.next() { + if edit.new_rows.start > new_transforms.summary().input.lines.row { + let summary = new_tab_snapshot.text_summary_for_range( + TabPoint(new_transforms.summary().input.lines) + ..TabPoint::new(edit.new_rows.start, 0), + ); + new_transforms.push_or_extend(Transform::isomorphic(summary)); + } + + let mut line = String::new(); + let mut remaining = None; + let mut chunks = new_tab_snapshot.chunks( + TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(), + false, + Highlights::default(), + ); + let mut edit_transforms = Vec::::new(); + for _ in edit.new_rows.start..edit.new_rows.end { + while let Some(chunk) = + remaining.take().or_else(|| chunks.next().map(|c| c.text)) + { + if let Some(ix) = chunk.find('\n') { + line.push_str(&chunk[..ix + 1]); + remaining = Some(&chunk[ix + 1..]); + break; + } else { + line.push_str(chunk) + } + } + + if line.is_empty() { + break; + } + + let mut prev_boundary_ix = 0; + for boundary in line_wrapper.wrap_line(&line, wrap_width) { + let wrapped = &line[prev_boundary_ix..boundary.ix]; + push_isomorphic(&mut edit_transforms, TextSummary::from(wrapped)); + edit_transforms.push(Transform::wrap(boundary.next_indent)); + prev_boundary_ix = boundary.ix; + } + + if prev_boundary_ix < line.len() { + push_isomorphic( + &mut edit_transforms, + TextSummary::from(&line[prev_boundary_ix..]), + ); + } + + line.clear(); + yield_now().await; + } + + let mut edit_transforms = edit_transforms.into_iter(); + if let Some(transform) = edit_transforms.next() { + new_transforms.push_or_extend(transform); + } + new_transforms.extend(edit_transforms, &()); + + old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &()); + if let Some(next_edit) = row_edits.peek() { + if next_edit.old_rows.start > old_cursor.end(&()).row() { + if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { + let summary = self.tab_snapshot.text_summary_for_range( + TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), + ); + new_transforms.push_or_extend(Transform::isomorphic(summary)); + } + old_cursor.next(&()); + new_transforms.append( + old_cursor.slice( + &TabPoint::new(next_edit.old_rows.start, 0), + Bias::Right, + &(), + ), + &(), + ); + } + } else { + if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { + let summary = self.tab_snapshot.text_summary_for_range( + TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), + ); + new_transforms.push_or_extend(Transform::isomorphic(summary)); + } + old_cursor.next(&()); + new_transforms.append(old_cursor.suffix(&()), &()); + } + } + } + + let old_snapshot = mem::replace( + self, + WrapSnapshot { + tab_snapshot: new_tab_snapshot, + transforms: new_transforms, + interpolated: false, + }, + ); + self.check_invariants(); + old_snapshot.compute_edits(tab_edits, self) + } + + fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch { + let mut wrap_edits = Vec::new(); + let mut old_cursor = self.transforms.cursor::(); + let mut new_cursor = new_snapshot.transforms.cursor::(); + for mut tab_edit in tab_edits.iter().cloned() { + tab_edit.old.start.0.column = 0; + tab_edit.old.end.0 += Point::new(1, 0); + tab_edit.new.start.0.column = 0; + tab_edit.new.end.0 += Point::new(1, 0); + + old_cursor.seek(&tab_edit.old.start, Bias::Right, &()); + let mut old_start = 
old_cursor.start().output.lines; + old_start += tab_edit.old.start.0 - old_cursor.start().input.lines; + + old_cursor.seek(&tab_edit.old.end, Bias::Right, &()); + let mut old_end = old_cursor.start().output.lines; + old_end += tab_edit.old.end.0 - old_cursor.start().input.lines; + + new_cursor.seek(&tab_edit.new.start, Bias::Right, &()); + let mut new_start = new_cursor.start().output.lines; + new_start += tab_edit.new.start.0 - new_cursor.start().input.lines; + + new_cursor.seek(&tab_edit.new.end, Bias::Right, &()); + let mut new_end = new_cursor.start().output.lines; + new_end += tab_edit.new.end.0 - new_cursor.start().input.lines; + + wrap_edits.push(WrapEdit { + old: old_start.row..old_end.row, + new: new_start.row..new_end.row, + }); + } + + consolidate_wrap_edits(&mut wrap_edits); + Patch::new(wrap_edits) + } + + pub(crate) fn chunks<'a>( + &'a self, + rows: Range, + language_aware: bool, + highlights: Highlights<'a>, + ) -> WrapChunks<'a> { + let output_start = WrapPoint::new(rows.start, 0); + let output_end = WrapPoint::new(rows.end, 0); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + transforms.seek(&output_start, Bias::Right, &()); + let mut input_start = TabPoint(transforms.start().1 .0); + if transforms.item().map_or(false, |t| t.is_isomorphic()) { + input_start.0 += output_start.0 - transforms.start().0 .0; + } + let input_end = self + .to_tab_point(output_end) + .min(self.tab_snapshot.max_point()); + WrapChunks { + input_chunks: self.tab_snapshot.chunks( + input_start..input_end, + language_aware, + highlights, + ), + input_chunk: Default::default(), + output_position: output_start, + max_output_row: rows.end, + transforms, + } + } + + pub fn max_point(&self) -> WrapPoint { + WrapPoint(self.transforms.summary().output.lines) + } + + pub fn line_len(&self, row: u32) -> u32 { + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); + if cursor + .item() + .map_or(false, |transform| transform.is_isomorphic()) + { + let overshoot = row - cursor.start().0.row(); + let tab_row = cursor.start().1.row() + overshoot; + let tab_line_len = self.tab_snapshot.line_len(tab_row); + if overshoot == 0 { + cursor.start().0.column() + (tab_line_len - cursor.start().1.column()) + } else { + tab_line_len + } + } else { + cursor.start().0.column() + } + } + + pub fn soft_wrap_indent(&self, row: u32) -> Option { + let mut cursor = self.transforms.cursor::(); + cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); + cursor.item().and_then(|transform| { + if transform.is_isomorphic() { + None + } else { + Some(transform.summary.output.lines.column) + } + }) + } + + pub fn longest_row(&self) -> u32 { + self.transforms.summary().output.longest_row + } + + pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); + let mut input_row = transforms.start().1.row(); + if transforms.item().map_or(false, |t| t.is_isomorphic()) { + input_row += start_row - transforms.start().0.row(); + } + let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic()); + let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row); + let input_buffer_row = input_buffer_rows.next().unwrap(); + WrapBufferRows { + transforms, + input_buffer_row, + input_buffer_rows, + output_row: start_row, + soft_wrapped, + max_output_row: self.max_point().row(), + } + } + + 
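/// Converts a point in wrap (soft-wrapped) coordinates back into tab coordinates. + /// The overshoot past the containing transform's start is only added for isomorphic transforms, since wrap transforms insert rows that have no tab-space counterpart. + 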
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + cursor.seek(&point, Bias::Right, &()); + let mut tab_point = cursor.start().1 .0; + if cursor.item().map_or(false, |t| t.is_isomorphic()) { + tab_point += point.0 - cursor.start().0 .0; + } + TabPoint(tab_point) + } + + pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point { + self.tab_snapshot.to_point(self.to_tab_point(point), bias) + } + + pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint { + self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias)) + } + + pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { + let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(); + cursor.seek(&point, Bias::Right, &()); + WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)) + } + + pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { + if bias == Bias::Left { + let mut cursor = self.transforms.cursor::(); + cursor.seek(&point, Bias::Right, &()); + if cursor.item().map_or(false, |t| !t.is_isomorphic()) { + point = *cursor.start(); + *point.column_mut() -= 1; + } + } + + self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias)) + } + + pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 { + if self.transforms.is_empty() { + return 0; + } + + *point.column_mut() = 0; + + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + cursor.seek(&point, Bias::Right, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + + while let Some(transform) = cursor.item() { + if transform.is_isomorphic() && cursor.start().1.column() == 0 { + return cmp::min(cursor.end(&()).0.row(), point.row()); + } else { + cursor.prev(&()); + } + } + + unreachable!() + } + + pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { + point.0 += Point::new(1, 0); + + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + cursor.seek(&point, Bias::Right, &()); + while let Some(transform) = cursor.item() { + if transform.is_isomorphic() && cursor.start().1.column() == 0 { + return Some(cmp::max(cursor.start().0.row(), point.row())); + } else { + cursor.next(&()); + } + } + + None + } + + fn check_invariants(&self) { + #[cfg(test)] + { + assert_eq!( + TabPoint::from(self.transforms.summary().input.lines), + self.tab_snapshot.max_point() + ); + + { + let mut transforms = self.transforms.cursor::<()>().peekable(); + while let Some(transform) = transforms.next() { + if let Some(next_transform) = transforms.peek() { + assert!(transform.is_isomorphic() != next_transform.is_isomorphic()); + } + } + } + + let text = language::Rope::from(self.text().as_str()); + let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0); + let mut expected_buffer_rows = Vec::new(); + let mut prev_tab_row = 0; + for display_row in 0..=self.max_point().row() { + let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0)); + if tab_point.row() == prev_tab_row && display_row != 0 { + expected_buffer_rows.push(None); + } else { + expected_buffer_rows.push(input_buffer_rows.next().unwrap()); + } + + prev_tab_row = tab_point.row(); + assert_eq!(self.line_len(display_row), text.line_len(display_row)); + } + + for start_display_row in 0..expected_buffer_rows.len() { + assert_eq!( + self.buffer_rows(start_display_row as u32) + .collect::>(), + &expected_buffer_rows[start_display_row..], + "invalid buffer_rows({}..)", + start_display_row + ); + } 
+ } + } +} + +impl<'a> Iterator for WrapChunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> Option { + if self.output_position.row() >= self.max_output_row { + return None; + } + + let transform = self.transforms.item()?; + if let Some(display_text) = transform.display_text { + let mut start_ix = 0; + let mut end_ix = display_text.len(); + let mut summary = transform.summary.output.lines; + + if self.output_position > self.transforms.start().0 { + // Exclude newline starting prior to the desired row. + start_ix = 1; + summary.row = 0; + } else if self.output_position.row() + 1 >= self.max_output_row { + // Exclude soft indentation ending after the desired row. + end_ix = 1; + summary.column = 0; + } + + self.output_position.0 += summary; + self.transforms.next(&()); + return Some(Chunk { + text: &display_text[start_ix..end_ix], + ..self.input_chunk + }); + } + + if self.input_chunk.text.is_empty() { + self.input_chunk = self.input_chunks.next().unwrap(); + } + + let mut input_len = 0; + let transform_end = self.transforms.end(&()).0; + for c in self.input_chunk.text.chars() { + let char_len = c.len_utf8(); + input_len += char_len; + if c == '\n' { + *self.output_position.row_mut() += 1; + *self.output_position.column_mut() = 0; + } else { + *self.output_position.column_mut() += char_len as u32; + } + + if self.output_position >= transform_end { + self.transforms.next(&()); + break; + } + } + + let (prefix, suffix) = self.input_chunk.text.split_at(input_len); + self.input_chunk.text = suffix; + Some(Chunk { + text: prefix, + ..self.input_chunk + }) + } +} + +impl<'a> Iterator for WrapBufferRows<'a> { + type Item = Option; + + fn next(&mut self) -> Option { + if self.output_row > self.max_output_row { + return None; + } + + let buffer_row = self.input_buffer_row; + let soft_wrapped = self.soft_wrapped; + + self.output_row += 1; + self.transforms + .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left, &()); + if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { + self.input_buffer_row = self.input_buffer_rows.next().unwrap(); + self.soft_wrapped = false; + } else { + self.soft_wrapped = true; + } + + Some(if soft_wrapped { None } else { buffer_row }) + } +} + +impl Transform { + fn isomorphic(summary: TextSummary) -> Self { + #[cfg(test)] + assert!(!summary.lines.is_zero()); + + Self { + summary: TransformSummary { + input: summary.clone(), + output: summary, + }, + display_text: None, + } + } + + fn wrap(indent: u32) -> Self { + lazy_static! 
{ + static ref WRAP_TEXT: String = { + let mut wrap_text = String::new(); + wrap_text.push('\n'); + wrap_text.extend((0..LineWrapper::MAX_INDENT as usize).map(|_| ' ')); + wrap_text + }; + } + + Self { + summary: TransformSummary { + input: TextSummary::default(), + output: TextSummary { + lines: Point::new(1, indent), + first_line_chars: 0, + last_line_chars: indent, + longest_row: 1, + longest_row_chars: indent, + }, + }, + display_text: Some(&WRAP_TEXT[..1 + indent as usize]), + } + } + + fn is_isomorphic(&self) -> bool { + self.display_text.is_none() + } +} + +impl sum_tree::Item for Transform { + type Summary = TransformSummary; + + fn summary(&self) -> Self::Summary { + self.summary.clone() + } +} + +fn push_isomorphic(transforms: &mut Vec, summary: TextSummary) { + if let Some(last_transform) = transforms.last_mut() { + if last_transform.is_isomorphic() { + last_transform.summary.input += &summary; + last_transform.summary.output += &summary; + return; + } + } + transforms.push(Transform::isomorphic(summary)); +} + +trait SumTreeExt { + fn push_or_extend(&mut self, transform: Transform); +} + +impl SumTreeExt for SumTree { + fn push_or_extend(&mut self, transform: Transform) { + let mut transform = Some(transform); + self.update_last( + |last_transform| { + if last_transform.is_isomorphic() && transform.as_ref().unwrap().is_isomorphic() { + let transform = transform.take().unwrap(); + last_transform.summary.input += &transform.summary.input; + last_transform.summary.output += &transform.summary.output; + } + }, + &(), + ); + + if let Some(transform) = transform { + self.push(transform, &()); + } + } +} + +impl WrapPoint { + pub fn new(row: u32, column: u32) -> Self { + Self(Point::new(row, column)) + } + + pub fn row(self) -> u32 { + self.0.row + } + + pub fn row_mut(&mut self) -> &mut u32 { + &mut self.0.row + } + + pub fn column(self) -> u32 { + self.0.column + } + + pub fn column_mut(&mut self) -> &mut u32 { + &mut self.0.column + } +} + +impl sum_tree::Summary for TransformSummary { + type Context = (); + + fn add_summary(&mut self, other: &Self, _: &()) { + self.input += &other.input; + self.output += &other.output; + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += summary.input.lines; + } +} + +impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoint { + fn cmp(&self, cursor_location: &TransformSummary, _: &()) -> std::cmp::Ordering { + Ord::cmp(&self.0, &cursor_location.input.lines) + } +} + +impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { + self.0 += summary.output.lines; + } +} + +fn consolidate_wrap_edits(edits: &mut Vec) { + let mut i = 1; + while i < edits.len() { + let edit = edits[i].clone(); + let prev_edit = &mut edits[i - 1]; + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = edit.old.end; + prev_edit.new.end = edit.new.end; + edits.remove(i); + continue; + } + i += 1; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap}, + MultiBuffer, + }; + use gpui::{font, px, test::observe}; + use rand::prelude::*; + use settings::SettingsStore; + use smol::stream::StreamExt; + use std::{cmp, env, num::NonZeroU32}; + use text::Rope; + use theme::LoadThemes; + + #[gpui::test(iterations = 100)] + async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: 
StdRng) { + // todo this test is flaky + init_test(cx); + + cx.background_executor.set_block_on_ticks(0..=50); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let text_system = cx.read(|cx| cx.text_system().clone()); + let mut wrap_width = if rng.gen_bool(0.1) { + None + } else { + Some(px(rng.gen_range(0.0..=1000.0))) + }; + let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); + let font = font("Helvetica"); + let _font_id = text_system.font_id(&font); + let font_size = px(14.0); + + log::info!("Tab size: {}", tab_size); + log::info!("Wrap width: {:?}", wrap_width); + + let buffer = cx.update(|cx| { + if rng.gen() { + MultiBuffer::build_random(&mut rng, cx) + } else { + let len = rng.gen_range(0..10); + let text = util::RandomCharIter::new(&mut rng) + .take(len) + .collect::(); + MultiBuffer::build_simple(&text, cx) + } + }); + let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + log::info!("Buffer text: {:?}", buffer_snapshot.text()); + let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + log::info!("InlayMap text: {:?}", inlay_snapshot.text()); + let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone()); + log::info!("FoldMap text: {:?}", fold_snapshot.text()); + let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size); + let tabs_snapshot = tab_map.set_max_expansion_column(32); + log::info!("TabMap text: {:?}", tabs_snapshot.text()); + + let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size); + let unwrapped_text = tabs_snapshot.text(); + let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper); + + let (wrap_map, _) = + cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font, font_size, wrap_width, cx)); + let mut notifications = observe(&wrap_map, cx); + + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { + notifications.next().await.unwrap(); + } + + let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| { + assert!(!map.is_rewrapping()); + map.sync(tabs_snapshot.clone(), Vec::new(), cx) + }); + + let actual_text = initial_snapshot.text(); + assert_eq!( + actual_text, expected_text, + "unwrapped text is: {:?}", + unwrapped_text + ); + log::info!("Wrapped text: {:?}", actual_text); + + let mut next_inlay_id = 0; + let mut edits = Vec::new(); + for _i in 0..operations { + log::info!("{} ==============================================", _i); + + let mut buffer_edits = Vec::new(); + match rng.gen_range(0..=100) { + 0..=19 => { + wrap_width = if rng.gen_bool(0.2) { + None + } else { + Some(px(rng.gen_range(0.0..=1000.0))) + }; + log::info!("Setting wrap width to {:?}", wrap_width); + wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); + } + 20..=39 => { + for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) { + let (tabs_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (mut snapshot, wrap_edits) = + wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); + snapshot.check_invariants(); + snapshot.verify_chunks(&mut rng); + edits.push((snapshot, wrap_edits)); + } + } + 40..=59 => { + let (inlay_snapshot, inlay_edits) = + inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tabs_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (mut snapshot, wrap_edits) = + 
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); + snapshot.check_invariants(); + snapshot.verify_chunks(&mut rng); + edits.push((snapshot, wrap_edits)); + } + _ => { + buffer.update(cx, |buffer, cx| { + let subscription = buffer.subscribe(); + let edit_count = rng.gen_range(1..=5); + buffer.randomly_mutate(&mut rng, edit_count, cx); + buffer_snapshot = buffer.snapshot(cx); + buffer_edits.extend(subscription.consume()); + }); + } + } + + log::info!("Buffer text: {:?}", buffer_snapshot.text()); + let (inlay_snapshot, inlay_edits) = + inlay_map.sync(buffer_snapshot.clone(), buffer_edits); + log::info!("InlayMap text: {:?}", inlay_snapshot.text()); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + log::info!("FoldMap text: {:?}", fold_snapshot.text()); + let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); + log::info!("TabMap text: {:?}", tabs_snapshot.text()); + + let unwrapped_text = tabs_snapshot.text(); + let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper); + let (mut snapshot, wrap_edits) = + wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx)); + snapshot.check_invariants(); + snapshot.verify_chunks(&mut rng); + edits.push((snapshot, wrap_edits)); + + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) { + log::info!("Waiting for wrapping to finish"); + while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { + notifications.next().await.unwrap(); + } + wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty())); + } + + if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { + let (mut wrapped_snapshot, wrap_edits) = + wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx)); + let actual_text = wrapped_snapshot.text(); + let actual_longest_row = wrapped_snapshot.longest_row(); + log::info!("Wrapping finished: {:?}", actual_text); + wrapped_snapshot.check_invariants(); + wrapped_snapshot.verify_chunks(&mut rng); + edits.push((wrapped_snapshot.clone(), wrap_edits)); + assert_eq!( + actual_text, expected_text, + "unwrapped text is: {:?}", + unwrapped_text + ); + + let mut summary = TextSummary::default(); + for (ix, item) in wrapped_snapshot + .transforms + .items(&()) + .into_iter() + .enumerate() + { + summary += &item.summary.output; + log::info!("{} summary: {:?}", ix, item.summary.output,); + } + + if tab_size.get() == 1 + || !wrapped_snapshot + .tab_snapshot + .fold_snapshot + .text() + .contains('\t') + { + let mut expected_longest_rows = Vec::new(); + let mut longest_line_len = -1; + for (row, line) in expected_text.split('\n').enumerate() { + let line_char_count = line.chars().count() as isize; + if line_char_count > longest_line_len { + expected_longest_rows.clear(); + longest_line_len = line_char_count; + } + if line_char_count >= longest_line_len { + expected_longest_rows.push(row as u32); + } + } + + assert!( + expected_longest_rows.contains(&actual_longest_row), + "incorrect longest row {}. 
expected {:?} with length {}", + actual_longest_row, + expected_longest_rows, + longest_line_len, + ) + } + } + } + + let mut initial_text = Rope::from(initial_snapshot.text().as_str()); + for (snapshot, patch) in edits { + let snapshot_text = Rope::from(snapshot.text().as_str()); + for edit in &patch { + let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0)); + let old_end = initial_text.point_to_offset(cmp::min( + Point::new(edit.new.start + edit.old.len() as u32, 0), + initial_text.max_point(), + )); + let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0)); + let new_end = snapshot_text.point_to_offset(cmp::min( + Point::new(edit.new.end, 0), + snapshot_text.max_point(), + )); + let new_text = snapshot_text + .chunks_in_range(new_start..new_end) + .collect::(); + + initial_text.replace(old_start..old_end, &new_text); + } + assert_eq!(initial_text.to_string(), snapshot_text.to_string()); + } + + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { + log::info!("Waiting for wrapping to finish"); + while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { + notifications.next().await.unwrap(); + } + } + wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty())); + } + + fn init_test(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + let settings = SettingsStore::test(cx); + cx.set_global(settings); + theme::init(LoadThemes::JustBase, cx); + }); + } + + fn wrap_text( + unwrapped_text: &str, + wrap_width: Option, + line_wrapper: &mut LineWrapper, + ) -> String { + if let Some(wrap_width) = wrap_width { + let mut wrapped_text = String::new(); + for (row, line) in unwrapped_text.split('\n').enumerate() { + if row > 0 { + wrapped_text.push('\n') + } + + let mut prev_ix = 0; + for boundary in line_wrapper.wrap_line(line, wrap_width) { + wrapped_text.push_str(&line[prev_ix..boundary.ix]); + wrapped_text.push('\n'); + wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize)); + prev_ix = boundary.ix; + } + wrapped_text.push_str(&line[prev_ix..]); + } + wrapped_text + } else { + unwrapped_text.to_string() + } + } + + impl WrapSnapshot { + pub fn text(&self) -> String { + self.text_chunks(0).collect() + } + + pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator { + self.chunks( + wrap_row..self.max_point().row() + 1, + false, + Highlights::default(), + ) + .map(|h| h.text) + } + + fn verify_chunks(&mut self, rng: &mut impl Rng) { + for _ in 0..5 { + let mut end_row = rng.gen_range(0..=self.max_point().row()); + let start_row = rng.gen_range(0..=end_row); + end_row += 1; + + let mut expected_text = self.text_chunks(start_row).collect::(); + if expected_text.ends_with('\n') { + expected_text.push('\n'); + } + let mut expected_text = expected_text + .lines() + .take((end_row - start_row) as usize) + .collect::>() + .join("\n"); + if end_row <= self.max_point().row() { + expected_text.push('\n'); + } + + let actual_text = self + .chunks(start_row..end_row, true, Highlights::default()) + .map(|c| c.text) + .collect::(); + assert_eq!( + expected_text, + actual_text, + "chunks != highlighted_chunks for rows {:?}", + start_row..end_row + ); + } + } + } +} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs new file mode 100644 index 0000000..c40807e --- /dev/null +++ b/crates/editor/src/editor.rs @@ -0,0 +1,11854 @@ +#![allow(rustdoc::private_intra_doc_links)] +//! This is the place where everything editor-related is stored (data-wise) and displayed (ui-wise). +//! 
The main point of interest in this crate is [`Editor`] type, which is used in every other Zed part as a user input element. +//! It comes in different flavors: single line, multiline and a fixed height one. +//! +//! Editor contains of multiple large submodules: +//! * [`element`] — the place where all rendering happens +//! * [`display_map`] - chunks up text in the editor into the logical blocks, establishes coordinates and mapping between each of them. +//! Contains all metadata related to text transformations (folds, fake inlay text insertions, soft wraps, tab markup, etc.). +//! * [`inlay_hint_cache`] - is a storage of inlay hints out of LSP requests, responsible for querying LSP and updating `display_map`'s state accordingly. +//! +//! All other submodules and structs are mostly concerned with holding editor data about the way it displays current buffer region(s). +//! +//! If you're looking to improve Vim mode, you should check out Vim crate that wraps Editor and overrides its behaviour. +pub mod actions; +mod blame_entry_tooltip; +mod blink_manager; +pub mod display_map; +mod editor_settings; +mod element; +mod hunk_diff; +mod inlay_hint_cache; + +mod debounced_delay; +mod git; +mod highlight_matching_bracket; +mod hover_links; +mod hover_popover; +mod inline_completion_provider; +pub mod items; +mod mouse_context_menu; +pub mod movement; +mod persistence; +mod rust_analyzer_ext; +pub mod scroll; +mod selections_collection; +pub mod tasks; + +#[cfg(test)] +mod editor_tests; +#[cfg(any(test, feature = "test-support"))] +pub mod test; +use ::git::diff::{DiffHunk, DiffHunkStatus}; +use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; +pub(crate) use actions::*; +use aho_corasick::AhoCorasick; +use anyhow::{anyhow, Context as _, Result}; +use blink_manager::BlinkManager; +use client::{Collaborator, ParticipantIndex}; +use clock::ReplicaId; +use collections::{BTreeMap, Bound, HashMap, HashSet, VecDeque}; +use convert_case::{Case, Casing}; +use debounced_delay::DebouncedDelay; +pub use display_map::DisplayPoint; +use display_map::*; +use editor_settings::CurrentLineHighlight; +pub use editor_settings::EditorSettings; +use element::LineWithInvisibles; +pub use element::{ + CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, +}; +use futures::FutureExt; +use fuzzy::{StringMatch, StringMatchCandidate}; +use git::blame::GitBlame; +use git::diff_hunk_to_display; +use gpui::{ + div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement, + AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardItem, + Context, DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusableView, FontId, FontStyle, + FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, Model, MouseButton, PaintQuad, + ParentElement, Pixels, Render, SharedString, Size, StrikethroughStyle, Styled, StyledText, + Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle, View, ViewContext, + ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext, +}; +use highlight_matching_bracket::refresh_matching_bracket_highlights; +use hover_popover::{hide_hover, HoverState}; +use hunk_diff::ExpandedHunks; +pub(crate) use hunk_diff::HunkToExpand; +use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; +pub use inline_completion_provider::*; +pub use items::MAX_TAB_TITLE_LEN; +use itertools::Itertools; +use language::{ + char_kind, + language_settings::{self, 
all_language_settings, InlayHintSettings}, + markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CharKind, CodeLabel, + CursorShape, Diagnostic, Documentation, IndentKind, IndentSize, Language, OffsetRangeExt, + Point, Selection, SelectionGoal, TransactionId, +}; +use language::{BufferRow, Runnable, RunnableRange}; +use task::{ResolvedTask, TaskTemplate, TaskVariables}; + +use hover_links::{HoverLink, HoveredLinkState, InlayHighlight}; +use lsp::{DiagnosticSeverity, LanguageServerId}; +use mouse_context_menu::MouseContextMenu; +use movement::TextLayoutDetails; +pub use multi_buffer::{ + Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset, + ToPoint, +}; +use multi_buffer::{MultiBufferPoint, MultiBufferRow, ToOffsetUtf16}; +use ordered_float::OrderedFloat; +use parking_lot::{Mutex, RwLock}; +use project::project_settings::{GitGutterSetting, ProjectSettings}; +use project::{ + CodeAction, Completion, FormatTrigger, Item, Location, Project, ProjectPath, + ProjectTransaction, TaskSourceKind, WorktreeId, +}; +use rand::prelude::*; +use rpc::{proto::*, ErrorExt}; +use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide}; +use selections_collection::{resolve_multiple, MutableSelectionsCollection, SelectionsCollection}; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsStore}; +use smallvec::SmallVec; +use snippet::Snippet; +use std::ops::Not as _; +use std::{ + any::TypeId, + borrow::Cow, + cmp::{self, Ordering, Reverse}, + mem, + num::NonZeroU32, + ops::{ControlFlow, Deref, DerefMut, Range, RangeInclusive}, + path::Path, + sync::Arc, + time::{Duration, Instant}, +}; +pub use sum_tree::Bias; +use sum_tree::TreeMap; +use text::{BufferId, OffsetUtf16, Rope}; +use theme::{ + observe_buffer_font_size_adjustment, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, + ThemeColors, ThemeSettings, +}; +use ui::{ + h_flex, prelude::*, ButtonSize, ButtonStyle, IconButton, IconName, IconSize, ListItem, Popover, + Tooltip, +}; +use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt}; +use workspace::item::{ItemHandle, PreviewTabsSettings}; +use workspace::notifications::{DetachAndPromptErr, NotificationId}; +use workspace::{ + searchable::SearchEvent, ItemNavHistory, SplitDirection, ViewId, Workspace, WorkspaceId, +}; +use workspace::{OpenInTerminal, OpenTerminal, Toast}; + +use crate::hover_links::find_url; + +pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2; +const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); +const MAX_LINE_LEN: usize = 1024; +const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10; +const MAX_SELECTION_HISTORY_LEN: usize = 1024; +pub(crate) const CURSORS_VISIBLE_FOR: Duration = Duration::from_millis(2000); +#[doc(hidden)] +pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250); +#[doc(hidden)] +pub const DOCUMENT_HIGHLIGHTS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); + +pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(2); + +pub fn render_parsed_markdown( + element_id: impl Into, + parsed: &language::ParsedMarkdown, + editor_style: &EditorStyle, + workspace: Option>, + cx: &mut WindowContext, +) -> InteractiveText { + let code_span_background_color = cx + .theme() + .colors() + .editor_document_highlight_read_background; + + let highlights = gpui::combine_highlights( + parsed.highlights.iter().filter_map(|(range, highlight)| { + let highlight = highlight.to_highlight_style(&editor_style.syntax)?; + 
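// Ranges whose highlight has no style in the current syntax theme are skipped: the ? above bails out of this filter_map closure. + 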
Some((range.clone(), highlight)) + }), + parsed + .regions + .iter() + .zip(&parsed.region_ranges) + .filter_map(|(region, range)| { + if region.code { + Some(( + range.clone(), + HighlightStyle { + background_color: Some(code_span_background_color), + ..Default::default() + }, + )) + } else { + None + } + }), + ); + + let mut links = Vec::new(); + let mut link_ranges = Vec::new(); + for (range, region) in parsed.region_ranges.iter().zip(&parsed.regions) { + if let Some(link) = region.link.clone() { + links.push(link); + link_ranges.push(range.clone()); + } + } + + InteractiveText::new( + element_id, + StyledText::new(parsed.text.clone()).with_highlights(&editor_style.text, highlights), + ) + .on_click(link_ranges, move |clicked_range_ix, cx| { + match &links[clicked_range_ix] { + markdown::Link::Web { url } => cx.open_url(url), + markdown::Link::Path { path } => { + if let Some(workspace) = &workspace { + _ = workspace.update(cx, |workspace, cx| { + workspace.open_abs_path(path.clone(), false, cx).detach(); + }); + } + } + } + }) +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub(crate) enum InlayId { + Suggestion(usize), + Hint(usize), +} + +impl InlayId { + fn id(&self) -> usize { + match self { + Self::Suggestion(id) => *id, + Self::Hint(id) => *id, + } + } +} + +enum DiffRowHighlight {} +enum DocumentHighlightRead {} +enum DocumentHighlightWrite {} +enum InputComposition {} + +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum Direction { + Prev, + Next, +} + +pub fn init_settings(cx: &mut AppContext) { + EditorSettings::register(cx); +} + +pub fn init(cx: &mut AppContext) { + init_settings(cx); + + workspace::register_project_item::(cx); + workspace::register_followable_item::(cx); + workspace::register_deserializable_item::(cx); + cx.observe_new_views( + |workspace: &mut Workspace, _cx: &mut ViewContext| { + workspace.register_action(Editor::new_file); + workspace.register_action(Editor::new_file_in_direction); + }, + ) + .detach(); + + cx.on_action(move |_: &workspace::NewFile, cx| { + let app_state = workspace::AppState::global(cx); + if let Some(app_state) = app_state.upgrade() { + workspace::open_new(app_state, cx, |workspace, cx| { + Editor::new_file(workspace, &Default::default(), cx) + }) + .detach(); + } + }); + cx.on_action(move |_: &workspace::NewWindow, cx| { + let app_state = workspace::AppState::global(cx); + if let Some(app_state) = app_state.upgrade() { + workspace::open_new(app_state, cx, |workspace, cx| { + Editor::new_file(workspace, &Default::default(), cx) + }) + .detach(); + } + }); +} + +pub struct SearchWithinRange; + +trait InvalidationRegion { + fn ranges(&self) -> &[Range]; +} + +#[derive(Clone, Debug, PartialEq)] +pub enum SelectPhase { + Begin { + position: DisplayPoint, + add: bool, + click_count: usize, + }, + BeginColumnar { + position: DisplayPoint, + reset: bool, + goal_column: u32, + }, + Extend { + position: DisplayPoint, + click_count: usize, + }, + Update { + position: DisplayPoint, + goal_column: u32, + scroll_delta: gpui::Point, + }, + End, +} + +#[derive(Clone, Debug)] +pub enum SelectMode { + Character, + Word(Range), + Line(Range), + All, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum EditorMode { + SingleLine, + AutoHeight { max_lines: usize }, + Full, +} + +#[derive(Clone, Debug)] +pub enum SoftWrap { + None, + PreferLine, + EditorWidth, + Column(u32), +} + +#[derive(Clone)] +pub struct EditorStyle { + pub background: Hsla, + pub local_player: PlayerColor, + pub text: TextStyle, + pub scrollbar_width: 
Pixels, + pub syntax: Arc, + pub status: StatusColors, + pub inlay_hints_style: HighlightStyle, + pub suggestions_style: HighlightStyle, +} + +impl Default for EditorStyle { + fn default() -> Self { + Self { + background: Hsla::default(), + local_player: PlayerColor::default(), + text: TextStyle::default(), + scrollbar_width: Pixels::default(), + syntax: Default::default(), + // HACK: Status colors don't have a real default. + // We should look into removing the status colors from the editor + // style and retrieve them directly from the theme. + status: StatusColors::dark(), + inlay_hints_style: HighlightStyle::default(), + suggestions_style: HighlightStyle::default(), + } + } +} + +type CompletionId = usize; + +// type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor; +// type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option; + +type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Arc<[Range]>); + +struct ScrollbarMarkerState { + scrollbar_size: Size, + dirty: bool, + markers: Arc<[PaintQuad]>, + pending_refresh: Option>>, +} + +impl ScrollbarMarkerState { + fn should_refresh(&self, scrollbar_size: Size) -> bool { + self.pending_refresh.is_none() && (self.scrollbar_size != scrollbar_size || self.dirty) + } +} + +impl Default for ScrollbarMarkerState { + fn default() -> Self { + Self { + scrollbar_size: Size::default(), + dirty: false, + markers: Arc::from([]), + pending_refresh: None, + } + } +} + +#[derive(Clone, Debug)] +struct RunnableTasks { + templates: Vec<(TaskSourceKind, TaskTemplate)>, + // We need the column at which the task context evaluation should take place. + column: u32, + extra_variables: HashMap, +} + +#[derive(Clone)] +struct ResolvedTasks { + templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>, + position: Anchor, +} + +/// Zed's primary text input `View`, allowing users to edit a [`MultiBuffer`] +/// +/// See the [module level documentation](self) for more information. +pub struct Editor { + focus_handle: FocusHandle, + /// The text buffer being edited + buffer: Model, + /// Map of how text in the buffer should be displayed. + /// Handles soft wraps, folds, fake inlay text insertions, etc. 
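+ /// Internally this is a pipeline of coordinate maps (inlay, fold, tab, and wrap maps, in the order the display_map tests above compose them), with each layer re-mapping points from the one before it.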
+ pub display_map: Model, + pub selections: SelectionsCollection, + pub scroll_manager: ScrollManager, + columnar_selection_tail: Option, + add_selections_state: Option, + select_next_state: Option, + select_prev_state: Option, + selection_history: SelectionHistory, + autoclose_regions: Vec, + snippet_stack: InvalidationStack, + select_larger_syntax_node_stack: Vec]>>, + ime_transaction: Option, + active_diagnostics: Option, + soft_wrap_mode_override: Option, + project: Option>, + completion_provider: Option>, + collaboration_hub: Option>, + blink_manager: Model, + show_cursor_names: bool, + hovered_cursors: HashMap>, + pub show_local_selections: bool, + mode: EditorMode, + show_breadcrumbs: bool, + show_gutter: bool, + show_wrap_guides: Option, + placeholder_text: Option>, + highlight_order: usize, + highlighted_rows: HashMap>, + background_highlights: TreeMap, + scrollbar_marker_state: ScrollbarMarkerState, + nav_history: Option, + context_menu: RwLock>, + mouse_context_menu: Option, + completion_tasks: Vec<(CompletionId, Task>)>, + find_all_references_task_sources: Vec, + next_completion_id: CompletionId, + completion_documentation_pre_resolve_debounce: DebouncedDelay, + available_code_actions: Option<(Location, Arc<[CodeAction]>)>, + code_actions_task: Option>, + document_highlights_task: Option>, + pending_rename: Option, + searchable: bool, + cursor_shape: CursorShape, + current_line_highlight: CurrentLineHighlight, + collapse_matches: bool, + autoindent_mode: Option, + workspace: Option<(WeakView, WorkspaceId)>, + keymap_context_layers: BTreeMap, + input_enabled: bool, + use_modal_editing: bool, + read_only: bool, + leader_peer_id: Option, + remote_id: Option, + hover_state: HoverState, + gutter_hovered: bool, + hovered_link_state: Option, + inline_completion_provider: Option, + active_inline_completion: Option, + show_inline_completions: bool, + inlay_hint_cache: InlayHintCache, + expanded_hunks: ExpandedHunks, + next_inlay_id: usize, + _subscriptions: Vec, + pixel_position_of_newest_cursor: Option>, + gutter_dimensions: GutterDimensions, + pub vim_replace_map: HashMap, String>, + style: Option, + editor_actions: Vec)>>, + use_autoclose: bool, + auto_replace_emoji_shortcode: bool, + show_git_blame_gutter: bool, + show_git_blame_inline: bool, + show_git_blame_inline_delay_task: Option>, + git_blame_inline_enabled: bool, + blame: Option>, + blame_subscription: Option, + custom_context_menu: Option< + Box< + dyn 'static + + Fn(&mut Self, DisplayPoint, &mut ViewContext) -> Option>, + >, + >, + last_bounds: Option>, + expect_bounds_change: Option>, + tasks: HashMap<(BufferId, BufferRow), (usize, RunnableTasks)>, + tasks_update_task: Option>, +} + +#[derive(Clone)] +pub struct EditorSnapshot { + pub mode: EditorMode, + show_gutter: bool, + render_git_blame_gutter: bool, + pub display_snapshot: DisplaySnapshot, + pub placeholder_text: Option>, + is_focused: bool, + scroll_anchor: ScrollAnchor, + ongoing_scroll: OngoingScroll, + current_line_highlight: CurrentLineHighlight, +} + +const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.; + +#[derive(Debug, Clone, Copy)] +pub struct GutterDimensions { + pub left_padding: Pixels, + pub right_padding: Pixels, + pub width: Pixels, + pub margin: Pixels, + pub git_blame_entries_width: Option, +} + +impl Default for GutterDimensions { + fn default() -> Self { + Self { + left_padding: Pixels::ZERO, + right_padding: Pixels::ZERO, + width: Pixels::ZERO, + margin: Pixels::ZERO, + git_blame_entries_width: None, + } + } +} + +#[derive(Debug)] +pub struct 
RemoteSelection { + pub replica_id: ReplicaId, + pub selection: Selection, + pub cursor_shape: CursorShape, + pub peer_id: PeerId, + pub line_mode: bool, + pub participant_index: Option, + pub user_name: Option, +} + +#[derive(Clone, Debug)] +struct SelectionHistoryEntry { + selections: Arc<[Selection]>, + select_next_state: Option, + select_prev_state: Option, + add_selections_state: Option, +} + +enum SelectionHistoryMode { + Normal, + Undoing, + Redoing, +} + +#[derive(Clone, PartialEq, Eq, Hash)] +struct HoveredCursor { + replica_id: u16, + selection_id: usize, +} + +impl Default for SelectionHistoryMode { + fn default() -> Self { + Self::Normal + } +} + +#[derive(Default)] +struct SelectionHistory { + #[allow(clippy::type_complexity)] + selections_by_transaction: + HashMap]>, Option]>>)>, + mode: SelectionHistoryMode, + undo_stack: VecDeque, + redo_stack: VecDeque, +} + +impl SelectionHistory { + fn insert_transaction( + &mut self, + transaction_id: TransactionId, + selections: Arc<[Selection]>, + ) { + self.selections_by_transaction + .insert(transaction_id, (selections, None)); + } + + #[allow(clippy::type_complexity)] + fn transaction( + &self, + transaction_id: TransactionId, + ) -> Option<&(Arc<[Selection]>, Option]>>)> { + self.selections_by_transaction.get(&transaction_id) + } + + #[allow(clippy::type_complexity)] + fn transaction_mut( + &mut self, + transaction_id: TransactionId, + ) -> Option<&mut (Arc<[Selection]>, Option]>>)> { + self.selections_by_transaction.get_mut(&transaction_id) + } + + fn push(&mut self, entry: SelectionHistoryEntry) { + if !entry.selections.is_empty() { + match self.mode { + SelectionHistoryMode::Normal => { + self.push_undo(entry); + self.redo_stack.clear(); + } + SelectionHistoryMode::Undoing => self.push_redo(entry), + SelectionHistoryMode::Redoing => self.push_undo(entry), + } + } + } + + fn push_undo(&mut self, entry: SelectionHistoryEntry) { + if self + .undo_stack + .back() + .map_or(true, |e| e.selections != entry.selections) + { + self.undo_stack.push_back(entry); + if self.undo_stack.len() > MAX_SELECTION_HISTORY_LEN { + self.undo_stack.pop_front(); + } + } + } + + fn push_redo(&mut self, entry: SelectionHistoryEntry) { + if self + .redo_stack + .back() + .map_or(true, |e| e.selections != entry.selections) + { + self.redo_stack.push_back(entry); + if self.redo_stack.len() > MAX_SELECTION_HISTORY_LEN { + self.redo_stack.pop_front(); + } + } + } +} + +struct RowHighlight { + index: usize, + range: RangeInclusive, + color: Option, + should_autoscroll: bool, +} + +#[derive(Clone, Debug)] +struct AddSelectionsState { + above: bool, + stack: Vec, +} + +#[derive(Clone)] +struct SelectNextState { + query: AhoCorasick, + wordwise: bool, + done: bool, +} + +impl std::fmt::Debug for SelectNextState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct(std::any::type_name::()) + .field("wordwise", &self.wordwise) + .field("done", &self.done) + .finish() + } +} + +#[derive(Debug)] +struct AutocloseRegion { + selection_id: usize, + range: Range, + pair: BracketPair, +} + +#[derive(Debug)] +struct SnippetState { + ranges: Vec>>, + active_index: usize, +} + +#[doc(hidden)] +pub struct RenameState { + pub range: Range, + pub old_name: Arc, + pub editor: View, + block_id: BlockId, +} + +struct InvalidationStack(Vec); + +struct RegisteredInlineCompletionProvider { + provider: Arc, + _subscription: Subscription, +} + +enum ContextMenu { + Completions(CompletionsMenu), + CodeActions(CodeActionsMenu), +} + +impl ContextMenu { 
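+ // Each method below dispatches to whichever menu variant is currently shown: completions or code actions.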
+ fn select_first( + &mut self, + project: Option<&Model>, + cx: &mut ViewContext, + ) -> bool { + if self.visible() { + match self { + ContextMenu::Completions(menu) => menu.select_first(project, cx), + ContextMenu::CodeActions(menu) => menu.select_first(cx), + } + true + } else { + false + } + } + + fn select_prev( + &mut self, + project: Option<&Model>, + cx: &mut ViewContext, + ) -> bool { + if self.visible() { + match self { + ContextMenu::Completions(menu) => menu.select_prev(project, cx), + ContextMenu::CodeActions(menu) => menu.select_prev(cx), + } + true + } else { + false + } + } + + fn select_next( + &mut self, + project: Option<&Model>, + cx: &mut ViewContext, + ) -> bool { + if self.visible() { + match self { + ContextMenu::Completions(menu) => menu.select_next(project, cx), + ContextMenu::CodeActions(menu) => menu.select_next(cx), + } + true + } else { + false + } + } + + fn select_last( + &mut self, + project: Option<&Model>, + cx: &mut ViewContext, + ) -> bool { + if self.visible() { + match self { + ContextMenu::Completions(menu) => menu.select_last(project, cx), + ContextMenu::CodeActions(menu) => menu.select_last(cx), + } + true + } else { + false + } + } + + fn visible(&self) -> bool { + match self { + ContextMenu::Completions(menu) => menu.visible(), + ContextMenu::CodeActions(menu) => menu.visible(), + } + } + + fn render( + &self, + cursor_position: DisplayPoint, + style: &EditorStyle, + max_height: Pixels, + workspace: Option>, + cx: &mut ViewContext, + ) -> (ContextMenuOrigin, AnyElement) { + match self { + ContextMenu::Completions(menu) => ( + ContextMenuOrigin::EditorPoint(cursor_position), + menu.render(style, max_height, workspace, cx), + ), + ContextMenu::CodeActions(menu) => menu.render(cursor_position, style, max_height, cx), + } + } +} + +enum ContextMenuOrigin { + EditorPoint(DisplayPoint), + GutterIndicator(DisplayRow), +} + +#[derive(Clone)] +struct CompletionsMenu { + id: CompletionId, + initial_position: Anchor, + buffer: Model, + completions: Arc>>, + match_candidates: Arc<[StringMatchCandidate]>, + matches: Arc<[StringMatch]>, + selected_item: usize, + scroll_handle: UniformListScrollHandle, + selected_completion_documentation_resolve_debounce: Arc>, +} + +impl CompletionsMenu { + fn select_first(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + self.selected_item = 0; + self.scroll_handle.scroll_to_item(self.selected_item); + self.attempt_resolve_selected_completion_documentation(project, cx); + cx.notify(); + } + + fn select_prev(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + if self.selected_item > 0 { + self.selected_item -= 1; + } else { + self.selected_item = self.matches.len() - 1; + } + self.scroll_handle.scroll_to_item(self.selected_item); + self.attempt_resolve_selected_completion_documentation(project, cx); + cx.notify(); + } + + fn select_next(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + if self.selected_item + 1 < self.matches.len() { + self.selected_item += 1; + } else { + self.selected_item = 0; + } + self.scroll_handle.scroll_to_item(self.selected_item); + self.attempt_resolve_selected_completion_documentation(project, cx); + cx.notify(); + } + + fn select_last(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + self.selected_item = self.matches.len() - 1; + self.scroll_handle.scroll_to_item(self.selected_item); + self.attempt_resolve_selected_completion_documentation(project, cx); + cx.notify(); + } + + fn pre_resolve_completion_documentation( + buffer: Model, + completions: 
Arc>>, + matches: Arc<[StringMatch]>, + editor: &Editor, + cx: &mut ViewContext, + ) -> Task<()> { + let settings = EditorSettings::get_global(cx); + if !settings.show_completion_documentation { + return Task::ready(()); + } + + let Some(provider) = editor.completion_provider.as_ref() else { + return Task::ready(()); + }; + + let resolve_task = provider.resolve_completions( + buffer, + matches.iter().map(|m| m.candidate_id).collect(), + completions.clone(), + cx, + ); + + return cx.spawn(move |this, mut cx| async move { + if let Some(true) = resolve_task.await.log_err() { + this.update(&mut cx, |_, cx| cx.notify()).ok(); + } + }); + } + + fn attempt_resolve_selected_completion_documentation( + &mut self, + project: Option<&Model>, + cx: &mut ViewContext, + ) { + let settings = EditorSettings::get_global(cx); + if !settings.show_completion_documentation { + return; + } + + let completion_index = self.matches[self.selected_item].candidate_id; + let Some(project) = project else { + return; + }; + + let resolve_task = project.update(cx, |project, cx| { + project.resolve_completions( + self.buffer.clone(), + vec![completion_index], + self.completions.clone(), + cx, + ) + }); + + let delay_ms = + EditorSettings::get_global(cx).completion_documentation_secondary_query_debounce; + let delay = Duration::from_millis(delay_ms); + + self.selected_completion_documentation_resolve_debounce + .lock() + .fire_new(delay, cx, |_, cx| { + cx.spawn(move |this, mut cx| async move { + if let Some(true) = resolve_task.await.log_err() { + this.update(&mut cx, |_, cx| cx.notify()).ok(); + } + }) + }); + } + + fn visible(&self) -> bool { + !self.matches.is_empty() + } + + fn render( + &self, + style: &EditorStyle, + max_height: Pixels, + workspace: Option>, + cx: &mut ViewContext, + ) -> AnyElement { + let settings = EditorSettings::get_global(cx); + let show_completion_documentation = settings.show_completion_documentation; + + let widest_completion_ix = self + .matches + .iter() + .enumerate() + .max_by_key(|(_, mat)| { + let completions = self.completions.read(); + let completion = &completions[mat.candidate_id]; + let documentation = &completion.documentation; + + let mut len = completion.label.text.chars().count(); + if let Some(Documentation::SingleLine(text)) = documentation { + if show_completion_documentation { + len += text.chars().count(); + } + } + + len + }) + .map(|(ix, _)| ix); + + let completions = self.completions.clone(); + let matches = self.matches.clone(); + let selected_item = self.selected_item; + let style = style.clone(); + + let multiline_docs = if show_completion_documentation { + let mat = &self.matches[selected_item]; + let multiline_docs = match &self.completions.read()[mat.candidate_id].documentation { + Some(Documentation::MultiLinePlainText(text)) => { + Some(div().child(SharedString::from(text.clone()))) + } + Some(Documentation::MultiLineMarkdown(parsed)) if !parsed.text.is_empty() => { + Some(div().child(render_parsed_markdown( + "completions_markdown", + parsed, + &style, + workspace, + cx, + ))) + } + _ => None, + }; + multiline_docs.map(|div| { + div.id("multiline_docs") + .max_h(max_height) + .flex_1() + .px_1p5() + .py_1() + .min_w(px(260.)) + .max_w(px(640.)) + .w(px(500.)) + .overflow_y_scroll() + .occlude() + }) + } else { + None + }; + + let list = uniform_list( + cx.view().clone(), + "completions", + matches.len(), + move |_editor, range, cx| { + let start_ix = range.start; + let completions_guard = completions.read(); + + matches[range] + .iter() + .enumerate() + 
.map(|(ix, mat)| { + let item_ix = start_ix + ix; + let candidate_id = mat.candidate_id; + let completion = &completions_guard[candidate_id]; + + let documentation = if show_completion_documentation { + &completion.documentation + } else { + &None + }; + + let highlights = gpui::combine_highlights( + mat.ranges().map(|range| (range, FontWeight::BOLD.into())), + styled_runs_for_code_label(&completion.label, &style.syntax).map( + |(range, mut highlight)| { + // Ignore font weight for syntax highlighting, as we'll use it + // for fuzzy matches. + highlight.font_weight = None; + + if completion.lsp_completion.deprecated.unwrap_or(false) { + highlight.strikethrough = Some(StrikethroughStyle { + thickness: 1.0.into(), + ..Default::default() + }); + highlight.color = Some(cx.theme().colors().text_muted); + } + + (range, highlight) + }, + ), + ); + let completion_label = StyledText::new(completion.label.text.clone()) + .with_highlights(&style.text, highlights); + let documentation_label = + if let Some(Documentation::SingleLine(text)) = documentation { + if text.trim().is_empty() { + None + } else { + Some( + h_flex().ml_4().child( + Label::new(text.clone()) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + } + } else { + None + }; + + div().min_w(px(220.)).max_w(px(540.)).child( + ListItem::new(mat.candidate_id) + .inset(true) + .selected(item_ix == selected_item) + .on_click(cx.listener(move |editor, _event, cx| { + cx.stop_propagation(); + if let Some(task) = editor.confirm_completion( + &ConfirmCompletion { + item_ix: Some(item_ix), + }, + cx, + ) { + task.detach_and_log_err(cx) + } + })) + .child(h_flex().overflow_hidden().child(completion_label)) + .end_slot::
<Div>
(documentation_label), + ) + }) + .collect() + }, + ) + .occlude() + .max_h(max_height) + .track_scroll(self.scroll_handle.clone()) + .with_width_from_item(widest_completion_ix); + + Popover::new() + .child(list) + .when_some(multiline_docs, |popover, multiline_docs| { + popover.aside(multiline_docs) + }) + .into_any_element() + } + + pub async fn filter(&mut self, query: Option<&str>, executor: BackgroundExecutor) { + let mut matches = if let Some(query) = query { + fuzzy::match_strings( + &self.match_candidates, + query, + query.chars().any(|c| c.is_uppercase()), + 100, + &Default::default(), + executor, + ) + .await + } else { + self.match_candidates + .iter() + .enumerate() + .map(|(candidate_id, candidate)| StringMatch { + candidate_id, + score: Default::default(), + positions: Default::default(), + string: candidate.string.clone(), + }) + .collect() + }; + + // Remove all candidates where the query's start does not match the start of any word in the candidate + if let Some(query) = query { + if let Some(query_start) = query.chars().next() { + matches.retain(|string_match| { + split_words(&string_match.string).any(|word| { + // Check that the first codepoint of the word as lowercase matches the first + // codepoint of the query as lowercase + word.chars() + .flat_map(|codepoint| codepoint.to_lowercase()) + .zip(query_start.to_lowercase()) + .all(|(word_cp, query_cp)| word_cp == query_cp) + }) + }); + } + } + + let completions = self.completions.read(); + matches.sort_unstable_by_key(|mat| { + // We do want to strike a balance here between what the language server tells us + // to sort by (the sort_text) and what are "obvious" good matches (i.e. when you type + // `Creat` and there is a local variable called `CreateComponent`). + // So what we do is: we bucket all matches into two buckets + // - Strong matches + // - Weak matches + // Strong matches are the ones with a high fuzzy-matcher score (the "obvious" matches) + // and the Weak matches are the rest. + // + // For the strong matches, we sort by the language-servers score first and for the weak + // matches, we prefer our fuzzy finder first. + // + // The thinking behind that: it's useless to take the sort_text the language-server gives + // us into account when it's obviously a bad match. 
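+ // + // The derived Ord below encodes these priorities purely through declaration order: every Strong key sorts ahead of every Weak key (variant order), Strong compares sort_text before the fuzzy score, and Weak compares the fuzzy score first. + // As a purely hypothetical example with the 0.2 threshold used below: typing Creat might give CreateComponent a fuzzy score of 0.8 (Strong, so the server's sort_text decides) and concat_idents a score of 0.05 (Weak, so our fuzzy score decides).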
+ + #[derive(PartialEq, Eq, PartialOrd, Ord)] + enum MatchScore<'a> { + Strong { + sort_text: Option<&'a str>, + score: Reverse>, + sort_key: (usize, &'a str), + }, + Weak { + score: Reverse>, + sort_text: Option<&'a str>, + sort_key: (usize, &'a str), + }, + } + + let completion = &completions[mat.candidate_id]; + let sort_key = completion.sort_key(); + let sort_text = completion.lsp_completion.sort_text.as_deref(); + let score = Reverse(OrderedFloat(mat.score)); + + if mat.score >= 0.2 { + MatchScore::Strong { + sort_text, + score, + sort_key, + } + } else { + MatchScore::Weak { + score, + sort_text, + sort_key, + } + } + }); + + for mat in &mut matches { + let completion = &completions[mat.candidate_id]; + mat.string.clone_from(&completion.label.text); + for position in &mut mat.positions { + *position += completion.label.filter_range.start; + } + } + drop(completions); + + self.matches = matches.into(); + self.selected_item = 0; + } +} + +#[derive(Clone)] +struct CodeActionContents { + tasks: Option>, + actions: Option>, +} + +impl CodeActionContents { + fn len(&self) -> usize { + match (&self.tasks, &self.actions) { + (Some(tasks), Some(actions)) => actions.len() + tasks.templates.len(), + (Some(tasks), None) => tasks.templates.len(), + (None, Some(actions)) => actions.len(), + (None, None) => 0, + } + } + + fn is_empty(&self) -> bool { + match (&self.tasks, &self.actions) { + (Some(tasks), Some(actions)) => actions.is_empty() && tasks.templates.is_empty(), + (Some(tasks), None) => tasks.templates.is_empty(), + (None, Some(actions)) => actions.is_empty(), + (None, None) => true, + } + } + + fn iter(&self) -> impl Iterator + '_ { + self.tasks + .iter() + .flat_map(|tasks| { + tasks + .templates + .iter() + .map(|(kind, task)| CodeActionsItem::Task(kind.clone(), task.clone())) + }) + .chain(self.actions.iter().flat_map(|actions| { + actions + .iter() + .map(|action| CodeActionsItem::CodeAction(action.clone())) + })) + } + fn get(&self, index: usize) -> Option { + match (&self.tasks, &self.actions) { + (Some(tasks), Some(actions)) => { + if index < tasks.templates.len() { + tasks + .templates + .get(index) + .cloned() + .map(|(kind, task)| CodeActionsItem::Task(kind, task)) + } else { + actions + .get(index - tasks.templates.len()) + .cloned() + .map(CodeActionsItem::CodeAction) + } + } + (Some(tasks), None) => tasks + .templates + .get(index) + .cloned() + .map(|(kind, task)| CodeActionsItem::Task(kind, task)), + (None, Some(actions)) => actions.get(index).cloned().map(CodeActionsItem::CodeAction), + (None, None) => None, + } + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Clone)] +enum CodeActionsItem { + Task(TaskSourceKind, ResolvedTask), + CodeAction(CodeAction), +} + +impl CodeActionsItem { + fn as_task(&self) -> Option<&ResolvedTask> { + let Self::Task(_, task) = self else { + return None; + }; + Some(task) + } + fn as_code_action(&self) -> Option<&CodeAction> { + let Self::CodeAction(action) = self else { + return None; + }; + Some(action) + } + fn label(&self) -> String { + match self { + Self::CodeAction(action) => action.lsp_action.title.clone(), + Self::Task(_, task) => task.resolved_label.clone(), + } + } +} + +struct CodeActionsMenu { + actions: CodeActionContents, + buffer: Model, + selected_item: usize, + scroll_handle: UniformListScrollHandle, + deployed_from_indicator: Option, +} + +impl CodeActionsMenu { + fn select_first(&mut self, cx: &mut ViewContext) { + self.selected_item = 0; + self.scroll_handle.scroll_to_item(self.selected_item); + cx.notify() + } + + 
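// Moving past either end wraps around to the opposite end of the list. + 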
fn select_prev(&mut self, cx: &mut ViewContext) { + if self.selected_item > 0 { + self.selected_item -= 1; + } else { + self.selected_item = self.actions.len() - 1; + } + self.scroll_handle.scroll_to_item(self.selected_item); + cx.notify(); + } + + fn select_next(&mut self, cx: &mut ViewContext) { + if self.selected_item + 1 < self.actions.len() { + self.selected_item += 1; + } else { + self.selected_item = 0; + } + self.scroll_handle.scroll_to_item(self.selected_item); + cx.notify(); + } + + fn select_last(&mut self, cx: &mut ViewContext) { + self.selected_item = self.actions.len() - 1; + self.scroll_handle.scroll_to_item(self.selected_item); + cx.notify() + } + + fn visible(&self) -> bool { + !self.actions.is_empty() + } + + fn render( + &self, + cursor_position: DisplayPoint, + _style: &EditorStyle, + max_height: Pixels, + cx: &mut ViewContext, + ) -> (ContextMenuOrigin, AnyElement) { + let actions = self.actions.clone(); + let selected_item = self.selected_item; + let element = uniform_list( + cx.view().clone(), + "code_actions_menu", + self.actions.len(), + move |_this, range, cx| { + actions + .iter() + .skip(range.start) + .take(range.end - range.start) + .enumerate() + .map(|(ix, action)| { + let item_ix = range.start + ix; + let selected = selected_item == item_ix; + let colors = cx.theme().colors(); + div() + .px_2() + .text_color(colors.text) + .when(selected, |style| { + style + .bg(colors.element_active) + .text_color(colors.text_accent) + }) + .hover(|style| { + style + .bg(colors.element_hover) + .text_color(colors.text_accent) + }) + .whitespace_nowrap() + .when_some(action.as_code_action(), |this, action| { + this.on_mouse_down( + MouseButton::Left, + cx.listener(move |editor, _, cx| { + cx.stop_propagation(); + if let Some(task) = editor.confirm_code_action( + &ConfirmCodeAction { + item_ix: Some(item_ix), + }, + cx, + ) { + task.detach_and_log_err(cx) + } + }), + ) + // TASK: It would be good to make lsp_action.title a SharedString to avoid allocating here. 
+ .child(SharedString::from(action.lsp_action.title.clone())) + }) + .when_some(action.as_task(), |this, task| { + this.on_mouse_down( + MouseButton::Left, + cx.listener(move |editor, _, cx| { + cx.stop_propagation(); + if let Some(task) = editor.confirm_code_action( + &ConfirmCodeAction { + item_ix: Some(item_ix), + }, + cx, + ) { + task.detach_and_log_err(cx) + } + }), + ) + .child(SharedString::from(task.resolved_label.clone())) + }) + }) + .collect() + }, + ) + .elevation_1(cx) + .px_2() + .py_1() + .max_h(max_height) + .occlude() + .track_scroll(self.scroll_handle.clone()) + .with_width_from_item( + self.actions + .iter() + .enumerate() + .max_by_key(|(_, action)| match action { + CodeActionsItem::Task(_, task) => task.resolved_label.chars().count(), + CodeActionsItem::CodeAction(action) => action.lsp_action.title.chars().count(), + }) + .map(|(ix, _)| ix), + ) + .into_any_element(); + + let cursor_position = if let Some(row) = self.deployed_from_indicator { + ContextMenuOrigin::GutterIndicator(row) + } else { + ContextMenuOrigin::EditorPoint(cursor_position) + }; + + (cursor_position, element) + } +} + +#[derive(Debug)] +struct ActiveDiagnosticGroup { + primary_range: Range, + primary_message: String, + group_id: usize, + blocks: HashMap, + is_valid: bool, +} + +#[derive(Serialize, Deserialize)] +pub struct ClipboardSelection { + pub len: usize, + pub is_entire_line: bool, + pub first_line_indent: u32, +} + +#[derive(Debug)] +pub(crate) struct NavigationData { + cursor_anchor: Anchor, + cursor_position: Point, + scroll_anchor: ScrollAnchor, + scroll_top_row: u32, +} + +enum GotoDefinitionKind { + Symbol, + Type, + Implementation, +} + +#[derive(Debug, Clone)] +enum InlayHintRefreshReason { + Toggle(bool), + SettingsChange(InlayHintSettings), + NewLinesShown, + BufferEdited(HashSet>), + RefreshRequested, + ExcerptsRemoved(Vec), +} + +impl InlayHintRefreshReason { + fn description(&self) -> &'static str { + match self { + Self::Toggle(_) => "toggle", + Self::SettingsChange(_) => "settings change", + Self::NewLinesShown => "new lines shown", + Self::BufferEdited(_) => "buffer edited", + Self::RefreshRequested => "refresh requested", + Self::ExcerptsRemoved(_) => "excerpts removed", + } + } +} + +impl Editor { + pub fn single_line(cx: &mut ViewContext) -> Self { + let buffer = cx.new_model(|cx| Buffer::local("", cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new(EditorMode::SingleLine, buffer, None, cx) + } + + pub fn multi_line(cx: &mut ViewContext) -> Self { + let buffer = cx.new_model(|cx| Buffer::local("", cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new(EditorMode::Full, buffer, None, cx) + } + + pub fn auto_height(max_lines: usize, cx: &mut ViewContext) -> Self { + let buffer = cx.new_model(|cx| Buffer::local("", cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new(EditorMode::AutoHeight { max_lines }, buffer, None, cx) + } + + pub fn for_buffer( + buffer: Model, + project: Option>, + cx: &mut ViewContext, + ) -> Self { + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + Self::new(EditorMode::Full, buffer, project, cx) + } + + pub fn for_multibuffer( + buffer: Model, + project: Option>, + cx: &mut ViewContext, + ) -> Self { + Self::new(EditorMode::Full, buffer, project, cx) + } + + pub fn clone(&self, cx: &mut ViewContext) -> Self { + let mut clone = Self::new(self.mode, self.buffer.clone(), self.project.clone(), cx); + self.display_map.update(cx, 
|display_map, cx| { + let snapshot = display_map.snapshot(cx); + clone.display_map.update(cx, |display_map, cx| { + display_map.set_state(&snapshot, cx); + }); + }); + clone.selections.clone_state(&self.selections); + clone.scroll_manager.clone_state(&self.scroll_manager); + clone.searchable = self.searchable; + clone + } + + fn new( + mode: EditorMode, + buffer: Model, + project: Option>, + cx: &mut ViewContext, + ) -> Self { + let style = cx.text_style(); + let font_size = style.font_size.to_pixels(cx.rem_size()); + let display_map = cx.new_model(|cx| { + DisplayMap::new(buffer.clone(), style.font(), font_size, None, 2, 1, cx) + }); + + let selections = SelectionsCollection::new(display_map.clone(), buffer.clone()); + + let blink_manager = cx.new_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx)); + + let soft_wrap_mode_override = + (mode == EditorMode::SingleLine).then(|| language_settings::SoftWrap::PreferLine); + + let mut project_subscriptions = Vec::new(); + if mode == EditorMode::Full { + if let Some(project) = project.as_ref() { + if buffer.read(cx).is_singleton() { + project_subscriptions.push(cx.observe(project, |_, _, cx| { + cx.emit(EditorEvent::TitleChanged); + })); + } + project_subscriptions.push(cx.subscribe(project, |editor, _, event, cx| { + if let project::Event::RefreshInlayHints = event { + editor.refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx); + }; + })); + let task_inventory = project.read(cx).task_inventory().clone(); + project_subscriptions.push(cx.observe(&task_inventory, |editor, _, cx| { + editor.tasks_update_task = Some(editor.refresh_runnables(cx)); + })); + } + } + + let inlay_hint_settings = inlay_hint_settings( + selections.newest_anchor().head(), + &buffer.read(cx).snapshot(cx), + cx, + ); + + let focus_handle = cx.focus_handle(); + cx.on_focus(&focus_handle, Self::handle_focus).detach(); + cx.on_blur(&focus_handle, Self::handle_blur).detach(); + + let mut this = Self { + focus_handle, + buffer: buffer.clone(), + display_map: display_map.clone(), + selections, + scroll_manager: ScrollManager::new(cx), + columnar_selection_tail: None, + add_selections_state: None, + select_next_state: None, + select_prev_state: None, + selection_history: Default::default(), + autoclose_regions: Default::default(), + snippet_stack: Default::default(), + select_larger_syntax_node_stack: Vec::new(), + ime_transaction: Default::default(), + active_diagnostics: None, + soft_wrap_mode_override, + completion_provider: project.clone().map(|project| Box::new(project) as _), + collaboration_hub: project.clone().map(|project| Box::new(project) as _), + project, + blink_manager: blink_manager.clone(), + show_local_selections: true, + mode, + show_breadcrumbs: EditorSettings::get_global(cx).toolbar.breadcrumbs, + show_gutter: mode == EditorMode::Full, + show_wrap_guides: None, + placeholder_text: None, + highlight_order: 0, + highlighted_rows: HashMap::default(), + background_highlights: Default::default(), + scrollbar_marker_state: ScrollbarMarkerState::default(), + nav_history: None, + context_menu: RwLock::new(None), + mouse_context_menu: None, + completion_tasks: Default::default(), + find_all_references_task_sources: Vec::new(), + next_completion_id: 0, + completion_documentation_pre_resolve_debounce: DebouncedDelay::new(), + next_inlay_id: 0, + available_code_actions: Default::default(), + code_actions_task: Default::default(), + document_highlights_task: Default::default(), + pending_rename: Default::default(), + searchable: true, + cursor_shape: 
Default::default(), + current_line_highlight: EditorSettings::get_global(cx).current_line_highlight, + autoindent_mode: Some(AutoindentMode::EachLine), + collapse_matches: false, + workspace: None, + keymap_context_layers: Default::default(), + input_enabled: true, + use_modal_editing: mode == EditorMode::Full, + read_only: false, + use_autoclose: true, + auto_replace_emoji_shortcode: false, + leader_peer_id: None, + remote_id: None, + hover_state: Default::default(), + hovered_link_state: Default::default(), + inline_completion_provider: None, + active_inline_completion: None, + inlay_hint_cache: InlayHintCache::new(inlay_hint_settings), + expanded_hunks: ExpandedHunks::default(), + gutter_hovered: false, + pixel_position_of_newest_cursor: None, + last_bounds: None, + expect_bounds_change: None, + gutter_dimensions: GutterDimensions::default(), + style: None, + show_cursor_names: false, + hovered_cursors: Default::default(), + editor_actions: Default::default(), + vim_replace_map: Default::default(), + show_inline_completions: mode == EditorMode::Full, + custom_context_menu: None, + show_git_blame_gutter: false, + show_git_blame_inline: false, + show_git_blame_inline_delay_task: None, + git_blame_inline_enabled: ProjectSettings::get_global(cx).git.inline_blame_enabled(), + blame: None, + blame_subscription: None, + tasks: Default::default(), + _subscriptions: vec![ + cx.observe(&buffer, Self::on_buffer_changed), + cx.subscribe(&buffer, Self::on_buffer_event), + cx.observe(&display_map, Self::on_display_map_changed), + cx.observe(&blink_manager, |_, _, cx| cx.notify()), + cx.observe_global::(Self::settings_changed), + observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()), + cx.observe_window_activation(|editor, cx| { + let active = cx.is_window_active(); + editor.blink_manager.update(cx, |blink_manager, cx| { + if active { + blink_manager.enable(cx); + } else { + blink_manager.show_cursor(cx); + blink_manager.disable(cx); + } + }); + }), + ], + tasks_update_task: None, + }; + this.tasks_update_task = Some(this.refresh_runnables(cx)); + this._subscriptions.extend(project_subscriptions); + + this.end_selection(cx); + this.scroll_manager.show_scrollbar(cx); + + if mode == EditorMode::Full { + let should_auto_hide_scrollbars = cx.should_auto_hide_scrollbars(); + cx.set_global(ScrollbarAutoHide(should_auto_hide_scrollbars)); + + if this.git_blame_inline_enabled { + this.git_blame_inline_enabled = true; + this.start_git_blame_inline(false, cx); + } + } + + this.report_editor_event("open", None, cx); + this + } + + pub fn mouse_menu_is_focused(&self, cx: &mut WindowContext) -> bool { + self.mouse_context_menu + .as_ref() + .is_some_and(|menu| menu.context_menu.focus_handle(cx).is_focused(cx)) + } + + fn key_context(&self, cx: &AppContext) -> KeyContext { + let mut key_context = KeyContext::new_with_defaults(); + key_context.add("Editor"); + let mode = match self.mode { + EditorMode::SingleLine => "single_line", + EditorMode::AutoHeight { .. 
} => "auto_height", + EditorMode::Full => "full", + }; + key_context.set("mode", mode); + if self.pending_rename.is_some() { + key_context.add("renaming"); + } + if self.context_menu_visible() { + match self.context_menu.read().as_ref() { + Some(ContextMenu::Completions(_)) => { + key_context.add("menu"); + key_context.add("showing_completions") + } + Some(ContextMenu::CodeActions(_)) => { + key_context.add("menu"); + key_context.add("showing_code_actions") + } + None => {} + } + } + + for layer in self.keymap_context_layers.values() { + key_context.extend(layer); + } + + if let Some(extension) = self + .buffer + .read(cx) + .as_singleton() + .and_then(|buffer| buffer.read(cx).file()?.path().extension()?.to_str()) + { + key_context.set("extension", extension.to_string()); + } + + if self.has_active_inline_completion(cx) { + key_context.add("copilot_suggestion"); + key_context.add("inline_completion"); + } + + key_context + } + + pub fn new_file( + workspace: &mut Workspace, + _: &workspace::NewFile, + cx: &mut ViewContext, + ) { + let project = workspace.project().clone(); + let create = project.update(cx, |project, cx| project.create_buffer(cx)); + + cx.spawn(|workspace, mut cx| async move { + let buffer = create.await?; + workspace.update(&mut cx, |workspace, cx| { + workspace.add_item_to_active_pane( + Box::new( + cx.new_view(|cx| Editor::for_buffer(buffer, Some(project.clone()), cx)), + ), + None, + cx, + ) + }) + }) + .detach_and_prompt_err("Failed to create buffer", cx, |e, _| match e.error_code() { + ErrorCode::RemoteUpgradeRequired => Some(format!( + "The remote instance of Zed does not support this yet. It must be upgraded to {}", + e.error_tag("required").unwrap_or("the latest version") + )), + _ => None, + }); + } + + pub fn new_file_in_direction( + workspace: &mut Workspace, + action: &workspace::NewFileInDirection, + cx: &mut ViewContext, + ) { + let project = workspace.project().clone(); + let create = project.update(cx, |project, cx| project.create_buffer(cx)); + let direction = action.0; + + cx.spawn(|workspace, mut cx| async move { + let buffer = create.await?; + workspace.update(&mut cx, move |workspace, cx| { + workspace.split_item( + direction, + Box::new( + cx.new_view(|cx| Editor::for_buffer(buffer, Some(project.clone()), cx)), + ), + cx, + ) + })?; + anyhow::Ok(()) + }) + .detach_and_prompt_err("Failed to create buffer", cx, |e, _| match e.error_code() { + ErrorCode::RemoteUpgradeRequired => Some(format!( + "The remote instance of Zed does not support this yet. 
It must be upgraded to {}", + e.error_tag("required").unwrap_or("the latest version") + )), + _ => None, + }); + } + + pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { + self.buffer.read(cx).replica_id() + } + + pub fn leader_peer_id(&self) -> Option { + self.leader_peer_id + } + + pub fn buffer(&self) -> &Model { + &self.buffer + } + + pub fn workspace(&self) -> Option> { + self.workspace.as_ref()?.0.upgrade() + } + + pub fn title<'a>(&self, cx: &'a AppContext) -> Cow<'a, str> { + self.buffer().read(cx).title(cx) + } + + pub fn snapshot(&mut self, cx: &mut WindowContext) -> EditorSnapshot { + EditorSnapshot { + mode: self.mode, + show_gutter: self.show_gutter, + render_git_blame_gutter: self.render_git_blame_gutter(cx), + display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)), + scroll_anchor: self.scroll_manager.anchor(), + ongoing_scroll: self.scroll_manager.ongoing_scroll(), + placeholder_text: self.placeholder_text.clone(), + is_focused: self.focus_handle.is_focused(cx), + current_line_highlight: self.current_line_highlight, + } + } + + pub fn language_at(&self, point: T, cx: &AppContext) -> Option> { + self.buffer.read(cx).language_at(point, cx) + } + + pub fn file_at( + &self, + point: T, + cx: &AppContext, + ) -> Option> { + self.buffer.read(cx).read(cx).file_at(point).cloned() + } + + pub fn active_excerpt( + &self, + cx: &AppContext, + ) -> Option<(ExcerptId, Model, Range)> { + self.buffer + .read(cx) + .excerpt_containing(self.selections.newest_anchor().head(), cx) + } + + pub fn mode(&self) -> EditorMode { + self.mode + } + + pub fn collaboration_hub(&self) -> Option<&dyn CollaborationHub> { + self.collaboration_hub.as_deref() + } + + pub fn set_collaboration_hub(&mut self, hub: Box) { + self.collaboration_hub = Some(hub); + } + + pub fn set_custom_context_menu( + &mut self, + f: impl 'static + + Fn(&mut Self, DisplayPoint, &mut ViewContext) -> Option>, + ) { + self.custom_context_menu = Some(Box::new(f)) + } + + pub fn set_completion_provider(&mut self, hub: Box) { + self.completion_provider = Some(hub); + } + + pub fn set_inline_completion_provider( + &mut self, + provider: Option>, + cx: &mut ViewContext, + ) where + T: InlineCompletionProvider, + { + self.inline_completion_provider = + provider.map(|provider| RegisteredInlineCompletionProvider { + _subscription: cx.observe(&provider, |this, _, cx| { + if this.focus_handle.is_focused(cx) { + this.update_visible_inline_completion(cx); + } + }), + provider: Arc::new(provider), + }); + self.refresh_inline_completion(false, cx); + } + + pub fn placeholder_text(&self, _cx: &mut WindowContext) -> Option<&str> { + self.placeholder_text.as_deref() + } + + pub fn set_placeholder_text( + &mut self, + placeholder_text: impl Into>, + cx: &mut ViewContext, + ) { + let placeholder_text = Some(placeholder_text.into()); + if self.placeholder_text != placeholder_text { + self.placeholder_text = placeholder_text; + cx.notify(); + } + } + + pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape, cx: &mut ViewContext) { + self.cursor_shape = cursor_shape; + cx.notify(); + } + + pub fn set_current_line_highlight(&mut self, current_line_highlight: CurrentLineHighlight) { + self.current_line_highlight = current_line_highlight; + } + + pub fn set_collapse_matches(&mut self, collapse_matches: bool) { + self.collapse_matches = collapse_matches; + } + + pub fn range_for_match(&self, range: &Range) -> Range { + if self.collapse_matches { + return range.start..range.start; + } + range.clone() + } + + pub fn 
set_clip_at_line_ends(&mut self, clip: bool, cx: &mut ViewContext) { + if self.display_map.read(cx).clip_at_line_ends != clip { + self.display_map + .update(cx, |map, _| map.clip_at_line_ends = clip); + } + } + + pub fn set_keymap_context_layer( + &mut self, + context: KeyContext, + cx: &mut ViewContext, + ) { + self.keymap_context_layers + .insert(TypeId::of::(), context); + cx.notify(); + } + + pub fn remove_keymap_context_layer(&mut self, cx: &mut ViewContext) { + self.keymap_context_layers.remove(&TypeId::of::()); + cx.notify(); + } + + pub fn set_input_enabled(&mut self, input_enabled: bool) { + self.input_enabled = input_enabled; + } + + pub fn set_autoindent(&mut self, autoindent: bool) { + if autoindent { + self.autoindent_mode = Some(AutoindentMode::EachLine); + } else { + self.autoindent_mode = None; + } + } + + pub fn read_only(&self, cx: &AppContext) -> bool { + self.read_only || self.buffer.read(cx).read_only() + } + + pub fn set_read_only(&mut self, read_only: bool) { + self.read_only = read_only; + } + + pub fn set_use_autoclose(&mut self, autoclose: bool) { + self.use_autoclose = autoclose; + } + + pub fn set_auto_replace_emoji_shortcode(&mut self, auto_replace: bool) { + self.auto_replace_emoji_shortcode = auto_replace; + } + + pub fn set_show_inline_completions(&mut self, show_inline_completions: bool) { + self.show_inline_completions = show_inline_completions; + } + + pub fn set_use_modal_editing(&mut self, to: bool) { + self.use_modal_editing = to; + } + + pub fn use_modal_editing(&self) -> bool { + self.use_modal_editing + } + + fn selections_did_change( + &mut self, + local: bool, + old_cursor_position: &Anchor, + show_completions: bool, + cx: &mut ViewContext, + ) { + // Copy selections to primary selection buffer + #[cfg(target_os = "linux")] + if local { + let selections = self.selections.all::(cx); + let buffer_handle = self.buffer.read(cx).read(cx); + + let mut text = String::new(); + for (index, selection) in selections.iter().enumerate() { + let text_for_selection = buffer_handle + .text_for_range(selection.start..selection.end) + .collect::(); + + text.push_str(&text_for_selection); + if index != selections.len() - 1 { + text.push('\n'); + } + } + + if !text.is_empty() { + cx.write_to_primary(ClipboardItem::new(text)); + } + } + + if self.focus_handle.is_focused(cx) && self.leader_peer_id.is_none() { + self.buffer.update(cx, |buffer, cx| { + buffer.set_active_selections( + &self.selections.disjoint_anchors(), + self.selections.line_mode, + self.cursor_shape, + cx, + ) + }); + } + + let display_map = self + .display_map + .update(cx, |display_map, cx| display_map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + self.add_selections_state = None; + self.select_next_state = None; + self.select_prev_state = None; + self.select_larger_syntax_node_stack.clear(); + self.invalidate_autoclose_regions(&self.selections.disjoint_anchors(), buffer); + self.snippet_stack + .invalidate(&self.selections.disjoint_anchors(), buffer); + self.take_rename(false, cx); + + let new_cursor_position = self.selections.newest_anchor().head(); + + self.push_to_nav_history( + *old_cursor_position, + Some(new_cursor_position.to_point(buffer)), + cx, + ); + + if local { + let new_cursor_position = self.selections.newest_anchor().head(); + let mut context_menu = self.context_menu.write(); + let completion_menu = match context_menu.as_ref() { + Some(ContextMenu::Completions(menu)) => Some(menu), + + _ => { + *context_menu = None; + None + } + }; + + if let Some(completion_menu) = 
completion_menu { + let cursor_position = new_cursor_position.to_offset(buffer); + let (word_range, kind) = buffer.surrounding_word(completion_menu.initial_position); + if kind == Some(CharKind::Word) + && word_range.to_inclusive().contains(&cursor_position) + { + let mut completion_menu = completion_menu.clone(); + drop(context_menu); + + let query = Self::completion_query(buffer, cursor_position); + cx.spawn(move |this, mut cx| async move { + completion_menu + .filter(query.as_deref(), cx.background_executor().clone()) + .await; + + this.update(&mut cx, |this, cx| { + let mut context_menu = this.context_menu.write(); + let Some(ContextMenu::Completions(menu)) = context_menu.as_ref() else { + return; + }; + + if menu.id > completion_menu.id { + return; + } + + *context_menu = Some(ContextMenu::Completions(completion_menu)); + drop(context_menu); + cx.notify(); + }) + }) + .detach(); + + if show_completions { + self.show_completions(&ShowCompletions, cx); + } + } else { + drop(context_menu); + self.hide_context_menu(cx); + } + } else { + drop(context_menu); + } + + hide_hover(self, cx); + + if old_cursor_position.to_display_point(&display_map).row() + != new_cursor_position.to_display_point(&display_map).row() + { + self.available_code_actions.take(); + } + self.refresh_code_actions(cx); + self.refresh_document_highlights(cx); + refresh_matching_bracket_highlights(self, cx); + self.discard_inline_completion(false, cx); + if self.git_blame_inline_enabled { + self.start_inline_blame_timer(cx); + } + } + + self.blink_manager.update(cx, BlinkManager::pause_blinking); + cx.emit(EditorEvent::SelectionsChanged { local }); + + if self.selections.disjoint_anchors().len() == 1 { + cx.emit(SearchEvent::ActiveMatchChanged) + } + + cx.notify(); + } + + pub fn change_selections( + &mut self, + autoscroll: Option, + cx: &mut ViewContext, + change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R, + ) -> R { + self.change_selections_inner(autoscroll, true, cx, change) + } + + pub fn change_selections_inner( + &mut self, + autoscroll: Option, + request_completions: bool, + cx: &mut ViewContext, + change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R, + ) -> R { + let old_cursor_position = self.selections.newest_anchor().head(); + self.push_to_selection_history(); + + let (changed, result) = self.selections.change_with(cx, change); + + if changed { + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + self.selections_did_change(true, &old_cursor_position, request_completions, cx); + } + + result + } + + pub fn edit(&mut self, edits: I, cx: &mut ViewContext) + where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, + { + if self.read_only(cx) { + return; + } + + self.buffer + .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); + } + + pub fn edit_with_autoindent(&mut self, edits: I, cx: &mut ViewContext) + where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, + { + if self.read_only(cx) { + return; + } + + self.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, self.autoindent_mode.clone(), cx) + }); + } + + pub fn edit_with_block_indent( + &mut self, + edits: I, + original_indent_columns: Vec, + cx: &mut ViewContext, + ) where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, + { + if self.read_only(cx) { + return; + } + + self.buffer.update(cx, |buffer, cx| { + buffer.edit( + edits, + Some(AutoindentMode::Block { + original_indent_columns, + }), + cx, + ) + }); + } + + fn select(&mut self, phase: SelectPhase, cx: &mut ViewContext) { + 
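+ // Starting any mouse selection first dismisses an open context menu; the `SelectPhase` arms below (`Begin`, `BeginColumnar`, `Extend`, `Update`, `End`) mirror the stages of a pointer drag.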
self.hide_context_menu(cx); + + match phase { + SelectPhase::Begin { + position, + add, + click_count, + } => self.begin_selection(position, add, click_count, cx), + SelectPhase::BeginColumnar { + position, + goal_column, + reset, + } => self.begin_columnar_selection(position, goal_column, reset, cx), + SelectPhase::Extend { + position, + click_count, + } => self.extend_selection(position, click_count, cx), + SelectPhase::Update { + position, + goal_column, + scroll_delta, + } => self.update_selection(position, goal_column, scroll_delta, cx), + SelectPhase::End => self.end_selection(cx), + } + } + + fn extend_selection( + &mut self, + position: DisplayPoint, + click_count: usize, + cx: &mut ViewContext, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let tail = self.selections.newest::(cx).tail(); + self.begin_selection(position, false, click_count, cx); + + let position = position.to_offset(&display_map, Bias::Left); + let tail_anchor = display_map.buffer_snapshot.anchor_before(tail); + + let mut pending_selection = self + .selections + .pending_anchor() + .expect("extend_selection not called with pending selection"); + if position >= tail { + pending_selection.start = tail_anchor; + } else { + pending_selection.end = tail_anchor; + pending_selection.reversed = true; + } + + let mut pending_mode = self.selections.pending_mode().unwrap(); + match &mut pending_mode { + SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor, + _ => {} + } + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.set_pending(pending_selection, pending_mode) + }); + } + + fn begin_selection( + &mut self, + position: DisplayPoint, + add: bool, + click_count: usize, + cx: &mut ViewContext, + ) { + if !self.focus_handle.is_focused(cx) { + cx.focus(&self.focus_handle); + } + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let newest_selection = self.selections.newest_anchor().clone(); + let position = display_map.clip_point(position, Bias::Left); + + let start; + let end; + let mode; + let auto_scroll; + match click_count { + 1 => { + start = buffer.anchor_before(position.to_point(&display_map)); + end = start; + mode = SelectMode::Character; + auto_scroll = true; + } + 2 => { + let range = movement::surrounding_word(&display_map, position); + start = buffer.anchor_before(range.start.to_point(&display_map)); + end = buffer.anchor_before(range.end.to_point(&display_map)); + mode = SelectMode::Word(start..end); + auto_scroll = true; + } + 3 => { + let position = display_map + .clip_point(position, Bias::Left) + .to_point(&display_map); + let line_start = display_map.prev_line_boundary(position).0; + let next_line_start = buffer.clip_point( + display_map.next_line_boundary(position).0 + Point::new(1, 0), + Bias::Left, + ); + start = buffer.anchor_before(line_start); + end = buffer.anchor_before(next_line_start); + mode = SelectMode::Line(start..end); + auto_scroll = true; + } + _ => { + start = buffer.anchor_before(0); + end = buffer.anchor_before(buffer.len()); + mode = SelectMode::All; + auto_scroll = false; + } + } + + self.change_selections(auto_scroll.then(|| Autoscroll::newest()), cx, |s| { + if !add { + s.clear_disjoint(); + } else if click_count > 1 { + s.delete(newest_selection.id) + } + + s.set_pending_anchor_range(start..end, mode); + }); + } + + fn begin_columnar_selection( + &mut self, + position: DisplayPoint, + goal_column: u32, + reset: bool, + cx: 
&mut ViewContext, + ) { + if !self.focus_handle.is_focused(cx) { + cx.focus(&self.focus_handle); + } + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + if reset { + let pointer_position = display_map + .buffer_snapshot + .anchor_before(position.to_point(&display_map)); + + self.change_selections(Some(Autoscroll::newest()), cx, |s| { + s.clear_disjoint(); + s.set_pending_anchor_range( + pointer_position..pointer_position, + SelectMode::Character, + ); + }); + } + + let tail = self.selections.newest::(cx).tail(); + self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail)); + + if !reset { + self.select_columns( + tail.to_display_point(&display_map), + position, + goal_column, + &display_map, + cx, + ); + } + } + + fn update_selection( + &mut self, + position: DisplayPoint, + goal_column: u32, + scroll_delta: gpui::Point, + cx: &mut ViewContext, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + if let Some(tail) = self.columnar_selection_tail.as_ref() { + let tail = tail.to_display_point(&display_map); + self.select_columns(tail, position, goal_column, &display_map, cx); + } else if let Some(mut pending) = self.selections.pending_anchor() { + let buffer = self.buffer.read(cx).snapshot(cx); + let head; + let tail; + let mode = self.selections.pending_mode().unwrap(); + match &mode { + SelectMode::Character => { + head = position.to_point(&display_map); + tail = pending.tail().to_point(&buffer); + } + SelectMode::Word(original_range) => { + let original_display_range = original_range.start.to_display_point(&display_map) + ..original_range.end.to_display_point(&display_map); + let original_buffer_range = original_display_range.start.to_point(&display_map) + ..original_display_range.end.to_point(&display_map); + if movement::is_inside_word(&display_map, position) + || original_display_range.contains(&position) + { + let word_range = movement::surrounding_word(&display_map, position); + if word_range.start < original_display_range.start { + head = word_range.start.to_point(&display_map); + } else { + head = word_range.end.to_point(&display_map); + } + } else { + head = position.to_point(&display_map); + } + + if head <= original_buffer_range.start { + tail = original_buffer_range.end; + } else { + tail = original_buffer_range.start; + } + } + SelectMode::Line(original_range) => { + let original_range = original_range.to_point(&display_map.buffer_snapshot); + + let position = display_map + .clip_point(position, Bias::Left) + .to_point(&display_map); + let line_start = display_map.prev_line_boundary(position).0; + let next_line_start = buffer.clip_point( + display_map.next_line_boundary(position).0 + Point::new(1, 0), + Bias::Left, + ); + + if line_start < original_range.start { + head = line_start + } else { + head = next_line_start + } + + if head <= original_range.start { + tail = original_range.end; + } else { + tail = original_range.start; + } + } + SelectMode::All => { + return; + } + }; + + if head < tail { + pending.start = buffer.anchor_before(head); + pending.end = buffer.anchor_before(tail); + pending.reversed = true; + } else { + pending.start = buffer.anchor_before(tail); + pending.end = buffer.anchor_before(head); + pending.reversed = false; + } + + self.change_selections(None, cx, |s| { + s.set_pending(pending, mode); + }); + } else { + log::error!("update_selection dispatched with no pending selection"); + return; + } + + self.apply_scroll_delta(scroll_delta, cx); + cx.notify(); + } + + fn 
end_selection(&mut self, cx: &mut ViewContext) { + self.columnar_selection_tail.take(); + if self.selections.pending_anchor().is_some() { + let selections = self.selections.all::(cx); + self.change_selections(None, cx, |s| { + s.select(selections); + s.clear_pending(); + }); + } + } + + fn select_columns( + &mut self, + tail: DisplayPoint, + head: DisplayPoint, + goal_column: u32, + display_map: &DisplaySnapshot, + cx: &mut ViewContext, + ) { + let start_row = cmp::min(tail.row(), head.row()); + let end_row = cmp::max(tail.row(), head.row()); + let start_column = cmp::min(tail.column(), goal_column); + let end_column = cmp::max(tail.column(), goal_column); + let reversed = start_column < tail.column(); + + let selection_ranges = (start_row.0..=end_row.0) + .map(DisplayRow) + .filter_map(|row| { + if start_column <= display_map.line_len(row) && !display_map.is_block_line(row) { + let start = display_map + .clip_point(DisplayPoint::new(row, start_column), Bias::Left) + .to_point(display_map); + let end = display_map + .clip_point(DisplayPoint::new(row, end_column), Bias::Right) + .to_point(display_map); + if reversed { + Some(end..start) + } else { + Some(start..end) + } + } else { + None + } + }) + .collect::>(); + + self.change_selections(None, cx, |s| { + s.select_ranges(selection_ranges); + }); + cx.notify(); + } + + pub fn has_pending_nonempty_selection(&self) -> bool { + let pending_nonempty_selection = match self.selections.pending_anchor() { + Some(Selection { start, end, .. }) => start != end, + None => false, + }; + pending_nonempty_selection || self.columnar_selection_tail.is_some() + } + + pub fn has_pending_selection(&self) -> bool { + self.selections.pending_anchor().is_some() || self.columnar_selection_tail.is_some() + } + + pub fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { + self.clear_expanded_diff_hunks(cx); + if self.dismiss_menus_and_popups(true, cx) { + return; + } + + if self.mode == EditorMode::Full { + if self.change_selections(Some(Autoscroll::fit()), cx, |s| s.try_cancel()) { + return; + } + } + + cx.propagate(); + } + + pub fn dismiss_menus_and_popups( + &mut self, + should_report_inline_completion_event: bool, + cx: &mut ViewContext, + ) -> bool { + if self.take_rename(false, cx).is_some() { + return true; + } + + if hide_hover(self, cx) { + return true; + } + + if self.hide_context_menu(cx).is_some() { + return true; + } + + if self.discard_inline_completion(should_report_inline_completion_event, cx) { + return true; + } + + if self.snippet_stack.pop().is_some() { + return true; + } + + if self.mode == EditorMode::Full { + if self.active_diagnostics.is_some() { + self.dismiss_diagnostics(cx); + return true; + } + } + + false + } + + pub fn handle_input(&mut self, text: &str, cx: &mut ViewContext) { + let text: Arc = text.into(); + + if self.read_only(cx) { + return; + } + + let selections = self.selections.all_adjusted(cx); + let mut brace_inserted = false; + let mut edits = Vec::new(); + let mut new_selections = Vec::with_capacity(selections.len()); + let mut new_autoclose_regions = Vec::new(); + let snapshot = self.buffer.read(cx).read(cx); + + for (selection, autoclose_region) in + self.selections_with_autoclose_regions(selections, &snapshot) + { + if let Some(scope) = snapshot.language_scope_at(selection.head()) { + // Determine if the inserted text matches the opening or closing + // bracket of any of this language's bracket pairs. 
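+ // Note: only a suffix of a multi-character opening bracket has to match the typed text here; the characters already in the buffer are checked against the rest of the opening bracket below (`preceding_text_matches_prefix`).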
+ let mut bracket_pair = None; + let mut is_bracket_pair_start = false; + let mut is_bracket_pair_end = false; + if !text.is_empty() { + // `text` can be empty when a user is using IME (e.g. Chinese Wubi Simplified) + // and they are removing the character that triggered IME popup. + for (pair, enabled) in scope.brackets() { + if !pair.close { + continue; + } + + if enabled && pair.start.ends_with(text.as_ref()) { + bracket_pair = Some(pair.clone()); + is_bracket_pair_start = true; + break; + } + if pair.end.as_str() == text.as_ref() { + bracket_pair = Some(pair.clone()); + is_bracket_pair_end = true; + break; + } + } + } + + if let Some(bracket_pair) = bracket_pair { + if selection.is_empty() { + if is_bracket_pair_start { + let prefix_len = bracket_pair.start.len() - text.len(); + + // If the inserted text is a suffix of an opening bracket and the + // selection is preceded by the rest of the opening bracket, then + // insert the closing bracket. + let following_text_allows_autoclose = snapshot + .chars_at(selection.start) + .next() + .map_or(true, |c| scope.should_autoclose_before(c)); + let preceding_text_matches_prefix = prefix_len == 0 + || (selection.start.column >= (prefix_len as u32) + && snapshot.contains_str_at( + Point::new( + selection.start.row, + selection.start.column - (prefix_len as u32), + ), + &bracket_pair.start[..prefix_len], + )); + let autoclose = self.use_autoclose + && snapshot.settings_at(selection.start, cx).use_autoclose; + if autoclose + && following_text_allows_autoclose + && preceding_text_matches_prefix + { + let anchor = snapshot.anchor_before(selection.end); + new_selections.push((selection.map(|_| anchor), text.len())); + new_autoclose_regions.push(( + anchor, + text.len(), + selection.id, + bracket_pair.clone(), + )); + edits.push(( + selection.range(), + format!("{}{}", text, bracket_pair.end).into(), + )); + brace_inserted = true; + continue; + } + } + + if let Some(region) = autoclose_region { + // If the selection is followed by an auto-inserted closing bracket, + // then don't insert that closing bracket again; just move the selection + // past the closing bracket. + let should_skip = selection.end == region.range.end.to_point(&snapshot) + && text.as_ref() == region.pair.end.as_str(); + if should_skip { + let anchor = snapshot.anchor_after(selection.end); + new_selections + .push((selection.map(|_| anchor), region.pair.end.len())); + continue; + } + } + + let always_treat_brackets_as_autoclosed = snapshot + .settings_at(selection.start, cx) + .always_treat_brackets_as_autoclosed; + if always_treat_brackets_as_autoclosed + && is_bracket_pair_end + && snapshot.contains_str_at(selection.end, text.as_ref()) + { + // Otherwise, when `always_treat_brackets_as_autoclosed` is set to `true + // and the inserted text is a closing bracket and the selection is followed + // by the closing bracket then move the selection past the closing bracket. + let anchor = snapshot.anchor_after(selection.end); + new_selections.push((selection.map(|_| anchor), text.len())); + continue; + } + } + // If an opening bracket is 1 character long and is typed while + // text is selected, then surround that text with the bracket pair. 
+ else if is_bracket_pair_start && bracket_pair.start.chars().count() == 1 { + edits.push((selection.start..selection.start, text.clone())); + edits.push(( + selection.end..selection.end, + bracket_pair.end.as_str().into(), + )); + brace_inserted = true; + new_selections.push(( + Selection { + id: selection.id, + start: snapshot.anchor_after(selection.start), + end: snapshot.anchor_before(selection.end), + reversed: selection.reversed, + goal: selection.goal, + }, + 0, + )); + continue; + } + } + } + + if self.auto_replace_emoji_shortcode + && selection.is_empty() + && text.as_ref().ends_with(':') + { + if let Some(possible_emoji_short_code) = + Self::find_possible_emoji_shortcode_at_position(&snapshot, selection.start) + { + if !possible_emoji_short_code.is_empty() { + if let Some(emoji) = emojis::get_by_shortcode(&possible_emoji_short_code) { + let emoji_shortcode_start = Point::new( + selection.start.row, + selection.start.column - possible_emoji_short_code.len() as u32 - 1, + ); + + // Remove shortcode from buffer + edits.push(( + emoji_shortcode_start..selection.start, + "".to_string().into(), + )); + new_selections.push(( + Selection { + id: selection.id, + start: snapshot.anchor_after(emoji_shortcode_start), + end: snapshot.anchor_before(selection.start), + reversed: selection.reversed, + goal: selection.goal, + }, + 0, + )); + + // Insert emoji + let selection_start_anchor = snapshot.anchor_after(selection.start); + new_selections.push((selection.map(|_| selection_start_anchor), 0)); + edits.push((selection.start..selection.end, emoji.to_string().into())); + + continue; + } + } + } + } + + // If not handling any auto-close operation, then just replace the selected + // text with the given input and move the selection to the end of the + // newly inserted text. 
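+ // The anchor is taken after the selection's end so that, once the edit replaces the selected range, the restored cursor sits just past the newly inserted text.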
+ let anchor = snapshot.anchor_after(selection.end); + new_selections.push((selection.map(|_| anchor), 0)); + edits.push((selection.start..selection.end, text.clone())); + } + + drop(snapshot); + self.transact(cx, |this, cx| { + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, this.autoindent_mode.clone(), cx); + }); + + let new_anchor_selections = new_selections.iter().map(|e| &e.0); + let new_selection_deltas = new_selections.iter().map(|e| e.1); + let snapshot = this.buffer.read(cx).read(cx); + let new_selections = resolve_multiple::(new_anchor_selections, &snapshot) + .zip(new_selection_deltas) + .map(|(selection, delta)| Selection { + id: selection.id, + start: selection.start + delta, + end: selection.end + delta, + reversed: selection.reversed, + goal: SelectionGoal::None, + }) + .collect::>(); + + let mut i = 0; + for (position, delta, selection_id, pair) in new_autoclose_regions { + let position = position.to_offset(&snapshot) + delta; + let start = snapshot.anchor_before(position); + let end = snapshot.anchor_after(position); + while let Some(existing_state) = this.autoclose_regions.get(i) { + match existing_state.range.start.cmp(&start, &snapshot) { + Ordering::Less => i += 1, + Ordering::Greater => break, + Ordering::Equal => match end.cmp(&existing_state.range.end, &snapshot) { + Ordering::Less => i += 1, + Ordering::Equal => break, + Ordering::Greater => break, + }, + } + } + this.autoclose_regions.insert( + i, + AutocloseRegion { + selection_id, + range: start..end, + pair, + }, + ); + } + + drop(snapshot); + let had_active_inline_completion = this.has_active_inline_completion(cx); + this.change_selections_inner(Some(Autoscroll::fit()), false, cx, |s| { + s.select(new_selections) + }); + + if brace_inserted { + // If we inserted a brace while composing text (i.e. typing `"` on a + // Brazilian keyboard), exit the composing state because most likely + // the user wanted to surround the selection. 
+ this.unmark_text(cx); + } else if EditorSettings::get_global(cx).use_on_type_format { + if let Some(on_type_format_task) = + this.trigger_on_type_formatting(text.to_string(), cx) + { + on_type_format_task.detach_and_log_err(cx); + } + } + + let trigger_in_words = !had_active_inline_completion; + this.trigger_completion_on_input(&text, trigger_in_words, cx); + this.refresh_inline_completion(true, cx); + }); + } + + fn find_possible_emoji_shortcode_at_position( + snapshot: &MultiBufferSnapshot, + position: Point, + ) -> Option { + let mut chars = Vec::new(); + let mut found_colon = false; + for char in snapshot.reversed_chars_at(position).take(100) { + // Found a possible emoji shortcode in the middle of the buffer + if found_colon { + if char.is_whitespace() { + chars.reverse(); + return Some(chars.iter().collect()); + } + // If the previous character is not a whitespace, we are in the middle of a word + // and we only want to complete the shortcode if the word is made up of other emojis + let mut containing_word = String::new(); + for ch in snapshot + .reversed_chars_at(position) + .skip(chars.len() + 1) + .take(100) + { + if ch.is_whitespace() { + break; + } + containing_word.push(ch); + } + let containing_word = containing_word.chars().rev().collect::(); + if util::word_consists_of_emojis(containing_word.as_str()) { + chars.reverse(); + return Some(chars.iter().collect()); + } + } + + if char.is_whitespace() || !char.is_ascii() { + return None; + } + if char == ':' { + found_colon = true; + } else { + chars.push(char); + } + } + // Found a possible emoji shortcode at the beginning of the buffer + chars.reverse(); + Some(chars.iter().collect()) + } + + pub fn newline(&mut self, _: &Newline, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + let (edits, selection_fixup_info): (Vec<_>, Vec<_>) = { + let selections = this.selections.all::(cx); + let multi_buffer = this.buffer.read(cx); + let buffer = multi_buffer.snapshot(cx); + selections + .iter() + .map(|selection| { + let start_point = selection.start.to_point(&buffer); + let mut indent = + buffer.indent_size_for_line(MultiBufferRow(start_point.row)); + indent.len = cmp::min(indent.len, start_point.column); + let start = selection.start; + let end = selection.end; + let selection_is_empty = start == end; + let language_scope = buffer.language_scope_at(start); + let (comment_delimiter, insert_extra_newline) = if let Some(language) = + &language_scope + { + let leading_whitespace_len = buffer + .reversed_chars_at(start) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + + let trailing_whitespace_len = buffer + .chars_at(end) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + + let insert_extra_newline = + language.brackets().any(|(pair, enabled)| { + let pair_start = pair.start.trim_end(); + let pair_end = pair.end.trim_start(); + + enabled + && pair.newline + && buffer.contains_str_at( + end + trailing_whitespace_len, + pair_end, + ) + && buffer.contains_str_at( + (start - leading_whitespace_len) + .saturating_sub(pair_start.len()), + pair_start, + ) + }); + + // Comment extension on newline is allowed only for cursor selections + let comment_delimiter = maybe!({ + if !selection_is_empty { + return None; + } + + if !multi_buffer.settings_at(0, cx).extend_comment_on_newline { + return None; + } + + let delimiters = language.line_comment_prefixes(); + let max_len_of_delimiter = + delimiters.iter().map(|delimiter| delimiter.len()).max()?; + let (snapshot, 
range) = + buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; + + let mut index_of_first_non_whitespace = 0; + let comment_candidate = snapshot + .chars_for_range(range) + .skip_while(|c| { + let should_skip = c.is_whitespace(); + if should_skip { + index_of_first_non_whitespace += 1; + } + should_skip + }) + .take(max_len_of_delimiter) + .collect::(); + let comment_prefix = delimiters.iter().find(|comment_prefix| { + comment_candidate.starts_with(comment_prefix.as_ref()) + })?; + let cursor_is_placed_after_comment_marker = + index_of_first_non_whitespace + comment_prefix.len() + <= start_point.column as usize; + if cursor_is_placed_after_comment_marker { + Some(comment_prefix.clone()) + } else { + None + } + }); + (comment_delimiter, insert_extra_newline) + } else { + (None, false) + }; + + let capacity_for_delimiter = comment_delimiter + .as_deref() + .map(str::len) + .unwrap_or_default(); + let mut new_text = + String::with_capacity(1 + capacity_for_delimiter + indent.len as usize); + new_text.push_str("\n"); + new_text.extend(indent.chars()); + if let Some(delimiter) = &comment_delimiter { + new_text.push_str(&delimiter); + } + if insert_extra_newline { + new_text = new_text.repeat(2); + } + + let anchor = buffer.anchor_after(end); + let new_selection = selection.map(|_| anchor); + ( + (start..end, new_text), + (insert_extra_newline, new_selection), + ) + }) + .unzip() + }; + + this.edit_with_autoindent(edits, cx); + let buffer = this.buffer.read(cx).snapshot(cx); + let new_selections = selection_fixup_info + .into_iter() + .map(|(extra_newline_inserted, new_selection)| { + let mut cursor = new_selection.end.to_point(&buffer); + if extra_newline_inserted { + cursor.row -= 1; + cursor.column = buffer.line_len(MultiBufferRow(cursor.row)); + } + new_selection.map(|_| cursor) + }) + .collect(); + + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(new_selections)); + this.refresh_inline_completion(true, cx); + }); + } + + pub fn newline_above(&mut self, _: &NewlineAbove, cx: &mut ViewContext) { + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + + let mut edits = Vec::new(); + let mut rows = Vec::new(); + + for (rows_inserted, selection) in self.selections.all_adjusted(cx).into_iter().enumerate() { + let cursor = selection.head(); + let row = cursor.row; + + let start_of_line = snapshot.clip_point(Point::new(row, 0), Bias::Left); + + let newline = "\n".to_string(); + edits.push((start_of_line..start_of_line, newline)); + + rows.push(row + rows_inserted as u32); + } + + self.transact(cx, |editor, cx| { + editor.edit(edits, cx); + + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + let mut index = 0; + s.move_cursors_with(|map, _, _| { + let row = rows[index]; + index += 1; + + let point = Point::new(row, 0); + let boundary = map.next_line_boundary(point).1; + let clipped = map.clip_point(boundary, Bias::Left); + + (clipped, SelectionGoal::None) + }); + }); + + let mut indent_edits = Vec::new(); + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); + for row in rows { + let indents = multibuffer_snapshot.suggested_indents(row..row + 1, cx); + for (row, indent) in indents { + if indent.len == 0 { + continue; + } + + let text = match indent.kind { + IndentKind::Space => " ".repeat(indent.len as usize), + IndentKind::Tab => "\t".repeat(indent.len as usize), + }; + let point = Point::new(row.0, 0); + indent_edits.push((point..point, text)); + } + } + editor.edit(indent_edits, cx); + }); + } + + pub fn newline_below(&mut 
self, _: &NewlineBelow, cx: &mut ViewContext) { + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + + let mut edits = Vec::new(); + let mut rows = Vec::new(); + let mut rows_inserted = 0; + + for selection in self.selections.all_adjusted(cx) { + let cursor = selection.head(); + let row = cursor.row; + + let point = Point::new(row + 1, 0); + let start_of_line = snapshot.clip_point(point, Bias::Left); + + let newline = "\n".to_string(); + edits.push((start_of_line..start_of_line, newline)); + + rows_inserted += 1; + rows.push(row + rows_inserted); + } + + self.transact(cx, |editor, cx| { + editor.edit(edits, cx); + + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + let mut index = 0; + s.move_cursors_with(|map, _, _| { + let row = rows[index]; + index += 1; + + let point = Point::new(row, 0); + let boundary = map.next_line_boundary(point).1; + let clipped = map.clip_point(boundary, Bias::Left); + + (clipped, SelectionGoal::None) + }); + }); + + let mut indent_edits = Vec::new(); + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); + for row in rows { + let indents = multibuffer_snapshot.suggested_indents(row..row + 1, cx); + for (row, indent) in indents { + if indent.len == 0 { + continue; + } + + let text = match indent.kind { + IndentKind::Space => " ".repeat(indent.len as usize), + IndentKind::Tab => "\t".repeat(indent.len as usize), + }; + let point = Point::new(row.0, 0); + indent_edits.push((point..point, text)); + } + } + editor.edit(indent_edits, cx); + }); + } + + pub fn insert(&mut self, text: &str, cx: &mut ViewContext) { + let autoindent = text.is_empty().not().then(|| AutoindentMode::Block { + original_indent_columns: Vec::new(), + }); + self.insert_with_autoindent_mode(text, autoindent, cx); + } + + fn insert_with_autoindent_mode( + &mut self, + text: &str, + autoindent_mode: Option, + cx: &mut ViewContext, + ) { + if self.read_only(cx) { + return; + } + + let text: Arc = text.into(); + self.transact(cx, |this, cx| { + let old_selections = this.selections.all_adjusted(cx); + let selection_anchors = this.buffer.update(cx, |buffer, cx| { + let anchors = { + let snapshot = buffer.read(cx); + old_selections + .iter() + .map(|s| { + let anchor = snapshot.anchor_after(s.head()); + s.map(|_| anchor) + }) + .collect::>() + }; + buffer.edit( + old_selections + .iter() + .map(|s| (s.start..s.end, text.clone())), + autoindent_mode, + cx, + ); + anchors + }); + + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_anchors(selection_anchors); + }) + }); + } + + fn trigger_completion_on_input( + &mut self, + text: &str, + trigger_in_words: bool, + cx: &mut ViewContext, + ) { + if !EditorSettings::get_global(cx).show_completions_on_input { + return; + } + + let selection = self.selections.newest_anchor(); + if self + .buffer + .read(cx) + .is_completion_trigger(selection.head(), text, trigger_in_words, cx) + { + self.show_completions(&ShowCompletions, cx); + } else { + self.hide_context_menu(cx); + } + } + + /// If any empty selections is touching the start of its innermost containing autoclose + /// region, expand it to select the brackets. 
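+ /// When `always_treat_brackets_as_autoclosed` is enabled, a closing bracket directly after the cursor (with its opening bracket directly before it) is expanded over as well, even without a tracked autoclose region.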
+ fn select_autoclose_pair(&mut self, cx: &mut ViewContext) { + let selections = self.selections.all::(cx); + let buffer = self.buffer.read(cx).read(cx); + let new_selections = self + .selections_with_autoclose_regions(selections, &buffer) + .map(|(mut selection, region)| { + if !selection.is_empty() { + return selection; + } + + if let Some(region) = region { + let mut range = region.range.to_offset(&buffer); + if selection.start == range.start && range.start >= region.pair.start.len() { + range.start -= region.pair.start.len(); + if buffer.contains_str_at(range.start, ®ion.pair.start) + && buffer.contains_str_at(range.end, ®ion.pair.end) + { + range.end += region.pair.end.len(); + selection.start = range.start; + selection.end = range.end; + + return selection; + } + } + } + + let always_treat_brackets_as_autoclosed = buffer + .settings_at(selection.start, cx) + .always_treat_brackets_as_autoclosed; + + if !always_treat_brackets_as_autoclosed { + return selection; + } + + if let Some(scope) = buffer.language_scope_at(selection.start) { + for (pair, enabled) in scope.brackets() { + if !enabled || !pair.close { + continue; + } + + if buffer.contains_str_at(selection.start, &pair.end) { + let pair_start_len = pair.start.len(); + if buffer.contains_str_at(selection.start - pair_start_len, &pair.start) + { + selection.start -= pair_start_len; + selection.end += pair.end.len(); + + return selection; + } + } + } + } + + selection + }) + .collect(); + + drop(buffer); + self.change_selections(None, cx, |selections| selections.select(new_selections)); + } + + /// Iterate the given selections, and for each one, find the smallest surrounding + /// autoclose region. This uses the ordering of the selections and the autoclose + /// regions to avoid repeated comparisons. + fn selections_with_autoclose_regions<'a, D: ToOffset + Clone>( + &'a self, + selections: impl IntoIterator>, + buffer: &'a MultiBufferSnapshot, + ) -> impl Iterator, Option<&'a AutocloseRegion>)> { + let mut i = 0; + let mut regions = self.autoclose_regions.as_slice(); + selections.into_iter().map(move |selection| { + let range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer); + + let mut enclosing = None; + while let Some(pair_state) = regions.get(i) { + if pair_state.range.end.to_offset(buffer) < range.start { + regions = ®ions[i + 1..]; + i = 0; + } else if pair_state.range.start.to_offset(buffer) > range.end { + break; + } else { + if pair_state.selection_id == selection.id { + enclosing = Some(pair_state); + } + i += 1; + } + } + + (selection.clone(), enclosing) + }) + } + + /// Remove any autoclose regions that no longer contain their selection. 
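+ /// Both the selections and the stored regions are ordered by position, which lets the check below advance through the selections instead of comparing every region against every selection.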
+ fn invalidate_autoclose_regions( + &mut self, + mut selections: &[Selection], + buffer: &MultiBufferSnapshot, + ) { + self.autoclose_regions.retain(|state| { + let mut i = 0; + while let Some(selection) = selections.get(i) { + if selection.end.cmp(&state.range.start, buffer).is_lt() { + selections = &selections[1..]; + continue; + } + if selection.start.cmp(&state.range.end, buffer).is_gt() { + break; + } + if selection.id == state.selection_id { + return true; + } else { + i += 1; + } + } + false + }); + } + + fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option { + let offset = position.to_offset(buffer); + let (word_range, kind) = buffer.surrounding_word(offset); + if offset > word_range.start && kind == Some(CharKind::Word) { + Some( + buffer + .text_for_range(word_range.start..offset) + .collect::(), + ) + } else { + None + } + } + + pub fn toggle_inlay_hints(&mut self, _: &ToggleInlayHints, cx: &mut ViewContext) { + self.refresh_inlay_hints( + InlayHintRefreshReason::Toggle(!self.inlay_hint_cache.enabled), + cx, + ); + } + + pub fn inlay_hints_enabled(&self) -> bool { + self.inlay_hint_cache.enabled + } + + fn refresh_inlay_hints(&mut self, reason: InlayHintRefreshReason, cx: &mut ViewContext) { + if self.project.is_none() || self.mode != EditorMode::Full { + return; + } + + let reason_description = reason.description(); + let ignore_debounce = matches!( + reason, + InlayHintRefreshReason::SettingsChange(_) + | InlayHintRefreshReason::Toggle(_) + | InlayHintRefreshReason::ExcerptsRemoved(_) + ); + let (invalidate_cache, required_languages) = match reason { + InlayHintRefreshReason::Toggle(enabled) => { + self.inlay_hint_cache.enabled = enabled; + if enabled { + (InvalidationStrategy::RefreshRequested, None) + } else { + self.inlay_hint_cache.clear(); + self.splice_inlays( + self.visible_inlay_hints(cx) + .iter() + .map(|inlay| inlay.id) + .collect(), + Vec::new(), + cx, + ); + return; + } + } + InlayHintRefreshReason::SettingsChange(new_settings) => { + match self.inlay_hint_cache.update_settings( + &self.buffer, + new_settings, + self.visible_inlay_hints(cx), + cx, + ) { + ControlFlow::Break(Some(InlaySplice { + to_remove, + to_insert, + })) => { + self.splice_inlays(to_remove, to_insert, cx); + return; + } + ControlFlow::Break(None) => return, + ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None), + } + } + InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { + if let Some(InlaySplice { + to_remove, + to_insert, + }) = self.inlay_hint_cache.remove_excerpts(excerpts_removed) + { + self.splice_inlays(to_remove, to_insert, cx); + } + return; + } + InlayHintRefreshReason::NewLinesShown => (InvalidationStrategy::None, None), + InlayHintRefreshReason::BufferEdited(buffer_languages) => { + (InvalidationStrategy::BufferEdited, Some(buffer_languages)) + } + InlayHintRefreshReason::RefreshRequested => { + (InvalidationStrategy::RefreshRequested, None) + } + }; + + if let Some(InlaySplice { + to_remove, + to_insert, + }) = self.inlay_hint_cache.spawn_hint_refresh( + reason_description, + self.excerpts_for_inlay_hints_query(required_languages.as_ref(), cx), + invalidate_cache, + ignore_debounce, + cx, + ) { + self.splice_inlays(to_remove, to_insert, cx); + } + } + + fn visible_inlay_hints(&self, cx: &ViewContext<'_, Editor>) -> Vec { + self.display_map + .read(cx) + .current_inlays() + .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) + .cloned() + .collect() + } + + pub fn excerpts_for_inlay_hints_query( + &self, + 
restrict_to_languages: Option<&HashSet>>, + cx: &mut ViewContext, + ) -> HashMap, clock::Global, Range)> { + let Some(project) = self.project.as_ref() else { + return HashMap::default(); + }; + let project = project.read(cx); + let multi_buffer = self.buffer().read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let multi_buffer_visible_start = self + .scroll_manager + .anchor() + .anchor + .to_point(&multi_buffer_snapshot); + let multi_buffer_visible_end = multi_buffer_snapshot.clip_point( + multi_buffer_visible_start + + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), + Bias::Left, + ); + let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; + multi_buffer + .range_to_buffer_ranges(multi_buffer_visible_range, cx) + .into_iter() + .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) + .filter_map(|(buffer_handle, excerpt_visible_range, excerpt_id)| { + let buffer = buffer_handle.read(cx); + let buffer_file = project::File::from_dyn(buffer.file())?; + let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?; + let worktree_entry = buffer_worktree + .read(cx) + .entry_for_id(buffer_file.project_entry_id(cx)?)?; + if worktree_entry.is_ignored { + return None; + } + + let language = buffer.language()?; + if let Some(restrict_to_languages) = restrict_to_languages { + if !restrict_to_languages.contains(language) { + return None; + } + } + Some(( + excerpt_id, + ( + buffer_handle, + buffer.version().clone(), + excerpt_visible_range, + ), + )) + }) + .collect() + } + + pub fn text_layout_details(&self, cx: &WindowContext) -> TextLayoutDetails { + TextLayoutDetails { + text_system: cx.text_system().clone(), + editor_style: self.style.clone().unwrap(), + rem_size: cx.rem_size(), + scroll_anchor: self.scroll_manager.anchor(), + visible_rows: self.visible_line_count(), + vertical_scroll_margin: self.scroll_manager.vertical_scroll_margin, + } + } + + fn splice_inlays( + &self, + to_remove: Vec, + to_insert: Vec, + cx: &mut ViewContext, + ) { + self.display_map.update(cx, |display_map, cx| { + display_map.splice_inlays(to_remove, to_insert, cx); + }); + cx.notify(); + } + + fn trigger_on_type_formatting( + &self, + input: String, + cx: &mut ViewContext, + ) -> Option>> { + if input.len() != 1 { + return None; + } + + let project = self.project.as_ref()?; + let position = self.selections.newest_anchor().head(); + let (buffer, buffer_position) = self + .buffer + .read(cx) + .text_anchor_for_position(position, cx)?; + + // OnTypeFormatting returns a list of edits, no need to pass them between Zed instances, + // hence we do LSP request & edit on host side only — add formats to host's history. + let push_to_lsp_host_history = true; + // If this is not the host, append its history with new edits. + let push_to_client_history = project.read(cx).is_remote(); + + let on_type_formatting = project.update(cx, |project, cx| { + project.on_type_format( + buffer.clone(), + buffer_position, + input, + push_to_lsp_host_history, + cx, + ) + }); + Some(cx.spawn(|editor, mut cx| async move { + if let Some(transaction) = on_type_formatting.await? 
{ + if push_to_client_history { + buffer + .update(&mut cx, |buffer, _| { + buffer.push_transaction(transaction, Instant::now()); + }) + .ok(); + } + editor.update(&mut cx, |editor, cx| { + editor.refresh_document_highlights(cx); + })?; + } + Ok(()) + })) + } + + fn show_completions(&mut self, _: &ShowCompletions, cx: &mut ViewContext) { + if self.pending_rename.is_some() { + return; + } + + let Some(provider) = self.completion_provider.as_ref() else { + return; + }; + + let position = self.selections.newest_anchor().head(); + let (buffer, buffer_position) = + if let Some(output) = self.buffer.read(cx).text_anchor_for_position(position, cx) { + output + } else { + return; + }; + + let query = Self::completion_query(&self.buffer.read(cx).read(cx), position); + let completions = provider.completions(&buffer, buffer_position, cx); + + let id = post_inc(&mut self.next_completion_id); + let task = cx.spawn(|this, mut cx| { + async move { + let completions = completions.await.log_err(); + let menu = if let Some(completions) = completions { + let mut menu = CompletionsMenu { + id, + initial_position: position, + match_candidates: completions + .iter() + .enumerate() + .map(|(id, completion)| { + StringMatchCandidate::new( + id, + completion.label.text[completion.label.filter_range.clone()] + .into(), + ) + }) + .collect(), + buffer: buffer.clone(), + completions: Arc::new(RwLock::new(completions.into())), + matches: Vec::new().into(), + selected_item: 0, + scroll_handle: UniformListScrollHandle::new(), + selected_completion_documentation_resolve_debounce: Arc::new(Mutex::new( + DebouncedDelay::new(), + )), + }; + menu.filter(query.as_deref(), cx.background_executor().clone()) + .await; + + if menu.matches.is_empty() { + None + } else { + this.update(&mut cx, |editor, cx| { + let completions = menu.completions.clone(); + let matches = menu.matches.clone(); + + let delay_ms = EditorSettings::get_global(cx) + .completion_documentation_secondary_query_debounce; + let delay = Duration::from_millis(delay_ms); + + editor + .completion_documentation_pre_resolve_debounce + .fire_new(delay, cx, |editor, cx| { + CompletionsMenu::pre_resolve_completion_documentation( + buffer, + completions, + matches, + editor, + cx, + ) + }); + }) + .ok(); + Some(menu) + } + } else { + None + }; + + this.update(&mut cx, |this, cx| { + this.completion_tasks.retain(|(task_id, _)| *task_id >= id); + + let mut context_menu = this.context_menu.write(); + match context_menu.as_ref() { + None => {} + + Some(ContextMenu::Completions(prev_menu)) => { + if prev_menu.id > id { + return; + } + } + + _ => return, + } + + if this.focus_handle.is_focused(cx) && menu.is_some() { + let menu = menu.unwrap(); + *context_menu = Some(ContextMenu::Completions(menu)); + drop(context_menu); + this.discard_inline_completion(false, cx); + cx.notify(); + } else if this.completion_tasks.len() <= 1 { + // If there are no more completion tasks and the last menu was + // empty, we should hide it. If it was already hidden, we should + // also show the copilot completion when available. + drop(context_menu); + if this.hide_context_menu(cx).is_none() { + this.update_visible_inline_completion(cx); + } + } + })?; + + Ok::<_, anyhow::Error>(()) + } + .log_err() + }); + + self.completion_tasks.push((id, task)); + } + + pub fn confirm_completion( + &mut self, + action: &ConfirmCompletion, + cx: &mut ViewContext, + ) -> Option>> { + use language::ToOffset as _; + + let completions_menu = if let ContextMenu::Completions(menu) = self.hide_context_menu(cx)? 
{ + menu + } else { + return None; + }; + + let mat = completions_menu + .matches + .get(action.item_ix.unwrap_or(completions_menu.selected_item))?; + let buffer_handle = completions_menu.buffer; + let completions = completions_menu.completions.read(); + let completion = completions.get(mat.candidate_id)?; + cx.stop_propagation(); + + let snippet; + let text; + if completion.is_snippet() { + snippet = Some(Snippet::parse(&completion.new_text).log_err()?); + text = snippet.as_ref().unwrap().text.clone(); + } else { + snippet = None; + text = completion.new_text.clone(); + }; + let selections = self.selections.all::(cx); + let buffer = buffer_handle.read(cx); + let old_range = completion.old_range.to_offset(buffer); + let old_text = buffer.text_for_range(old_range.clone()).collect::(); + + let newest_selection = self.selections.newest_anchor(); + if newest_selection.start.buffer_id != Some(buffer_handle.read(cx).remote_id()) { + return None; + } + + let lookbehind = newest_selection + .start + .text_anchor + .to_offset(buffer) + .saturating_sub(old_range.start); + let lookahead = old_range + .end + .saturating_sub(newest_selection.end.text_anchor.to_offset(buffer)); + let mut common_prefix_len = old_text + .bytes() + .zip(text.bytes()) + .take_while(|(a, b)| a == b) + .count(); + + let snapshot = self.buffer.read(cx).snapshot(cx); + let mut range_to_replace: Option> = None; + let mut ranges = Vec::new(); + for selection in &selections { + if snapshot.contains_str_at(selection.start.saturating_sub(lookbehind), &old_text) { + let start = selection.start.saturating_sub(lookbehind); + let end = selection.end + lookahead; + if selection.id == newest_selection.id { + range_to_replace = Some( + ((start + common_prefix_len) as isize - selection.start as isize) + ..(end as isize - selection.start as isize), + ); + } + ranges.push(start + common_prefix_len..end); + } else { + common_prefix_len = 0; + ranges.clear(); + ranges.extend(selections.iter().map(|s| { + if s.id == newest_selection.id { + range_to_replace = Some( + old_range.start.to_offset_utf16(&snapshot).0 as isize + - selection.start as isize + ..old_range.end.to_offset_utf16(&snapshot).0 as isize + - selection.start as isize, + ); + old_range.clone() + } else { + s.start..s.end + } + })); + break; + } + } + let text = &text[common_prefix_len..]; + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: range_to_replace, + text: text.into(), + }); + + self.transact(cx, |this, cx| { + if let Some(mut snippet) = snippet { + snippet.text = text.to_string(); + for tabstop in snippet.tabstops.iter_mut().flatten() { + tabstop.start -= common_prefix_len as isize; + tabstop.end -= common_prefix_len as isize; + } + + this.insert_snippet(&ranges, snippet, cx).log_err(); + } else { + this.buffer.update(cx, |buffer, cx| { + buffer.edit( + ranges.iter().map(|range| (range.clone(), text)), + this.autoindent_mode.clone(), + cx, + ); + }); + } + + this.refresh_inline_completion(true, cx); + }); + + let provider = self.completion_provider.as_ref()?; + let apply_edits = provider.apply_additional_edits_for_completion( + buffer_handle, + completion.clone(), + true, + cx, + ); + Some(cx.foreground_executor().spawn(async move { + apply_edits.await?; + Ok(()) + })) + } + + pub fn toggle_code_actions(&mut self, action: &ToggleCodeActions, cx: &mut ViewContext) { + let mut context_menu = self.context_menu.write(); + if let Some(ContextMenu::CodeActions(code_actions)) = context_menu.as_ref() { + if code_actions.deployed_from_indicator == 
action.deployed_from_indicator { + // Toggle if we're selecting the same one + *context_menu = None; + cx.notify(); + return; + } else { + // Otherwise, clear it and start a new one + *context_menu = None; + cx.notify(); + } + } + drop(context_menu); + let snapshot = self.snapshot(cx); + let deployed_from_indicator = action.deployed_from_indicator; + let mut task = self.code_actions_task.take(); + let action = action.clone(); + cx.spawn(|this, mut cx| async move { + while let Some(prev_task) = task { + prev_task.await; + task = this.update(&mut cx, |this, _| this.code_actions_task.take())?; + } + + let spawned_test_task = this.update(&mut cx, |this, cx| { + if this.focus_handle.is_focused(cx) { + let multibuffer_point = action + .deployed_from_indicator + .map(|row| DisplayPoint::new(row, 0).to_point(&snapshot)) + .unwrap_or_else(|| this.selections.newest::(cx).head()); + let (buffer, buffer_row) = snapshot + .buffer_snapshot + .buffer_line_for_row(MultiBufferRow(multibuffer_point.row)) + .and_then(|(buffer_snapshot, range)| { + this.buffer + .read(cx) + .buffer(buffer_snapshot.remote_id()) + .map(|buffer| (buffer, range.start.row)) + })?; + let (_, code_actions) = this + .available_code_actions + .clone() + .and_then(|(location, code_actions)| { + let snapshot = location.buffer.read(cx).snapshot(); + let point_range = location.range.to_point(&snapshot); + let point_range = point_range.start.row..=point_range.end.row; + if point_range.contains(&buffer_row) { + Some((location, code_actions)) + } else { + None + } + }) + .unzip(); + let buffer_id = buffer.read(cx).remote_id(); + let tasks = this + .tasks + .get(&(buffer_id, buffer_row)) + .map(|t| Arc::new(t.to_owned())); + if tasks.is_none() && code_actions.is_none() { + return None; + } + + this.completion_tasks.clear(); + this.discard_inline_completion(false, cx); + let task_context = tasks.as_ref().zip(this.workspace.clone()).and_then( + |(tasks, (workspace, _))| { + let position = Point::new(buffer_row, tasks.1.column); + let range_start = buffer.read(cx).anchor_at(position, Bias::Right); + let location = Location { + buffer: buffer.clone(), + range: range_start..range_start, + }; + workspace + .update(cx, |workspace, cx| { + tasks::task_context_for_location(workspace, location, cx) + }) + .ok() + .flatten() + }, + ); + let tasks = tasks.zip(task_context).map(|(tasks, mut task_context)| { + // Fill in the environmental variables from the tree-sitter captures + let mut additional_task_variables = TaskVariables::default(); + for (capture_name, value) in tasks.1.extra_variables.clone() { + additional_task_variables.insert( + task::VariableName::Custom(capture_name.into()), + value.clone(), + ); + } + task_context + .task_variables + .extend(additional_task_variables); + + Arc::new(ResolvedTasks { + templates: tasks + .1 + .templates + .iter() + .filter_map(|(kind, template)| { + template + .resolve_task(&kind.to_id_base(), &task_context) + .map(|task| (kind.clone(), task)) + }) + .collect(), + position: snapshot + .buffer_snapshot + .anchor_before(Point::new(multibuffer_point.row, tasks.1.column)), + }) + }); + let spawn_straight_away = tasks + .as_ref() + .map_or(false, |tasks| tasks.templates.len() == 1) + && code_actions + .as_ref() + .map_or(true, |actions| actions.is_empty()); + *this.context_menu.write() = Some(ContextMenu::CodeActions(CodeActionsMenu { + buffer, + actions: CodeActionContents { + tasks, + actions: code_actions, + }, + selected_item: Default::default(), + scroll_handle: UniformListScrollHandle::default(), + 
deployed_from_indicator, + })); + if spawn_straight_away { + if let Some(task) = + this.confirm_code_action(&ConfirmCodeAction { item_ix: Some(0) }, cx) + { + cx.notify(); + return Some(task); + } + } + cx.notify(); + } + Some(Task::ready(Ok(()))) + })?; + if let Some(task) = spawned_test_task { + task.await?; + } + + Ok::<_, anyhow::Error>(()) + }) + .detach_and_log_err(cx); + } + + pub fn confirm_code_action( + &mut self, + action: &ConfirmCodeAction, + cx: &mut ViewContext, + ) -> Option>> { + let actions_menu = if let ContextMenu::CodeActions(menu) = self.hide_context_menu(cx)? { + menu + } else { + return None; + }; + let action_ix = action.item_ix.unwrap_or(actions_menu.selected_item); + let action = actions_menu.actions.get(action_ix)?; + let title = action.label(); + let buffer = actions_menu.buffer; + let workspace = self.workspace()?; + + match action { + CodeActionsItem::Task(task_source_kind, resolved_task) => { + workspace.update(cx, |workspace, cx| { + workspace::tasks::schedule_resolved_task( + workspace, + task_source_kind, + resolved_task, + false, + cx, + ); + + Some(Task::ready(Ok(()))) + }) + } + CodeActionsItem::CodeAction(action) => { + let apply_code_actions = workspace + .read(cx) + .project() + .clone() + .update(cx, |project, cx| { + project.apply_code_action(buffer, action, true, cx) + }); + let workspace = workspace.downgrade(); + Some(cx.spawn(|editor, cx| async move { + let project_transaction = apply_code_actions.await?; + Self::open_project_transaction( + &editor, + workspace, + project_transaction, + title, + cx, + ) + .await + })) + } + } + } + + pub async fn open_project_transaction( + this: &WeakView, + workspace: WeakView, + transaction: ProjectTransaction, + title: String, + mut cx: AsyncWindowContext, + ) -> Result<()> { + let replica_id = this.update(&mut cx, |this, cx| this.replica_id(cx))?; + + let mut entries = transaction.0.into_iter().collect::>(); + cx.update(|cx| { + entries.sort_unstable_by_key(|(buffer, _)| { + buffer.read(cx).file().map(|f| f.path().clone()) + }); + })?; + + // If the project transaction's edits are all contained within this editor, then + // avoid opening a new editor to display them. 
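+ // Concretely: the transaction must touch a single buffer, and every edited range must fall
+ // inside the excerpt containing the newest cursor; only then do we return early instead of
+ // building the preview multibuffer below.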
+ + if let Some((buffer, transaction)) = entries.first() { + if entries.len() == 1 { + let excerpt = this.update(&mut cx, |editor, cx| { + editor + .buffer() + .read(cx) + .excerpt_containing(editor.selections.newest_anchor().head(), cx) + })?; + if let Some((_, excerpted_buffer, excerpt_range)) = excerpt { + if excerpted_buffer == *buffer { + let all_edits_within_excerpt = buffer.read_with(&cx, |buffer, _| { + let excerpt_range = excerpt_range.to_offset(buffer); + buffer + .edited_ranges_for_transaction::(transaction) + .all(|range| { + excerpt_range.start <= range.start + && excerpt_range.end >= range.end + }) + })?; + + if all_edits_within_excerpt { + return Ok(()); + } + } + } + } + } else { + return Ok(()); + } + + let mut ranges_to_highlight = Vec::new(); + let excerpt_buffer = cx.new_model(|cx| { + let mut multibuffer = + MultiBuffer::new(replica_id, Capability::ReadWrite).with_title(title); + for (buffer_handle, transaction) in &entries { + let buffer = buffer_handle.read(cx); + ranges_to_highlight.extend( + multibuffer.push_excerpts_with_context_lines( + buffer_handle.clone(), + buffer + .edited_ranges_for_transaction::(transaction) + .collect(), + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ), + ); + } + multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx); + multibuffer + })?; + + workspace.update(&mut cx, |workspace, cx| { + let project = workspace.project().clone(); + let editor = + cx.new_view(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), cx)); + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx); + editor.update(cx, |editor, cx| { + editor.highlight_background::( + &ranges_to_highlight, + |theme| theme.editor_highlighted_line_background, + cx, + ); + }); + })?; + + Ok(()) + } + + fn refresh_code_actions(&mut self, cx: &mut ViewContext) -> Option<()> { + let project = self.project.clone()?; + let buffer = self.buffer.read(cx); + let newest_selection = self.selections.newest_anchor().clone(); + let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?; + let (end_buffer, end) = buffer.text_anchor_for_position(newest_selection.end, cx)?; + if start_buffer != end_buffer { + return None; + } + + self.code_actions_task = Some(cx.spawn(|this, mut cx| async move { + cx.background_executor() + .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) + .await; + + let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| { + project.code_actions(&start_buffer, start..end, cx) + }) { + code_actions.await + } else { + Vec::new() + }; + + this.update(&mut cx, |this, cx| { + this.available_code_actions = if actions.is_empty() { + None + } else { + Some(( + Location { + buffer: start_buffer, + range: start..end, + }, + actions.into(), + )) + }; + cx.notify(); + }) + .log_err(); + })); + None + } + + fn start_inline_blame_timer(&mut self, cx: &mut ViewContext) { + if let Some(delay) = ProjectSettings::get_global(cx).git.inline_blame_delay() { + self.show_git_blame_inline = false; + + self.show_git_blame_inline_delay_task = Some(cx.spawn(|this, mut cx| async move { + cx.background_executor().timer(delay).await; + + this.update(&mut cx, |this, cx| { + this.show_git_blame_inline = true; + cx.notify(); + }) + .log_err(); + })); + } + } + + fn refresh_document_highlights(&mut self, cx: &mut ViewContext) -> Option<()> { + if self.pending_rename.is_some() { + return None; + } + + let project = self.project.clone()?; + let buffer = self.buffer.read(cx); + let newest_selection = self.selections.newest_anchor().clone(); + let 
cursor_position = newest_selection.head(); + let (cursor_buffer, cursor_buffer_position) = + buffer.text_anchor_for_position(cursor_position, cx)?; + let (tail_buffer, _) = buffer.text_anchor_for_position(newest_selection.tail(), cx)?; + if cursor_buffer != tail_buffer { + return None; + } + + self.document_highlights_task = Some(cx.spawn(|this, mut cx| async move { + cx.background_executor() + .timer(DOCUMENT_HIGHLIGHTS_DEBOUNCE_TIMEOUT) + .await; + + let highlights = if let Some(highlights) = project + .update(&mut cx, |project, cx| { + project.document_highlights(&cursor_buffer, cursor_buffer_position, cx) + }) + .log_err() + { + highlights.await.log_err() + } else { + None + }; + + if let Some(highlights) = highlights { + this.update(&mut cx, |this, cx| { + if this.pending_rename.is_some() { + return; + } + + let buffer_id = cursor_position.buffer_id; + let buffer = this.buffer.read(cx); + if !buffer + .text_anchor_for_position(cursor_position, cx) + .map_or(false, |(buffer, _)| buffer == cursor_buffer) + { + return; + } + + let cursor_buffer_snapshot = cursor_buffer.read(cx); + let mut write_ranges = Vec::new(); + let mut read_ranges = Vec::new(); + for highlight in highlights { + for (excerpt_id, excerpt_range) in + buffer.excerpts_for_buffer(&cursor_buffer, cx) + { + let start = highlight + .range + .start + .max(&excerpt_range.context.start, cursor_buffer_snapshot); + let end = highlight + .range + .end + .min(&excerpt_range.context.end, cursor_buffer_snapshot); + if start.cmp(&end, cursor_buffer_snapshot).is_ge() { + continue; + } + + let range = Anchor { + buffer_id, + excerpt_id: excerpt_id, + text_anchor: start, + }..Anchor { + buffer_id, + excerpt_id, + text_anchor: end, + }; + if highlight.kind == lsp::DocumentHighlightKind::WRITE { + write_ranges.push(range); + } else { + read_ranges.push(range); + } + } + } + + this.highlight_background::( + &read_ranges, + |theme| theme.editor_document_highlight_read_background, + cx, + ); + this.highlight_background::( + &write_ranges, + |theme| theme.editor_document_highlight_write_background, + cx, + ); + cx.notify(); + }) + .log_err(); + } + })); + None + } + + fn refresh_inline_completion( + &mut self, + debounce: bool, + cx: &mut ViewContext, + ) -> Option<()> { + let provider = self.inline_completion_provider()?; + let cursor = self.selections.newest_anchor().head(); + let (buffer, cursor_buffer_position) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + if !self.show_inline_completions + || !provider.is_enabled(&buffer, cursor_buffer_position, cx) + { + self.discard_inline_completion(false, cx); + return None; + } + + self.update_visible_inline_completion(cx); + provider.refresh(buffer, cursor_buffer_position, debounce, cx); + Some(()) + } + + fn cycle_inline_completion( + &mut self, + direction: Direction, + cx: &mut ViewContext, + ) -> Option<()> { + let provider = self.inline_completion_provider()?; + let cursor = self.selections.newest_anchor().head(); + let (buffer, cursor_buffer_position) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + if !self.show_inline_completions + || !provider.is_enabled(&buffer, cursor_buffer_position, cx) + { + return None; + } + + provider.cycle(buffer, cursor_buffer_position, direction, cx); + self.update_visible_inline_completion(cx); + + Some(()) + } + + pub fn show_inline_completion(&mut self, _: &ShowInlineCompletion, cx: &mut ViewContext) { + if !self.has_active_inline_completion(cx) { + self.refresh_inline_completion(false, cx); + return; + } + + 
self.update_visible_inline_completion(cx); + } + + pub fn display_cursor_names(&mut self, _: &DisplayCursorNames, cx: &mut ViewContext) { + self.show_cursor_names(cx); + } + + fn show_cursor_names(&mut self, cx: &mut ViewContext) { + self.show_cursor_names = true; + cx.notify(); + cx.spawn(|this, mut cx| async move { + cx.background_executor().timer(CURSORS_VISIBLE_FOR).await; + this.update(&mut cx, |this, cx| { + this.show_cursor_names = false; + cx.notify() + }) + .ok() + }) + .detach(); + } + + pub fn next_inline_completion(&mut self, _: &NextInlineCompletion, cx: &mut ViewContext) { + if self.has_active_inline_completion(cx) { + self.cycle_inline_completion(Direction::Next, cx); + } else { + let is_copilot_disabled = self.refresh_inline_completion(false, cx).is_none(); + if is_copilot_disabled { + cx.propagate(); + } + } + } + + pub fn previous_inline_completion( + &mut self, + _: &PreviousInlineCompletion, + cx: &mut ViewContext, + ) { + if self.has_active_inline_completion(cx) { + self.cycle_inline_completion(Direction::Prev, cx); + } else { + let is_copilot_disabled = self.refresh_inline_completion(false, cx).is_none(); + if is_copilot_disabled { + cx.propagate(); + } + } + } + + fn accept_inline_completion(&mut self, cx: &mut ViewContext) -> bool { + if let Some(completion) = self.take_active_inline_completion(cx) { + if let Some(provider) = self.inline_completion_provider() { + provider.accept(cx); + } + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: None, + text: completion.text.to_string().into(), + }); + self.insert_with_autoindent_mode(&completion.text.to_string(), None, cx); + self.refresh_inline_completion(true, cx); + cx.notify(); + true + } else { + false + } + } + + pub fn accept_partial_inline_completion( + &mut self, + _: &AcceptPartialInlineCompletion, + cx: &mut ViewContext, + ) { + if self.selections.count() == 1 && self.has_active_inline_completion(cx) { + if let Some(completion) = self.take_active_inline_completion(cx) { + let mut partial_completion = completion + .text + .chars() + .by_ref() + .take_while(|c| c.is_alphabetic()) + .collect::(); + if partial_completion.is_empty() { + partial_completion = completion + .text + .chars() + .by_ref() + .take_while(|c| c.is_whitespace() || !c.is_alphabetic()) + .collect::(); + } + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: None, + text: partial_completion.clone().into(), + }); + self.insert_with_autoindent_mode(&partial_completion, None, cx); + self.refresh_inline_completion(true, cx); + cx.notify(); + } + } + } + + fn discard_inline_completion( + &mut self, + should_report_inline_completion_event: bool, + cx: &mut ViewContext, + ) -> bool { + if let Some(provider) = self.inline_completion_provider() { + provider.discard(should_report_inline_completion_event, cx); + } + + self.take_active_inline_completion(cx).is_some() + } + + pub fn has_active_inline_completion(&self, cx: &AppContext) -> bool { + if let Some(completion) = self.active_inline_completion.as_ref() { + let buffer = self.buffer.read(cx).read(cx); + completion.position.is_valid(&buffer) + } else { + false + } + } + + fn take_active_inline_completion(&mut self, cx: &mut ViewContext) -> Option { + let completion = self.active_inline_completion.take()?; + self.display_map.update(cx, |map, cx| { + map.splice_inlays(vec![completion.id], Default::default(), cx); + }); + let buffer = self.buffer.read(cx).read(cx); + + if completion.position.is_valid(&buffer) { + Some(completion) + } else { + None + } + } + + fn 
update_visible_inline_completion(&mut self, cx: &mut ViewContext) { + let selection = self.selections.newest_anchor(); + let cursor = selection.head(); + + if self.context_menu.read().is_none() + && self.completion_tasks.is_empty() + && selection.start == selection.end + { + if let Some(provider) = self.inline_completion_provider() { + if let Some((buffer, cursor_buffer_position)) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx) + { + if let Some(text) = + provider.active_completion_text(&buffer, cursor_buffer_position, cx) + { + let text = Rope::from(text); + let mut to_remove = Vec::new(); + if let Some(completion) = self.active_inline_completion.take() { + to_remove.push(completion.id); + } + + let completion_inlay = + Inlay::suggestion(post_inc(&mut self.next_inlay_id), cursor, text); + self.active_inline_completion = Some(completion_inlay.clone()); + self.display_map.update(cx, move |map, cx| { + map.splice_inlays(to_remove, vec![completion_inlay], cx) + }); + cx.notify(); + return; + } + } + } + } + + self.discard_inline_completion(false, cx); + } + + fn inline_completion_provider(&self) -> Option> { + Some(self.inline_completion_provider.as_ref()?.provider.clone()) + } + + fn render_code_actions_indicator( + &self, + _style: &EditorStyle, + row: DisplayRow, + is_active: bool, + cx: &mut ViewContext, + ) -> Option { + if self.available_code_actions.is_some() { + Some( + IconButton::new("code_actions_indicator", ui::IconName::Bolt) + .icon_size(IconSize::XSmall) + .size(ui::ButtonSize::None) + .icon_color(Color::Muted) + .selected(is_active) + .on_click(cx.listener(move |editor, _e, cx| { + editor.focus(cx); + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from_indicator: Some(row), + }, + cx, + ); + })), + ) + } else { + None + } + } + + fn clear_tasks(&mut self) { + self.tasks.clear() + } + + fn insert_tasks(&mut self, key: (BufferId, BufferRow), value: (usize, RunnableTasks)) { + if let Some(_) = self.tasks.insert(key, value) { + // This case should hopefully be rare, but just in case... 
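+ // (`insert` returning `Some` means a runnable was already registered for this
+ // (buffer, row) key, and the new entry has just replaced it.)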
+ log::error!("multiple different run targets found on a single line, only the last target will be rendered") + } + } + + fn render_run_indicator( + &self, + _style: &EditorStyle, + is_active: bool, + row: DisplayRow, + cx: &mut ViewContext, + ) -> IconButton { + IconButton::new(("run_indicator", row.0 as usize), ui::IconName::Play) + .icon_size(IconSize::XSmall) + .size(ui::ButtonSize::None) + .icon_color(Color::Muted) + .selected(is_active) + .on_click(cx.listener(move |editor, _e, cx| { + editor.focus(cx); + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from_indicator: Some(row), + }, + cx, + ); + })) + } + + pub fn render_fold_indicators( + &mut self, + fold_data: Vec>, + _style: &EditorStyle, + gutter_hovered: bool, + _line_height: Pixels, + _gutter_margin: Pixels, + cx: &mut ViewContext, + ) -> Vec> { + fold_data + .iter() + .enumerate() + .map(|(ix, fold_data)| { + fold_data + .map(|(fold_status, buffer_row, active)| { + (active || gutter_hovered || fold_status == FoldStatus::Folded).then(|| { + IconButton::new(ix, ui::IconName::ChevronDown) + .on_click(cx.listener(move |this, _e, cx| match fold_status { + FoldStatus::Folded => { + this.unfold_at(&UnfoldAt { buffer_row }, cx); + } + FoldStatus::Foldable => { + this.fold_at(&FoldAt { buffer_row }, cx); + } + })) + .icon_color(ui::Color::Muted) + .icon_size(ui::IconSize::Small) + .selected(fold_status == FoldStatus::Folded) + .selected_icon(ui::IconName::ChevronRight) + .size(ui::ButtonSize::None) + .into_any_element() + }) + }) + .flatten() + }) + .collect() + } + + pub fn context_menu_visible(&self) -> bool { + self.context_menu + .read() + .as_ref() + .map_or(false, |menu| menu.visible()) + } + + fn render_context_menu( + &self, + cursor_position: DisplayPoint, + style: &EditorStyle, + max_height: Pixels, + cx: &mut ViewContext, + ) -> Option<(ContextMenuOrigin, AnyElement)> { + self.context_menu.read().as_ref().map(|menu| { + menu.render( + cursor_position, + style, + max_height, + self.workspace.as_ref().map(|(w, _)| w.clone()), + cx, + ) + }) + } + + fn hide_context_menu(&mut self, cx: &mut ViewContext) -> Option { + cx.notify(); + self.completion_tasks.clear(); + let context_menu = self.context_menu.write().take(); + if context_menu.is_some() { + self.update_visible_inline_completion(cx); + } + context_menu + } + + pub fn insert_snippet( + &mut self, + insertion_ranges: &[Range], + snippet: Snippet, + cx: &mut ViewContext, + ) -> Result<()> { + struct Tabstop { + is_end_tabstop: bool, + ranges: Vec>, + } + + let tabstops = self.buffer.update(cx, |buffer, cx| { + let snippet_text: Arc = snippet.text.clone().into(); + buffer.edit( + insertion_ranges + .iter() + .cloned() + .map(|range| (range, snippet_text.clone())), + Some(AutoindentMode::EachLine), + cx, + ); + + let snapshot = &*buffer.read(cx); + let snippet = &snippet; + snippet + .tabstops + .iter() + .map(|tabstop| { + let is_end_tabstop = tabstop.first().map_or(false, |tabstop| { + tabstop.is_empty() && tabstop.start == snippet.text.len() as isize + }); + let mut tabstop_ranges = tabstop + .iter() + .flat_map(|tabstop_range| { + let mut delta = 0_isize; + insertion_ranges.iter().map(move |insertion_range| { + let insertion_start = insertion_range.start as isize + delta; + delta += + snippet.text.len() as isize - insertion_range.len() as isize; + + let start = ((insertion_start + tabstop_range.start) as usize) + .min(snapshot.len()); + let end = ((insertion_start + tabstop_range.end) as usize) + .min(snapshot.len()); + 
snapshot.anchor_before(start)..snapshot.anchor_after(end) + }) + }) + .collect::>(); + tabstop_ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start, snapshot)); + + Tabstop { + is_end_tabstop, + ranges: tabstop_ranges, + } + }) + .collect::>() + }); + + if let Some(tabstop) = tabstops.first() { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges(tabstop.ranges.iter().cloned()); + }); + + // If we're already at the last tabstop and it's at the end of the snippet, + // we're done, we don't need to keep the state around. + if !tabstop.is_end_tabstop { + let ranges = tabstops + .into_iter() + .map(|tabstop| tabstop.ranges) + .collect::>(); + self.snippet_stack.push(SnippetState { + active_index: 0, + ranges, + }); + } + + // Check whether the just-entered snippet ends with an auto-closable bracket. + if self.autoclose_regions.is_empty() { + let snapshot = self.buffer.read(cx).snapshot(cx); + for selection in &mut self.selections.all::(cx) { + let selection_head = selection.head(); + let Some(scope) = snapshot.language_scope_at(selection_head) else { + continue; + }; + + let mut bracket_pair = None; + let next_chars = snapshot.chars_at(selection_head).collect::(); + let prev_chars = snapshot + .reversed_chars_at(selection_head) + .collect::(); + for (pair, enabled) in scope.brackets() { + if enabled + && pair.close + && prev_chars.starts_with(pair.start.as_str()) + && next_chars.starts_with(pair.end.as_str()) + { + bracket_pair = Some(pair.clone()); + break; + } + } + if let Some(pair) = bracket_pair { + let start = snapshot.anchor_after(selection_head); + let end = snapshot.anchor_after(selection_head); + self.autoclose_regions.push(AutocloseRegion { + selection_id: selection.id, + range: start..end, + pair, + }); + } + } + } + } + Ok(()) + } + + pub fn move_to_next_snippet_tabstop(&mut self, cx: &mut ViewContext) -> bool { + self.move_to_snippet_tabstop(Bias::Right, cx) + } + + pub fn move_to_prev_snippet_tabstop(&mut self, cx: &mut ViewContext) -> bool { + self.move_to_snippet_tabstop(Bias::Left, cx) + } + + pub fn move_to_snippet_tabstop(&mut self, bias: Bias, cx: &mut ViewContext) -> bool { + if let Some(mut snippet) = self.snippet_stack.pop() { + match bias { + Bias::Left => { + if snippet.active_index > 0 { + snippet.active_index -= 1; + } else { + self.snippet_stack.push(snippet); + return false; + } + } + Bias::Right => { + if snippet.active_index + 1 < snippet.ranges.len() { + snippet.active_index += 1; + } else { + self.snippet_stack.push(snippet); + return false; + } + } + } + if let Some(current_ranges) = snippet.ranges.get(snippet.active_index) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_anchor_ranges(current_ranges.iter().cloned()) + }); + // If snippet state is not at the last tabstop, push it back on the stack + if snippet.active_index + 1 < snippet.ranges.len() { + self.snippet_stack.push(snippet); + } + return true; + } + } + + false + } + + pub fn clear(&mut self, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.select_all(&SelectAll, cx); + this.insert("", cx); + }); + } + + pub fn backspace(&mut self, _: &Backspace, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.select_autoclose_pair(cx); + let mut selections = this.selections.all::(cx); + if !this.selections.line_mode { + let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + for selection in &mut selections { + if selection.is_empty() { + let old_head = selection.head(); + let mut new_head = + 
movement::left(&display_map, old_head.to_display_point(&display_map)) + .to_point(&display_map); + if let Some((buffer, line_buffer_range)) = display_map + .buffer_snapshot + .buffer_line_for_row(MultiBufferRow(old_head.row)) + { + let indent_size = + buffer.indent_size_for_line(line_buffer_range.start.row); + let indent_len = match indent_size.kind { + IndentKind::Space => { + buffer.settings_at(line_buffer_range.start, cx).tab_size + } + IndentKind::Tab => NonZeroU32::new(1).unwrap(), + }; + if old_head.column <= indent_size.len && old_head.column > 0 { + let indent_len = indent_len.get(); + new_head = cmp::min( + new_head, + MultiBufferPoint::new( + old_head.row, + ((old_head.column - 1) / indent_len) * indent_len, + ), + ); + } + } + + selection.set_head(new_head, SelectionGoal::None); + } + } + } + + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections)); + this.insert("", cx); + this.refresh_inline_completion(true, cx); + }); + } + + pub fn delete(&mut self, _: &Delete, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if selection.is_empty() && !line_mode { + let cursor = movement::right(map, selection.head()); + selection.end = cursor; + selection.reversed = true; + selection.goal = SelectionGoal::None; + } + }) + }); + this.insert("", cx); + this.refresh_inline_completion(true, cx); + }); + } + + pub fn tab_prev(&mut self, _: &TabPrev, cx: &mut ViewContext) { + if self.move_to_prev_snippet_tabstop(cx) { + return; + } + + self.outdent(&Outdent, cx); + } + + pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext) { + if self.move_to_next_snippet_tabstop(cx) || self.read_only(cx) { + return; + } + + let mut selections = self.selections.all_adjusted(cx); + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + let rows_iter = selections.iter().map(|s| s.head().row); + let suggested_indents = snapshot.suggested_indents(rows_iter, cx); + + let mut edits = Vec::new(); + let mut prev_edited_row = 0; + let mut row_delta = 0; + for selection in &mut selections { + if selection.start.row != prev_edited_row { + row_delta = 0; + } + prev_edited_row = selection.end.row; + + // If the selection is non-empty, then increase the indentation of the selected lines. + if !selection.is_empty() { + row_delta = + Self::indent_selection(buffer, &snapshot, selection, &mut edits, row_delta, cx); + continue; + } + + // If the selection is empty and the cursor is in the leading whitespace before the + // suggested indentation, then auto-indent the line. + let cursor = selection.head(); + let current_indent = snapshot.indent_size_for_line(MultiBufferRow(cursor.row)); + if let Some(suggested_indent) = + suggested_indents.get(&MultiBufferRow(cursor.row)).copied() + { + if cursor.column < suggested_indent.len + && cursor.column <= current_indent.len + && current_indent.len <= suggested_indent.len + { + selection.start = Point::new(cursor.row, suggested_indent.len); + selection.end = selection.start; + if row_delta == 0 { + edits.extend(Buffer::edit_for_indent_size_adjustment( + cursor.row, + current_indent, + suggested_indent, + )); + row_delta = suggested_indent.len - current_indent.len; + } + continue; + } + } + + // Accept copilot completion if there is only one selection and the cursor is not + // in the leading whitespace. 
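+ // Here `cursor.column >= current_indent.len` means the cursor sits at or past the first
+ // non-whitespace character, so Tab is treated as "accept the suggestion" rather than indent.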
+ if self.selections.count() == 1 + && cursor.column >= current_indent.len + && self.has_active_inline_completion(cx) + { + self.accept_inline_completion(cx); + return; + } + + // Otherwise, insert a hard or soft tab. + let settings = buffer.settings_at(cursor, cx); + let tab_size = if settings.hard_tabs { + IndentSize::tab() + } else { + let tab_size = settings.tab_size.get(); + let char_column = snapshot + .text_for_range(Point::new(cursor.row, 0)..cursor) + .flat_map(str::chars) + .count() + + row_delta as usize; + let chars_to_next_tab_stop = tab_size - (char_column as u32 % tab_size); + IndentSize::spaces(chars_to_next_tab_stop) + }; + selection.start = Point::new(cursor.row, cursor.column + row_delta + tab_size.len); + selection.end = selection.start; + edits.push((cursor..cursor, tab_size.chars().collect::())); + row_delta += tab_size.len; + } + + self.transact(cx, |this, cx| { + this.buffer.update(cx, |b, cx| b.edit(edits, None, cx)); + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections)); + this.refresh_inline_completion(true, cx); + }); + } + + pub fn indent(&mut self, _: &Indent, cx: &mut ViewContext) { + if self.read_only(cx) { + return; + } + let mut selections = self.selections.all::(cx); + let mut prev_edited_row = 0; + let mut row_delta = 0; + let mut edits = Vec::new(); + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + for selection in &mut selections { + if selection.start.row != prev_edited_row { + row_delta = 0; + } + prev_edited_row = selection.end.row; + + row_delta = + Self::indent_selection(buffer, &snapshot, selection, &mut edits, row_delta, cx); + } + + self.transact(cx, |this, cx| { + this.buffer.update(cx, |b, cx| b.edit(edits, None, cx)); + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections)); + }); + } + + fn indent_selection( + buffer: &MultiBuffer, + snapshot: &MultiBufferSnapshot, + selection: &mut Selection, + edits: &mut Vec<(Range, String)>, + delta_for_start_row: u32, + cx: &AppContext, + ) -> u32 { + let settings = buffer.settings_at(selection.start, cx); + let tab_size = settings.tab_size.get(); + let indent_kind = if settings.hard_tabs { + IndentKind::Tab + } else { + IndentKind::Space + }; + let mut start_row = selection.start.row; + let mut end_row = selection.end.row + 1; + + // If a selection ends at the beginning of a line, don't indent + // that last line. + if selection.end.column == 0 && selection.end.row > selection.start.row { + end_row -= 1; + } + + // Avoid re-indenting a row that has already been indented by a + // previous selection, but still update this selection's column + // to reflect that indentation. 
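+ // For example, if a previous selection already added four columns of indentation to this
+ // selection's first row, skip that row and shift this selection's start (and its end, when
+ // both are on the same row) right by those four columns.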
+ if delta_for_start_row > 0 { + start_row += 1; + selection.start.column += delta_for_start_row; + if selection.end.row == selection.start.row { + selection.end.column += delta_for_start_row; + } + } + + let mut delta_for_end_row = 0; + let has_multiple_rows = start_row + 1 != end_row; + for row in start_row..end_row { + let current_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); + let indent_delta = match (current_indent.kind, indent_kind) { + (IndentKind::Space, IndentKind::Space) => { + let columns_to_next_tab_stop = tab_size - (current_indent.len % tab_size); + IndentSize::spaces(columns_to_next_tab_stop) + } + (IndentKind::Tab, IndentKind::Space) => IndentSize::spaces(tab_size), + (_, IndentKind::Tab) => IndentSize::tab(), + }; + + let start = if has_multiple_rows || current_indent.len < selection.start.column { + 0 + } else { + selection.start.column + }; + let row_start = Point::new(row, start); + edits.push(( + row_start..row_start, + indent_delta.chars().collect::(), + )); + + // Update this selection's endpoints to reflect the indentation. + if row == selection.start.row { + selection.start.column += indent_delta.len; + } + if row == selection.end.row { + selection.end.column += indent_delta.len; + delta_for_end_row = indent_delta.len; + } + } + + if selection.start.row == selection.end.row { + delta_for_start_row + delta_for_end_row + } else { + delta_for_end_row + } + } + + pub fn outdent(&mut self, _: &Outdent, cx: &mut ViewContext) { + if self.read_only(cx) { + return; + } + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all::(cx); + let mut deletion_ranges = Vec::new(); + let mut last_outdent = None; + { + let buffer = self.buffer.read(cx); + let snapshot = buffer.snapshot(cx); + for selection in &selections { + let settings = buffer.settings_at(selection.start, cx); + let tab_size = settings.tab_size.get(); + let mut rows = selection.spanned_rows(false, &display_map); + + // Avoid re-outdenting a row that has already been outdented by a + // previous selection. 
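+ // i.e. if the previous selection's last outdented row is also this selection's first row,
+ // start outdenting one row further down.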
+ if let Some(last_row) = last_outdent { + if last_row == rows.start { + rows.start = rows.start.next_row(); + } + } + let has_multiple_rows = rows.len() > 1; + for row in rows.iter_rows() { + let indent_size = snapshot.indent_size_for_line(row); + if indent_size.len > 0 { + let deletion_len = match indent_size.kind { + IndentKind::Space => { + let columns_to_prev_tab_stop = indent_size.len % tab_size; + if columns_to_prev_tab_stop == 0 { + tab_size + } else { + columns_to_prev_tab_stop + } + } + IndentKind::Tab => 1, + }; + let start = if has_multiple_rows + || deletion_len > selection.start.column + || indent_size.len < selection.start.column + { + 0 + } else { + selection.start.column - deletion_len + }; + deletion_ranges.push( + Point::new(row.0, start)..Point::new(row.0, start + deletion_len), + ); + last_outdent = Some(row); + } + } + } + } + + self.transact(cx, |this, cx| { + this.buffer.update(cx, |buffer, cx| { + let empty_str: Arc = "".into(); + buffer.edit( + deletion_ranges + .into_iter() + .map(|range| (range, empty_str.clone())), + None, + cx, + ); + }); + let selections = this.selections.all::(cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections)); + }); + } + + pub fn delete_line(&mut self, _: &DeleteLine, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all::(cx); + + let mut new_cursors = Vec::new(); + let mut edit_ranges = Vec::new(); + let mut selections = selections.iter().peekable(); + while let Some(selection) = selections.next() { + let mut rows = selection.spanned_rows(false, &display_map); + let goal_display_column = selection.head().to_display_point(&display_map).column(); + + // Accumulate contiguous regions of rows that we want to delete. + while let Some(next_selection) = selections.peek() { + let next_rows = next_selection.spanned_rows(false, &display_map); + if next_rows.start <= rows.end { + rows.end = next_rows.end; + selections.next().unwrap(); + } else { + break; + } + } + + let buffer = &display_map.buffer_snapshot; + let mut edit_start = Point::new(rows.start.0, 0).to_offset(buffer); + let edit_end; + let cursor_buffer_row; + if buffer.max_point().row >= rows.end.0 { + // If there's a line after the range, delete the \n from the end of the row range + // and position the cursor on the next line. + edit_end = Point::new(rows.end.0, 0).to_offset(buffer); + cursor_buffer_row = rows.end; + } else { + // If there isn't a line after the range, delete the \n from the line before the + // start of the row range and position the cursor there. 
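+ // e.g. deleting the last line of the buffer removes the newline that precedes it, and the
+ // cursor ends up on the row just above the deleted range.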
+ edit_start = edit_start.saturating_sub(1); + edit_end = buffer.len(); + cursor_buffer_row = rows.start.previous_row(); + } + + let mut cursor = Point::new(cursor_buffer_row.0, 0).to_display_point(&display_map); + *cursor.column_mut() = + cmp::min(goal_display_column, display_map.line_len(cursor.row())); + + new_cursors.push(( + selection.id, + buffer.anchor_after(cursor.to_point(&display_map)), + )); + edit_ranges.push(edit_start..edit_end); + } + + self.transact(cx, |this, cx| { + let buffer = this.buffer.update(cx, |buffer, cx| { + let empty_str: Arc = "".into(); + buffer.edit( + edit_ranges + .into_iter() + .map(|range| (range, empty_str.clone())), + None, + cx, + ); + buffer.snapshot(cx) + }); + let new_selections = new_cursors + .into_iter() + .map(|(id, cursor)| { + let cursor = cursor.to_point(&buffer); + Selection { + id, + start: cursor, + end: cursor, + reversed: false, + goal: SelectionGoal::None, + } + }) + .collect(); + + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(new_selections); + }); + }); + } + + pub fn join_lines(&mut self, _: &JoinLines, cx: &mut ViewContext) { + if self.read_only(cx) { + return; + } + let mut row_ranges = Vec::>::new(); + for selection in self.selections.all::(cx) { + let start = MultiBufferRow(selection.start.row); + let end = if selection.start.row == selection.end.row { + MultiBufferRow(selection.start.row + 1) + } else { + MultiBufferRow(selection.end.row) + }; + + if let Some(last_row_range) = row_ranges.last_mut() { + if start <= last_row_range.end { + last_row_range.end = end; + continue; + } + } + row_ranges.push(start..end); + } + + let snapshot = self.buffer.read(cx).snapshot(cx); + let mut cursor_positions = Vec::new(); + for row_range in &row_ranges { + let anchor = snapshot.anchor_before(Point::new( + row_range.end.previous_row().0, + snapshot.line_len(row_range.end.previous_row()), + )); + cursor_positions.push(anchor..anchor); + } + + self.transact(cx, |this, cx| { + for row_range in row_ranges.into_iter().rev() { + for row in row_range.iter_rows().rev() { + let end_of_line = Point::new(row.0, snapshot.line_len(row)); + let next_line_row = row.next_row(); + let indent = snapshot.indent_size_for_line(next_line_row); + let start_of_next_line = Point::new(next_line_row.0, indent.len); + + let replace = if snapshot.line_len(next_line_row) > indent.len { + " " + } else { + "" + }; + + this.buffer.update(cx, |buffer, cx| { + buffer.edit([(end_of_line..start_of_next_line, replace)], None, cx) + }); + } + } + + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_anchor_ranges(cursor_positions) + }); + }); + } + + pub fn sort_lines_case_sensitive( + &mut self, + _: &SortLinesCaseSensitive, + cx: &mut ViewContext, + ) { + self.manipulate_lines(cx, |lines| lines.sort()) + } + + pub fn sort_lines_case_insensitive( + &mut self, + _: &SortLinesCaseInsensitive, + cx: &mut ViewContext, + ) { + self.manipulate_lines(cx, |lines| lines.sort_by_key(|line| line.to_lowercase())) + } + + pub fn unique_lines_case_insensitive( + &mut self, + _: &UniqueLinesCaseInsensitive, + cx: &mut ViewContext, + ) { + self.manipulate_lines(cx, |lines| { + let mut seen = HashSet::default(); + lines.retain(|line| seen.insert(line.to_lowercase())); + }) + } + + pub fn unique_lines_case_sensitive( + &mut self, + _: &UniqueLinesCaseSensitive, + cx: &mut ViewContext, + ) { + self.manipulate_lines(cx, |lines| { + let mut seen = HashSet::default(); + lines.retain(|line| seen.insert(*line)); + }) + } + + pub fn revert_selected_hunks(&mut 
self, _: &RevertSelectedHunks, cx: &mut ViewContext) { + let revert_changes = self.gather_revert_changes(&self.selections.disjoint_anchors(), cx); + if !revert_changes.is_empty() { + self.transact(cx, |editor, cx| { + editor.buffer().update(cx, |multi_buffer, cx| { + for (buffer_id, changes) in revert_changes { + if let Some(buffer) = multi_buffer.buffer(buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.edit( + changes.into_iter().map(|(range, text)| { + (range, text.to_string().map(Arc::::from)) + }), + None, + cx, + ); + }); + } + } + }); + editor.change_selections(None, cx, |selections| selections.refresh()); + }); + } + } + + pub fn open_active_item_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { + if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let project_path = buffer.read(cx).project_path(cx)?; + let project = self.project.as_ref()?.read(cx); + let entry = project.entry_for_path(&project_path, cx)?; + let abs_path = project.absolute_path(&project_path, cx)?; + let parent = if entry.is_symlink { + abs_path.canonicalize().ok()? + } else { + abs_path + } + .parent()? + .to_path_buf(); + Some(parent) + }) { + cx.dispatch_action(OpenTerminal { working_directory }.boxed_clone()); + } + } + + fn gather_revert_changes( + &mut self, + selections: &[Selection], + cx: &mut ViewContext<'_, Editor>, + ) -> HashMap, Rope)>> { + let mut revert_changes = HashMap::default(); + self.buffer.update(cx, |multi_buffer, cx| { + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + for hunk in hunks_for_selections(&multi_buffer_snapshot, selections) { + Self::prepare_revert_change(&mut revert_changes, &multi_buffer, &hunk, cx); + } + }); + revert_changes + } + + fn prepare_revert_change( + revert_changes: &mut HashMap, Rope)>>, + multi_buffer: &MultiBuffer, + hunk: &DiffHunk, + cx: &mut AppContext, + ) -> Option<()> { + let buffer = multi_buffer.buffer(hunk.buffer_id)?; + let buffer = buffer.read(cx); + let original_text = buffer.diff_base()?.slice(hunk.diff_base_byte_range.clone()); + let buffer_snapshot = buffer.snapshot(); + let buffer_revert_changes = revert_changes.entry(buffer.remote_id()).or_default(); + if let Err(i) = buffer_revert_changes.binary_search_by(|probe| { + probe + .0 + .start + .cmp(&hunk.buffer_range.start, &buffer_snapshot) + .then(probe.0.end.cmp(&hunk.buffer_range.end, &buffer_snapshot)) + }) { + buffer_revert_changes.insert(i, (hunk.buffer_range.clone(), original_text)); + Some(()) + } else { + None + } + } + + pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext) { + self.manipulate_lines(cx, |lines| lines.reverse()) + } + + pub fn shuffle_lines(&mut self, _: &ShuffleLines, cx: &mut ViewContext) { + self.manipulate_lines(cx, |lines| lines.shuffle(&mut thread_rng())) + } + + fn manipulate_lines(&mut self, cx: &mut ViewContext, mut callback: Fn) + where + Fn: FnMut(&mut Vec<&str>), + { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = self.buffer.read(cx).snapshot(cx); + + let mut edits = Vec::new(); + + let selections = self.selections.all::(cx); + let mut selections = selections.iter().peekable(); + let mut contiguous_row_selections = Vec::new(); + let mut new_selections = Vec::new(); + let mut added_lines = 0; + let mut removed_lines = 0; + + while let Some(selection) = selections.next() { + let (start_row, end_row) = consume_contiguous_rows( + &mut contiguous_row_selections, + selection, + &display_map, + &mut selections, + ); + + let start_point = 
Point::new(start_row.0, 0); + let end_point = Point::new( + end_row.previous_row().0, + buffer.line_len(end_row.previous_row()), + ); + let text = buffer + .text_for_range(start_point..end_point) + .collect::(); + + let mut lines = text.split('\n').collect_vec(); + + let lines_before = lines.len(); + callback(&mut lines); + let lines_after = lines.len(); + + edits.push((start_point..end_point, lines.join("\n"))); + + // Selections must change based on added and removed line count + let start_row = + MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32); + let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32); + new_selections.push(Selection { + id: selection.id, + start: start_row, + end: end_row, + goal: SelectionGoal::None, + reversed: selection.reversed, + }); + + if lines_after > lines_before { + added_lines += lines_after - lines_before; + } else if lines_before > lines_after { + removed_lines += lines_before - lines_after; + } + } + + self.transact(cx, |this, cx| { + let buffer = this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + buffer.snapshot(cx) + }); + + // Recalculate offsets on newly edited buffer + let new_selections = new_selections + .iter() + .map(|s| { + let start_point = Point::new(s.start.0, 0); + let end_point = Point::new(s.end.0, buffer.line_len(s.end)); + Selection { + id: s.id, + start: buffer.point_to_offset(start_point), + end: buffer.point_to_offset(end_point), + goal: s.goal, + reversed: s.reversed, + } + }) + .collect(); + + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(new_selections); + }); + + this.request_autoscroll(Autoscroll::fit(), cx); + }); + } + + pub fn convert_to_upper_case(&mut self, _: &ConvertToUpperCase, cx: &mut ViewContext) { + self.manipulate_text(cx, |text| text.to_uppercase()) + } + + pub fn convert_to_lower_case(&mut self, _: &ConvertToLowerCase, cx: &mut ViewContext) { + self.manipulate_text(cx, |text| text.to_lowercase()) + } + + pub fn convert_to_title_case(&mut self, _: &ConvertToTitleCase, cx: &mut ViewContext) { + self.manipulate_text(cx, |text| { + // Hack to get around the fact that to_case crate doesn't support '\n' as a word boundary + // https://github.com/rutrum/convert-case/issues/16 + text.split('\n') + .map(|line| line.to_case(Case::Title)) + .join("\n") + }) + } + + pub fn convert_to_snake_case(&mut self, _: &ConvertToSnakeCase, cx: &mut ViewContext) { + self.manipulate_text(cx, |text| text.to_case(Case::Snake)) + } + + pub fn convert_to_kebab_case(&mut self, _: &ConvertToKebabCase, cx: &mut ViewContext) { + self.manipulate_text(cx, |text| text.to_case(Case::Kebab)) + } + + pub fn convert_to_upper_camel_case( + &mut self, + _: &ConvertToUpperCamelCase, + cx: &mut ViewContext, + ) { + self.manipulate_text(cx, |text| { + // Hack to get around the fact that to_case crate doesn't support '\n' as a word boundary + // https://github.com/rutrum/convert-case/issues/16 + text.split('\n') + .map(|line| line.to_case(Case::UpperCamel)) + .join("\n") + }) + } + + pub fn convert_to_lower_camel_case( + &mut self, + _: &ConvertToLowerCamelCase, + cx: &mut ViewContext, + ) { + self.manipulate_text(cx, |text| text.to_case(Case::Camel)) + } + + pub fn convert_to_opposite_case( + &mut self, + _: &ConvertToOppositeCase, + cx: &mut ViewContext, + ) { + self.manipulate_text(cx, |text| { + text.chars() + .fold(String::with_capacity(text.len()), |mut t, c| { + if c.is_uppercase() { + t.extend(c.to_lowercase()); + } else { + t.extend(c.to_uppercase()); + } 
+ t + }) + }) + } + + fn manipulate_text(&mut self, cx: &mut ViewContext, mut callback: Fn) + where + Fn: FnMut(&str) -> String, + { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = self.buffer.read(cx).snapshot(cx); + + let mut new_selections = Vec::new(); + let mut edits = Vec::new(); + let mut selection_adjustment = 0i32; + + for selection in self.selections.all::(cx) { + let selection_is_empty = selection.is_empty(); + + let (start, end) = if selection_is_empty { + let word_range = movement::surrounding_word( + &display_map, + selection.start.to_display_point(&display_map), + ); + let start = word_range.start.to_offset(&display_map, Bias::Left); + let end = word_range.end.to_offset(&display_map, Bias::Left); + (start, end) + } else { + (selection.start, selection.end) + }; + + let text = buffer.text_for_range(start..end).collect::(); + let old_length = text.len() as i32; + let text = callback(&text); + + new_selections.push(Selection { + start: (start as i32 - selection_adjustment) as usize, + end: ((start + text.len()) as i32 - selection_adjustment) as usize, + goal: SelectionGoal::None, + ..selection + }); + + selection_adjustment += old_length - text.len() as i32; + + edits.push((start..end, text)); + } + + self.transact(cx, |this, cx| { + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(new_selections); + }); + + this.request_autoscroll(Autoscroll::fit(), cx); + }); + } + + pub fn duplicate_line(&mut self, upwards: bool, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let selections = self.selections.all::(cx); + + let mut edits = Vec::new(); + let mut selections_iter = selections.iter().peekable(); + while let Some(selection) = selections_iter.next() { + // Avoid duplicating the same lines twice. + let mut rows = selection.spanned_rows(false, &display_map); + + while let Some(next_selection) = selections_iter.peek() { + let next_rows = next_selection.spanned_rows(false, &display_map); + if next_rows.start < rows.end { + rows.end = next_rows.end; + selections_iter.next().unwrap(); + } else { + break; + } + } + + // Copy the text from the selected row region and splice it either at the start + // or end of the region. 
+ let start = Point::new(rows.start.0, 0); + let end = Point::new( + rows.end.previous_row().0, + buffer.line_len(rows.end.previous_row()), + ); + let text = buffer + .text_for_range(start..end) + .chain(Some("\n")) + .collect::(); + let insert_location = if upwards { + Point::new(rows.end.0, 0) + } else { + start + }; + edits.push((insert_location..insert_location, text)); + } + + self.transact(cx, |this, cx| { + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + + this.request_autoscroll(Autoscroll::fit(), cx); + }); + } + + pub fn duplicate_line_up(&mut self, _: &DuplicateLineUp, cx: &mut ViewContext) { + self.duplicate_line(true, cx); + } + + pub fn duplicate_line_down(&mut self, _: &DuplicateLineDown, cx: &mut ViewContext) { + self.duplicate_line(false, cx); + } + + pub fn move_line_up(&mut self, _: &MoveLineUp, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = self.buffer.read(cx).snapshot(cx); + + let mut edits = Vec::new(); + let mut unfold_ranges = Vec::new(); + let mut refold_ranges = Vec::new(); + + let selections = self.selections.all::(cx); + let mut selections = selections.iter().peekable(); + let mut contiguous_row_selections = Vec::new(); + let mut new_selections = Vec::new(); + + while let Some(selection) = selections.next() { + // Find all the selections that span a contiguous row range + let (start_row, end_row) = consume_contiguous_rows( + &mut contiguous_row_selections, + selection, + &display_map, + &mut selections, + ); + + // Move the text spanned by the row range to be before the line preceding the row range + if start_row.0 > 0 { + let range_to_move = Point::new( + start_row.previous_row().0, + buffer.line_len(start_row.previous_row()), + ) + ..Point::new( + end_row.previous_row().0, + buffer.line_len(end_row.previous_row()), + ); + let insertion_point = display_map + .prev_line_boundary(Point::new(start_row.previous_row().0, 0)) + .0; + + // Don't move lines across excerpts + if buffer + .excerpt_boundaries_in_range(( + Bound::Excluded(insertion_point), + Bound::Included(range_to_move.end), + )) + .next() + .is_none() + { + let text = buffer + .text_for_range(range_to_move.clone()) + .flat_map(|s| s.chars()) + .skip(1) + .chain(['\n']) + .collect::(); + + edits.push(( + buffer.anchor_after(range_to_move.start) + ..buffer.anchor_before(range_to_move.end), + String::new(), + )); + let insertion_anchor = buffer.anchor_after(insertion_point); + edits.push((insertion_anchor..insertion_anchor, text)); + + let row_delta = range_to_move.start.row - insertion_point.row + 1; + + // Move selections up + new_selections.extend(contiguous_row_selections.drain(..).map( + |mut selection| { + selection.start.row -= row_delta; + selection.end.row -= row_delta; + selection + }, + )); + + // Move folds up + unfold_ranges.push(range_to_move.clone()); + for fold in display_map.folds_in_range( + buffer.anchor_before(range_to_move.start) + ..buffer.anchor_after(range_to_move.end), + ) { + let mut start = fold.range.start.to_point(&buffer); + let mut end = fold.range.end.to_point(&buffer); + start.row -= row_delta; + end.row -= row_delta; + refold_ranges.push(start..end); + } + } + } + + // If we didn't move line(s), preserve the existing selections + new_selections.append(&mut contiguous_row_selections); + } + + self.transact(cx, |this, cx| { + this.unfold_ranges(unfold_ranges, true, true, cx); + this.buffer.update(cx, |buffer, cx| { + for (range, text) in edits { + buffer.edit([(range, text)], 
None, cx); + } + }); + this.fold_ranges(refold_ranges, true, cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(new_selections); + }) + }); + } + + pub fn move_line_down(&mut self, _: &MoveLineDown, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = self.buffer.read(cx).snapshot(cx); + + let mut edits = Vec::new(); + let mut unfold_ranges = Vec::new(); + let mut refold_ranges = Vec::new(); + + let selections = self.selections.all::(cx); + let mut selections = selections.iter().peekable(); + let mut contiguous_row_selections = Vec::new(); + let mut new_selections = Vec::new(); + + while let Some(selection) = selections.next() { + // Find all the selections that span a contiguous row range + let (start_row, end_row) = consume_contiguous_rows( + &mut contiguous_row_selections, + selection, + &display_map, + &mut selections, + ); + + // Move the text spanned by the row range to be after the last line of the row range + if end_row.0 <= buffer.max_point().row { + let range_to_move = + MultiBufferPoint::new(start_row.0, 0)..MultiBufferPoint::new(end_row.0, 0); + let insertion_point = display_map + .next_line_boundary(MultiBufferPoint::new(end_row.0, 0)) + .0; + + // Don't move lines across excerpt boundaries + if buffer + .excerpt_boundaries_in_range(( + Bound::Excluded(range_to_move.start), + Bound::Included(insertion_point), + )) + .next() + .is_none() + { + let mut text = String::from("\n"); + text.extend(buffer.text_for_range(range_to_move.clone())); + text.pop(); // Drop trailing newline + edits.push(( + buffer.anchor_after(range_to_move.start) + ..buffer.anchor_before(range_to_move.end), + String::new(), + )); + let insertion_anchor = buffer.anchor_after(insertion_point); + edits.push((insertion_anchor..insertion_anchor, text)); + + let row_delta = insertion_point.row - range_to_move.end.row + 1; + + // Move selections down + new_selections.extend(contiguous_row_selections.drain(..).map( + |mut selection| { + selection.start.row += row_delta; + selection.end.row += row_delta; + selection + }, + )); + + // Move folds down + unfold_ranges.push(range_to_move.clone()); + for fold in display_map.folds_in_range( + buffer.anchor_before(range_to_move.start) + ..buffer.anchor_after(range_to_move.end), + ) { + let mut start = fold.range.start.to_point(&buffer); + let mut end = fold.range.end.to_point(&buffer); + start.row += row_delta; + end.row += row_delta; + refold_ranges.push(start..end); + } + } + } + + // If we didn't move line(s), preserve the existing selections + new_selections.append(&mut contiguous_row_selections); + } + + self.transact(cx, |this, cx| { + this.unfold_ranges(unfold_ranges, true, true, cx); + this.buffer.update(cx, |buffer, cx| { + for (range, text) in edits { + buffer.edit([(range, text)], None, cx); + } + }); + this.fold_ranges(refold_ranges, true, cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(new_selections)); + }); + } + + pub fn transpose(&mut self, _: &Transpose, cx: &mut ViewContext) { + let text_layout_details = &self.text_layout_details(cx); + self.transact(cx, |this, cx| { + let edits = this.change_selections(Some(Autoscroll::fit()), cx, |s| { + let mut edits: Vec<(Range, String)> = Default::default(); + let line_mode = s.line_mode; + s.move_with(|display_map, selection| { + if !selection.is_empty() || line_mode { + return; + } + + let mut head = selection.head(); + let mut transpose_offset = head.to_offset(display_map, Bias::Right); + if 
head.column() == display_map.line_len(head.row()) { + transpose_offset = display_map + .buffer_snapshot + .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); + } + + if transpose_offset == 0 { + return; + } + + *head.column_mut() += 1; + head = display_map.clip_point(head, Bias::Right); + let goal = SelectionGoal::HorizontalPosition( + display_map + .x_for_display_point(head, &text_layout_details) + .into(), + ); + selection.collapse_to(head, goal); + + let transpose_start = display_map + .buffer_snapshot + .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); + if edits.last().map_or(true, |e| e.0.end <= transpose_start) { + let transpose_end = display_map + .buffer_snapshot + .clip_offset(transpose_offset + 1, Bias::Right); + if let Some(ch) = + display_map.buffer_snapshot.chars_at(transpose_start).next() + { + edits.push((transpose_start..transpose_offset, String::new())); + edits.push((transpose_end..transpose_end, ch.to_string())); + } + } + }); + edits + }); + this.buffer + .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); + let selections = this.selections.all::(cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections); + }); + }); + } + + pub fn cut(&mut self, _: &Cut, cx: &mut ViewContext) { + let mut text = String::new(); + let buffer = self.buffer.read(cx).snapshot(cx); + let mut selections = self.selections.all::(cx); + let mut clipboard_selections = Vec::with_capacity(selections.len()); + { + let max_point = buffer.max_point(); + let mut is_first = true; + for selection in &mut selections { + let is_entire_line = selection.is_empty() || self.selections.line_mode; + if is_entire_line { + selection.start = Point::new(selection.start.row, 0); + selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); + selection.goal = SelectionGoal::None; + } + if is_first { + is_first = false; + } else { + text += "\n"; + } + let mut len = 0; + for chunk in buffer.text_for_range(selection.start..selection.end) { + text.push_str(chunk); + len += chunk.len(); + } + clipboard_selections.push(ClipboardSelection { + len, + is_entire_line, + first_line_indent: buffer + .indent_size_for_line(MultiBufferRow(selection.start.row)) + .len, + }); + } + } + + self.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections); + }); + this.insert("", cx); + cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections)); + }); + } + + pub fn copy(&mut self, _: &Copy, cx: &mut ViewContext) { + let selections = self.selections.all::(cx); + let buffer = self.buffer.read(cx).read(cx); + let mut text = String::new(); + + let mut clipboard_selections = Vec::with_capacity(selections.len()); + { + let max_point = buffer.max_point(); + let mut is_first = true; + for selection in selections.iter() { + let mut start = selection.start; + let mut end = selection.end; + let is_entire_line = selection.is_empty() || self.selections.line_mode; + if is_entire_line { + start = Point::new(start.row, 0); + end = cmp::min(max_point, Point::new(end.row + 1, 0)); + } + if is_first { + is_first = false; + } else { + text += "\n"; + } + let mut len = 0; + for chunk in buffer.text_for_range(start..end) { + text.push_str(chunk); + len += chunk.len(); + } + clipboard_selections.push(ClipboardSelection { + len, + is_entire_line, + first_line_indent: buffer.indent_size_for_line(MultiBufferRow(start.row)).len, + }); + } + } + + 
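+        // Store the copied text along with per-selection metadata (length, whole-line flag,
+        // first-line indent) so a later paste can reproduce the original selection layout.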
cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections)); + } + + pub fn paste(&mut self, _: &Paste, cx: &mut ViewContext) { + if self.read_only(cx) { + return; + } + + self.transact(cx, |this, cx| { + if let Some(item) = cx.read_from_clipboard() { + let clipboard_text = Cow::Borrowed(item.text()); + if let Some(mut clipboard_selections) = item.metadata::>() { + let old_selections = this.selections.all::(cx); + let all_selections_were_entire_line = + clipboard_selections.iter().all(|s| s.is_entire_line); + let first_selection_indent_column = + clipboard_selections.first().map(|s| s.first_line_indent); + if clipboard_selections.len() != old_selections.len() { + clipboard_selections.drain(..); + } + + this.buffer.update(cx, |buffer, cx| { + let snapshot = buffer.read(cx); + let mut start_offset = 0; + let mut edits = Vec::new(); + let mut original_indent_columns = Vec::new(); + let line_mode = this.selections.line_mode; + for (ix, selection) in old_selections.iter().enumerate() { + let to_insert; + let entire_line; + let original_indent_column; + if let Some(clipboard_selection) = clipboard_selections.get(ix) { + let end_offset = start_offset + clipboard_selection.len; + to_insert = &clipboard_text[start_offset..end_offset]; + entire_line = clipboard_selection.is_entire_line; + start_offset = end_offset + 1; + original_indent_column = + Some(clipboard_selection.first_line_indent); + } else { + to_insert = clipboard_text.as_str(); + entire_line = all_selections_were_entire_line; + original_indent_column = first_selection_indent_column + } + + // If the corresponding selection was empty when this slice of the + // clipboard text was written, then the entire line containing the + // selection was copied. If this selection is also currently empty, + // then paste the line before the current line of the buffer. 
+ let range = if selection.is_empty() && !line_mode && entire_line { + let column = selection.start.to_point(&snapshot).column as usize; + let line_start = selection.start - column; + line_start..line_start + } else { + selection.range() + }; + + edits.push((range, to_insert)); + original_indent_columns.extend(original_indent_column); + } + drop(snapshot); + + buffer.edit( + edits, + Some(AutoindentMode::Block { + original_indent_columns, + }), + cx, + ); + }); + + let selections = this.selections.all::(cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections)); + } else { + this.insert(&clipboard_text, cx); + } + } + }); + } + + pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext) { + if self.read_only(cx) { + return; + } + + if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { + if let Some((selections, _)) = self.selection_history.transaction(tx_id).cloned() { + self.change_selections(None, cx, |s| { + s.select_anchors(selections.to_vec()); + }); + } + self.request_autoscroll(Autoscroll::fit(), cx); + self.unmark_text(cx); + self.refresh_inline_completion(true, cx); + cx.emit(EditorEvent::Edited); + cx.emit(EditorEvent::TransactionUndone { + transaction_id: tx_id, + }); + } + } + + pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext) { + if self.read_only(cx) { + return; + } + + if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { + if let Some((_, Some(selections))) = self.selection_history.transaction(tx_id).cloned() + { + self.change_selections(None, cx, |s| { + s.select_anchors(selections.to_vec()); + }); + } + self.request_autoscroll(Autoscroll::fit(), cx); + self.unmark_text(cx); + self.refresh_inline_completion(true, cx); + cx.emit(EditorEvent::Edited); + } + } + + pub fn finalize_last_transaction(&mut self, cx: &mut ViewContext) { + self.buffer + .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); + } + + pub fn group_until_transaction(&mut self, tx_id: TransactionId, cx: &mut ViewContext) { + self.buffer + .update(cx, |buffer, cx| buffer.group_until_transaction(tx_id, cx)); + } + + pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + let cursor = if selection.is_empty() && !line_mode { + movement::left(map, selection.start) + } else { + selection.start + }; + selection.collapse_to(cursor, SelectionGoal::None); + }); + }) + } + + pub fn select_left(&mut self, _: &SelectLeft, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| (movement::left(map, head), SelectionGoal::None)); + }) + } + + pub fn move_right(&mut self, _: &MoveRight, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + let cursor = if selection.is_empty() && !line_mode { + movement::right(map, selection.end) + } else { + selection.end + }; + selection.collapse_to(cursor, SelectionGoal::None) + }); + }) + } + + pub fn select_right(&mut self, _: &SelectRight, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| (movement::right(map, head), SelectionGoal::None)); + }) + } + + pub fn move_up(&mut self, _: &MoveUp, cx: &mut ViewContext) { + if self.take_rename(true, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + 
return; + } + + let text_layout_details = &self.text_layout_details(cx); + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if !selection.is_empty() && !line_mode { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::up( + map, + selection.start, + selection.goal, + false, + &text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }) + } + + pub fn move_up_by_lines(&mut self, action: &MoveUpByLines, cx: &mut ViewContext) { + if self.take_rename(true, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + let text_layout_details = &self.text_layout_details(cx); + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if !selection.is_empty() && !line_mode { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::up_by_rows( + map, + selection.start, + action.lines, + selection.goal, + false, + &text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }) + } + + pub fn move_down_by_lines(&mut self, action: &MoveDownByLines, cx: &mut ViewContext) { + if self.take_rename(true, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + let text_layout_details = &self.text_layout_details(cx); + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if !selection.is_empty() && !line_mode { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::down_by_rows( + map, + selection.start, + action.lines, + selection.goal, + false, + &text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }) + } + + pub fn select_down_by_lines(&mut self, action: &SelectDownByLines, cx: &mut ViewContext) { + let text_layout_details = &self.text_layout_details(cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::down_by_rows(map, head, action.lines, goal, false, &text_layout_details) + }) + }) + } + + pub fn select_up_by_lines(&mut self, action: &SelectUpByLines, cx: &mut ViewContext) { + let text_layout_details = &self.text_layout_details(cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::up_by_rows(map, head, action.lines, goal, false, &text_layout_details) + }) + }) + } + + pub fn move_page_up(&mut self, action: &MovePageUp, cx: &mut ViewContext) { + if self.take_rename(true, cx).is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + let row_count = if let Some(row_count) = self.visible_line_count() { + row_count as u32 - 1 + } else { + return; + }; + + let autoscroll = if action.center_cursor { + Autoscroll::center() + } else { + Autoscroll::fit() + }; + + let text_layout_details = &self.text_layout_details(cx); + + self.change_selections(Some(autoscroll), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if !selection.is_empty() && !line_mode { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::up_by_rows( + map, + selection.end, + row_count, + selection.goal, + false, + &text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + } + + pub fn select_up(&mut self, _: &SelectUp, cx: &mut ViewContext) { + let 
text_layout_details = &self.text_layout_details(cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::up(map, head, goal, false, &text_layout_details) + }) + }) + } + + pub fn move_down(&mut self, _: &MoveDown, cx: &mut ViewContext) { + self.take_rename(true, cx); + + if self.mode == EditorMode::SingleLine { + cx.propagate(); + return; + } + + let text_layout_details = &self.text_layout_details(cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if !selection.is_empty() && !line_mode { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::down( + map, + selection.end, + selection.goal, + false, + &text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + } + + pub fn move_page_down(&mut self, action: &MovePageDown, cx: &mut ViewContext) { + if self.take_rename(true, cx).is_some() { + return; + } + + if self + .context_menu + .write() + .as_mut() + .map(|menu| menu.select_last(self.project.as_ref(), cx)) + .unwrap_or(false) + { + return; + } + + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + let row_count = if let Some(row_count) = self.visible_line_count() { + row_count as u32 - 1 + } else { + return; + }; + + let autoscroll = if action.center_cursor { + Autoscroll::center() + } else { + Autoscroll::fit() + }; + + let text_layout_details = &self.text_layout_details(cx); + self.change_selections(Some(autoscroll), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if !selection.is_empty() && !line_mode { + selection.goal = SelectionGoal::None; + } + let (cursor, goal) = movement::down_by_rows( + map, + selection.end, + row_count, + selection.goal, + false, + &text_layout_details, + ); + selection.collapse_to(cursor, goal); + }); + }); + } + + pub fn select_down(&mut self, _: &SelectDown, cx: &mut ViewContext) { + let text_layout_details = &self.text_layout_details(cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, goal| { + movement::down(map, head, goal, false, &text_layout_details) + }) + }); + } + + pub fn context_menu_first(&mut self, _: &ContextMenuFirst, cx: &mut ViewContext) { + if let Some(context_menu) = self.context_menu.write().as_mut() { + context_menu.select_first(self.project.as_ref(), cx); + } + } + + pub fn context_menu_prev(&mut self, _: &ContextMenuPrev, cx: &mut ViewContext) { + if let Some(context_menu) = self.context_menu.write().as_mut() { + context_menu.select_prev(self.project.as_ref(), cx); + } + } + + pub fn context_menu_next(&mut self, _: &ContextMenuNext, cx: &mut ViewContext) { + if let Some(context_menu) = self.context_menu.write().as_mut() { + context_menu.select_next(self.project.as_ref(), cx); + } + } + + pub fn context_menu_last(&mut self, _: &ContextMenuLast, cx: &mut ViewContext) { + if let Some(context_menu) = self.context_menu.write().as_mut() { + context_menu.select_last(self.project.as_ref(), cx); + } + } + + pub fn move_to_previous_word_start( + &mut self, + _: &MoveToPreviousWordStart, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::previous_word_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_previous_subword_start( + &mut self, + _: &MoveToPreviousSubwordStart, + cx: &mut ViewContext, + ) { + 
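+        // Like move_to_previous_word_start, but stops at subword boundaries as well.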
self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::previous_subword_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_previous_word_start( + &mut self, + _: &SelectToPreviousWordStart, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::previous_word_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_previous_subword_start( + &mut self, + _: &SelectToPreviousSubwordStart, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::previous_subword_start(map, head), + SelectionGoal::None, + ) + }); + }) + } + + pub fn delete_to_previous_word_start( + &mut self, + _: &DeleteToPreviousWordStart, + cx: &mut ViewContext, + ) { + self.transact(cx, |this, cx| { + this.select_autoclose_pair(cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if selection.is_empty() && !line_mode { + let cursor = movement::previous_word_start(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", cx); + }); + } + + pub fn delete_to_previous_subword_start( + &mut self, + _: &DeleteToPreviousSubwordStart, + cx: &mut ViewContext, + ) { + self.transact(cx, |this, cx| { + this.select_autoclose_pair(cx); + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if selection.is_empty() && !line_mode { + let cursor = movement::previous_subword_start(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", cx); + }); + } + + pub fn move_to_next_word_end(&mut self, _: &MoveToNextWordEnd, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, head, _| { + (movement::next_word_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn move_to_next_subword_end( + &mut self, + _: &MoveToNextSubwordEnd, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, head, _| { + (movement::next_subword_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn select_to_next_word_end(&mut self, _: &SelectToNextWordEnd, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + (movement::next_word_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn select_to_next_subword_end( + &mut self, + _: &SelectToNextSubwordEnd, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + (movement::next_subword_end(map, head), SelectionGoal::None) + }); + }) + } + + pub fn delete_to_next_word_end(&mut self, _: &DeleteToNextWordEnd, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + let line_mode = s.line_mode; + s.move_with(|map, selection| { + if selection.is_empty() && !line_mode { + let cursor = movement::next_word_end(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", cx); + }); + } + + pub fn delete_to_next_subword_end( + &mut self, + _: &DeleteToNextSubwordEnd, + cx: &mut ViewContext, + ) { + self.transact(cx, 
|this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + if selection.is_empty() { + let cursor = movement::next_subword_end(map, selection.head()); + selection.set_head(cursor, SelectionGoal::None); + } + }); + }); + this.insert("", cx); + }); + } + + pub fn move_to_beginning_of_line( + &mut self, + action: &MoveToBeginningOfLine, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::indented_line_beginning(map, head, action.stop_at_soft_wraps), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_beginning_of_line( + &mut self, + action: &SelectToBeginningOfLine, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::indented_line_beginning(map, head, action.stop_at_soft_wraps), + SelectionGoal::None, + ) + }); + }); + } + + pub fn delete_to_beginning_of_line( + &mut self, + _: &DeleteToBeginningOfLine, + cx: &mut ViewContext, + ) { + self.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|_, selection| { + selection.reversed = true; + }); + }); + + this.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: false, + }, + cx, + ); + this.backspace(&Backspace, cx); + }); + } + + pub fn move_to_end_of_line(&mut self, action: &MoveToEndOfLine, cx: &mut ViewContext) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, head, _| { + ( + movement::line_end(map, head, action.stop_at_soft_wraps), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_end_of_line( + &mut self, + action: &SelectToEndOfLine, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::line_end(map, head, action.stop_at_soft_wraps), + SelectionGoal::None, + ) + }); + }) + } + + pub fn delete_to_end_of_line(&mut self, _: &DeleteToEndOfLine, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.select_to_end_of_line( + &SelectToEndOfLine { + stop_at_soft_wraps: false, + }, + cx, + ); + this.delete(&Delete, cx); + }); + } + + pub fn cut_to_end_of_line(&mut self, _: &CutToEndOfLine, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.select_to_end_of_line( + &SelectToEndOfLine { + stop_at_soft_wraps: false, + }, + cx, + ); + this.cut(&Cut, cx); + }); + } + + pub fn move_to_start_of_paragraph( + &mut self, + _: &MoveToStartOfParagraph, + cx: &mut ViewContext, + ) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::start_of_paragraph(map, selection.head(), 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_end_of_paragraph( + &mut self, + _: &MoveToEndOfParagraph, + cx: &mut ViewContext, + ) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + selection.collapse_to( + movement::end_of_paragraph(map, selection.head(), 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_start_of_paragraph( + &mut self, + _: &SelectToStartOfParagraph, + cx: &mut ViewContext, + ) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + 
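+        // Extend each selection's head to the start of the enclosing paragraph.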
self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::start_of_paragraph(map, head, 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn select_to_end_of_paragraph( + &mut self, + _: &SelectToEndOfParagraph, + cx: &mut ViewContext, + ) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_heads_with(|map, head, _| { + ( + movement::end_of_paragraph(map, head, 1), + SelectionGoal::None, + ) + }); + }) + } + + pub fn move_to_beginning(&mut self, _: &MoveToBeginning, cx: &mut ViewContext) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges(vec![0..0]); + }); + } + + pub fn select_to_beginning(&mut self, _: &SelectToBeginning, cx: &mut ViewContext) { + let mut selection = self.selections.last::(cx); + selection.set_head(Point::zero(), SelectionGoal::None); + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(vec![selection]); + }); + } + + pub fn move_to_end(&mut self, _: &MoveToEnd, cx: &mut ViewContext) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + let cursor = self.buffer.read(cx).read(cx).len(); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges(vec![cursor..cursor]) + }); + } + + pub fn set_nav_history(&mut self, nav_history: Option) { + self.nav_history = nav_history; + } + + pub fn nav_history(&self) -> Option<&ItemNavHistory> { + self.nav_history.as_ref() + } + + fn push_to_nav_history( + &mut self, + cursor_anchor: Anchor, + new_position: Option, + cx: &mut ViewContext, + ) { + if let Some(nav_history) = self.nav_history.as_mut() { + let buffer = self.buffer.read(cx).read(cx); + let cursor_position = cursor_anchor.to_point(&buffer); + let scroll_state = self.scroll_manager.anchor(); + let scroll_top_row = scroll_state.top_row(&buffer); + drop(buffer); + + if let Some(new_position) = new_position { + let row_delta = (new_position.row as i64 - cursor_position.row as i64).abs(); + if row_delta < MIN_NAVIGATION_HISTORY_ROW_DELTA { + return; + } + } + + nav_history.push( + Some(NavigationData { + cursor_anchor, + cursor_position, + scroll_anchor: scroll_state, + scroll_top_row, + }), + cx, + ); + } + } + + pub fn select_to_end(&mut self, _: &SelectToEnd, cx: &mut ViewContext) { + let buffer = self.buffer.read(cx).snapshot(cx); + let mut selection = self.selections.first::(cx); + selection.set_head(buffer.len(), SelectionGoal::None); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(vec![selection]); + }); + } + + pub fn select_all(&mut self, _: &SelectAll, cx: &mut ViewContext) { + let end = self.buffer.read(cx).read(cx).len(); + self.change_selections(None, cx, |s| { + s.select_ranges(vec![0..end]); + }); + } + + pub fn select_line(&mut self, _: &SelectLine, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut selections = self.selections.all::(cx); + let max_point = display_map.buffer_snapshot.max_point(); + for selection in &mut selections { + let rows = selection.spanned_rows(true, &display_map); + selection.start = Point::new(rows.start.0, 0); + selection.end = cmp::min(max_point, Point::new(rows.end.0, 0)); + selection.reversed = false; + } + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections); + }); + } + + pub fn 
split_selection_into_lines( + &mut self, + _: &SplitSelectionIntoLines, + cx: &mut ViewContext, + ) { + let mut to_unfold = Vec::new(); + let mut new_selection_ranges = Vec::new(); + { + let selections = self.selections.all::(cx); + let buffer = self.buffer.read(cx).read(cx); + for selection in selections { + for row in selection.start.row..selection.end.row { + let cursor = Point::new(row, buffer.line_len(MultiBufferRow(row))); + new_selection_ranges.push(cursor..cursor); + } + new_selection_ranges.push(selection.end..selection.end); + to_unfold.push(selection.start..selection.end); + } + } + self.unfold_ranges(to_unfold, true, true, cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges(new_selection_ranges); + }); + } + + pub fn add_selection_above(&mut self, _: &AddSelectionAbove, cx: &mut ViewContext) { + self.add_selection(true, cx); + } + + pub fn add_selection_below(&mut self, _: &AddSelectionBelow, cx: &mut ViewContext) { + self.add_selection(false, cx); + } + + fn add_selection(&mut self, above: bool, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut selections = self.selections.all::(cx); + let text_layout_details = self.text_layout_details(cx); + let mut state = self.add_selections_state.take().unwrap_or_else(|| { + let oldest_selection = selections.iter().min_by_key(|s| s.id).unwrap().clone(); + let range = oldest_selection.display_range(&display_map).sorted(); + + let start_x = display_map.x_for_display_point(range.start, &text_layout_details); + let end_x = display_map.x_for_display_point(range.end, &text_layout_details); + let positions = start_x.min(end_x)..start_x.max(end_x); + + selections.clear(); + let mut stack = Vec::new(); + for row in range.start.row().0..=range.end.row().0 { + if let Some(selection) = self.selections.build_columnar_selection( + &display_map, + DisplayRow(row), + &positions, + oldest_selection.reversed, + &text_layout_details, + ) { + stack.push(selection.id); + selections.push(selection); + } + } + + if above { + stack.reverse(); + } + + AddSelectionsState { above, stack } + }); + + let last_added_selection = *state.stack.last().unwrap(); + let mut new_selections = Vec::new(); + if above == state.above { + let end_row = if above { + DisplayRow(0) + } else { + display_map.max_point().row() + }; + + 'outer: for selection in selections { + if selection.id == last_added_selection { + let range = selection.display_range(&display_map).sorted(); + debug_assert_eq!(range.start.row(), range.end.row()); + let mut row = range.start.row(); + let positions = + if let SelectionGoal::HorizontalRange { start, end } = selection.goal { + px(start)..px(end) + } else { + let start_x = + display_map.x_for_display_point(range.start, &text_layout_details); + let end_x = + display_map.x_for_display_point(range.end, &text_layout_details); + start_x.min(end_x)..start_x.max(end_x) + }; + + while row != end_row { + if above { + row.0 -= 1; + } else { + row.0 += 1; + } + + if let Some(new_selection) = self.selections.build_columnar_selection( + &display_map, + row, + &positions, + selection.reversed, + &text_layout_details, + ) { + state.stack.push(new_selection.id); + if above { + new_selections.push(new_selection); + new_selections.push(selection); + } else { + new_selections.push(selection); + new_selections.push(new_selection); + } + + continue 'outer; + } + } + } + + new_selections.push(selection); + } + } else { + new_selections = selections; + new_selections.retain(|s| s.id != 
last_added_selection); + state.stack.pop(); + } + + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(new_selections); + }); + if state.stack.len() > 1 { + self.add_selections_state = Some(state); + } + } + + pub fn select_next_match_internal( + &mut self, + display_map: &DisplaySnapshot, + replace_newest: bool, + autoscroll: Option, + cx: &mut ViewContext, + ) -> Result<()> { + fn select_next_match_ranges( + this: &mut Editor, + range: Range, + replace_newest: bool, + auto_scroll: Option, + cx: &mut ViewContext, + ) { + this.unfold_ranges([range.clone()], false, true, cx); + this.change_selections(auto_scroll, cx, |s| { + if replace_newest { + s.delete(s.newest_anchor().id); + } + s.insert_range(range.clone()); + }); + } + + let buffer = &display_map.buffer_snapshot; + let mut selections = self.selections.all::(cx); + if let Some(mut select_next_state) = self.select_next_state.take() { + let query = &select_next_state.query; + if !select_next_state.done { + let first_selection = selections.iter().min_by_key(|s| s.id).unwrap(); + let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); + let mut next_selected_range = None; + + let bytes_after_last_selection = + buffer.bytes_in_range(last_selection.end..buffer.len()); + let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start); + let query_matches = query + .stream_find_iter(bytes_after_last_selection) + .map(|result| (last_selection.end, result)) + .chain( + query + .stream_find_iter(bytes_before_first_selection) + .map(|result| (0, result)), + ); + + for (start_offset, query_match) in query_matches { + let query_match = query_match.unwrap(); // can only fail due to I/O + let offset_range = + start_offset + query_match.start()..start_offset + query_match.end(); + let display_range = offset_range.start.to_display_point(&display_map) + ..offset_range.end.to_display_point(&display_map); + + if !select_next_state.wordwise + || (!movement::is_inside_word(&display_map, display_range.start) + && !movement::is_inside_word(&display_map, display_range.end)) + { + // TODO: This is n^2, because we might check all the selections + if !selections + .iter() + .any(|selection| selection.range().overlaps(&offset_range)) + { + next_selected_range = Some(offset_range); + break; + } + } + } + + if let Some(next_selected_range) = next_selected_range { + select_next_match_ranges( + self, + next_selected_range, + replace_newest, + autoscroll, + cx, + ); + } else { + select_next_state.done = true; + } + } + + self.select_next_state = Some(select_next_state); + } else { + let mut only_carets = true; + let mut same_text_selected = true; + let mut selected_text = None; + + let mut selections_iter = selections.iter().peekable(); + while let Some(selection) = selections_iter.next() { + if selection.start != selection.end { + only_carets = false; + } + + if same_text_selected { + if selected_text.is_none() { + selected_text = + Some(buffer.text_for_range(selection.range()).collect::()); + } + + if let Some(next_selection) = selections_iter.peek() { + if next_selection.range().len() == selection.range().len() { + let next_selected_text = buffer + .text_for_range(next_selection.range()) + .collect::(); + if Some(next_selected_text) != selected_text { + same_text_selected = false; + selected_text = None; + } + } else { + same_text_selected = false; + selected_text = None; + } + } + } + } + + if only_carets { + for selection in &mut selections { + let word_range = movement::surrounding_word( + &display_map, + 
selection.start.to_display_point(&display_map), + ); + selection.start = word_range.start.to_offset(&display_map, Bias::Left); + selection.end = word_range.end.to_offset(&display_map, Bias::Left); + selection.goal = SelectionGoal::None; + selection.reversed = false; + select_next_match_ranges( + self, + selection.start..selection.end, + replace_newest, + autoscroll, + cx, + ); + } + + if selections.len() == 1 { + let selection = selections + .last() + .expect("ensured that there's only one selection"); + let query = buffer + .text_for_range(selection.start..selection.end) + .collect::(); + let is_empty = query.is_empty(); + let select_state = SelectNextState { + query: AhoCorasick::new(&[query])?, + wordwise: true, + done: is_empty, + }; + self.select_next_state = Some(select_state); + } else { + self.select_next_state = None; + } + } else if let Some(selected_text) = selected_text { + self.select_next_state = Some(SelectNextState { + query: AhoCorasick::new(&[selected_text])?, + wordwise: false, + done: false, + }); + self.select_next_match_internal(display_map, replace_newest, autoscroll, cx)?; + } + } + Ok(()) + } + + pub fn select_all_matches( + &mut self, + _action: &SelectAllMatches, + cx: &mut ViewContext, + ) -> Result<()> { + self.push_to_selection_history(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + self.select_next_match_internal(&display_map, false, None, cx)?; + let Some(select_next_state) = self.select_next_state.as_mut() else { + return Ok(()); + }; + if select_next_state.done { + return Ok(()); + } + + let mut new_selections = self.selections.all::(cx); + + let buffer = &display_map.buffer_snapshot; + let query_matches = select_next_state + .query + .stream_find_iter(buffer.bytes_in_range(0..buffer.len())); + + for query_match in query_matches { + let query_match = query_match.unwrap(); // can only fail due to I/O + let offset_range = query_match.start()..query_match.end(); + let display_range = offset_range.start.to_display_point(&display_map) + ..offset_range.end.to_display_point(&display_map); + + if !select_next_state.wordwise + || (!movement::is_inside_word(&display_map, display_range.start) + && !movement::is_inside_word(&display_map, display_range.end)) + { + self.selections.change_with(cx, |selections| { + new_selections.push(Selection { + id: selections.new_selection_id(), + start: offset_range.start, + end: offset_range.end, + reversed: false, + goal: SelectionGoal::None, + }); + }); + } + } + + new_selections.sort_by_key(|selection| selection.start); + let mut ix = 0; + while ix + 1 < new_selections.len() { + let current_selection = &new_selections[ix]; + let next_selection = &new_selections[ix + 1]; + if current_selection.range().overlaps(&next_selection.range()) { + if current_selection.id < next_selection.id { + new_selections.remove(ix + 1); + } else { + new_selections.remove(ix); + } + } else { + ix += 1; + } + } + + select_next_state.done = true; + self.unfold_ranges( + new_selections.iter().map(|selection| selection.range()), + false, + false, + cx, + ); + self.change_selections(Some(Autoscroll::fit()), cx, |selections| { + selections.select(new_selections) + }); + + Ok(()) + } + + pub fn select_next(&mut self, action: &SelectNext, cx: &mut ViewContext) -> Result<()> { + self.push_to_selection_history(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.select_next_match_internal( + &display_map, + action.replace_newest, + Some(Autoscroll::newest()), + cx, + )?; + Ok(()) + } + + pub 
fn select_previous( + &mut self, + action: &SelectPrevious, + cx: &mut ViewContext, + ) -> Result<()> { + self.push_to_selection_history(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let mut selections = self.selections.all::(cx); + if let Some(mut select_prev_state) = self.select_prev_state.take() { + let query = &select_prev_state.query; + if !select_prev_state.done { + let first_selection = selections.iter().min_by_key(|s| s.id).unwrap(); + let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); + let mut next_selected_range = None; + // When we're iterating matches backwards, the oldest match will actually be the furthest one in the buffer. + let bytes_before_last_selection = + buffer.reversed_bytes_in_range(0..last_selection.start); + let bytes_after_first_selection = + buffer.reversed_bytes_in_range(first_selection.end..buffer.len()); + let query_matches = query + .stream_find_iter(bytes_before_last_selection) + .map(|result| (last_selection.start, result)) + .chain( + query + .stream_find_iter(bytes_after_first_selection) + .map(|result| (buffer.len(), result)), + ); + for (end_offset, query_match) in query_matches { + let query_match = query_match.unwrap(); // can only fail due to I/O + let offset_range = + end_offset - query_match.end()..end_offset - query_match.start(); + let display_range = offset_range.start.to_display_point(&display_map) + ..offset_range.end.to_display_point(&display_map); + + if !select_prev_state.wordwise + || (!movement::is_inside_word(&display_map, display_range.start) + && !movement::is_inside_word(&display_map, display_range.end)) + { + next_selected_range = Some(offset_range); + break; + } + } + + if let Some(next_selected_range) = next_selected_range { + self.unfold_ranges([next_selected_range.clone()], false, true, cx); + self.change_selections(Some(Autoscroll::newest()), cx, |s| { + if action.replace_newest { + s.delete(s.newest_anchor().id); + } + s.insert_range(next_selected_range); + }); + } else { + select_prev_state.done = true; + } + } + + self.select_prev_state = Some(select_prev_state); + } else { + let mut only_carets = true; + let mut same_text_selected = true; + let mut selected_text = None; + + let mut selections_iter = selections.iter().peekable(); + while let Some(selection) = selections_iter.next() { + if selection.start != selection.end { + only_carets = false; + } + + if same_text_selected { + if selected_text.is_none() { + selected_text = + Some(buffer.text_for_range(selection.range()).collect::()); + } + + if let Some(next_selection) = selections_iter.peek() { + if next_selection.range().len() == selection.range().len() { + let next_selected_text = buffer + .text_for_range(next_selection.range()) + .collect::(); + if Some(next_selected_text) != selected_text { + same_text_selected = false; + selected_text = None; + } + } else { + same_text_selected = false; + selected_text = None; + } + } + } + } + + if only_carets { + for selection in &mut selections { + let word_range = movement::surrounding_word( + &display_map, + selection.start.to_display_point(&display_map), + ); + selection.start = word_range.start.to_offset(&display_map, Bias::Left); + selection.end = word_range.end.to_offset(&display_map, Bias::Left); + selection.goal = SelectionGoal::None; + selection.reversed = false; + } + if selections.len() == 1 { + let selection = selections + .last() + .expect("ensured that there's only one selection"); + let query = buffer + 
.text_for_range(selection.start..selection.end) + .collect::(); + let is_empty = query.is_empty(); + let select_state = SelectNextState { + query: AhoCorasick::new(&[query.chars().rev().collect::()])?, + wordwise: true, + done: is_empty, + }; + self.select_prev_state = Some(select_state); + } else { + self.select_prev_state = None; + } + + self.unfold_ranges( + selections.iter().map(|s| s.range()).collect::>(), + false, + true, + cx, + ); + self.change_selections(Some(Autoscroll::newest()), cx, |s| { + s.select(selections); + }); + } else if let Some(selected_text) = selected_text { + self.select_prev_state = Some(SelectNextState { + query: AhoCorasick::new(&[selected_text.chars().rev().collect::()])?, + wordwise: false, + done: false, + }); + self.select_previous(action, cx)?; + } + } + Ok(()) + } + + pub fn toggle_comments(&mut self, action: &ToggleComments, cx: &mut ViewContext) { + let text_layout_details = &self.text_layout_details(cx); + self.transact(cx, |this, cx| { + let mut selections = this.selections.all::(cx); + let mut edits = Vec::new(); + let mut selection_edit_ranges = Vec::new(); + let mut last_toggled_row = None; + let snapshot = this.buffer.read(cx).read(cx); + let empty_str: Arc = "".into(); + let mut suffixes_inserted = Vec::new(); + + fn comment_prefix_range( + snapshot: &MultiBufferSnapshot, + row: MultiBufferRow, + comment_prefix: &str, + comment_prefix_whitespace: &str, + ) -> Range { + let start = Point::new(row.0, snapshot.indent_size_for_line(row).len); + + let mut line_bytes = snapshot + .bytes_in_range(start..snapshot.max_point()) + .flatten() + .copied(); + + // If this line currently begins with the line comment prefix, then record + // the range containing the prefix. + if line_bytes + .by_ref() + .take(comment_prefix.len()) + .eq(comment_prefix.bytes()) + { + // Include any whitespace that matches the comment prefix. + let matching_whitespace_len = line_bytes + .zip(comment_prefix_whitespace.bytes()) + .take_while(|(a, b)| a == b) + .count() as u32; + let end = Point::new( + start.row, + start.column + comment_prefix.len() as u32 + matching_whitespace_len, + ); + start..end + } else { + start..start + } + } + + fn comment_suffix_range( + snapshot: &MultiBufferSnapshot, + row: MultiBufferRow, + comment_suffix: &str, + comment_suffix_has_leading_space: bool, + ) -> Range { + let end = Point::new(row.0, snapshot.line_len(row)); + let suffix_start_column = end.column.saturating_sub(comment_suffix.len() as u32); + + let mut line_end_bytes = snapshot + .bytes_in_range(Point::new(end.row, suffix_start_column.saturating_sub(1))..end) + .flatten() + .copied(); + + let leading_space_len = if suffix_start_column > 0 + && line_end_bytes.next() == Some(b' ') + && comment_suffix_has_leading_space + { + 1 + } else { + 0 + }; + + // If this line currently begins with the line comment prefix, then record + // the range containing the prefix. 
+ if line_end_bytes.by_ref().eq(comment_suffix.bytes()) { + let start = Point::new(end.row, suffix_start_column - leading_space_len); + start..end + } else { + end..end + } + } + + // TODO: Handle selections that cross excerpts + for selection in &mut selections { + let start_column = snapshot + .indent_size_for_line(MultiBufferRow(selection.start.row)) + .len; + let language = if let Some(language) = + snapshot.language_scope_at(Point::new(selection.start.row, start_column)) + { + language + } else { + continue; + }; + + selection_edit_ranges.clear(); + + // If multiple selections contain a given row, avoid processing that + // row more than once. + let mut start_row = MultiBufferRow(selection.start.row); + if last_toggled_row == Some(start_row) { + start_row = start_row.next_row(); + } + let end_row = + if selection.end.row > selection.start.row && selection.end.column == 0 { + MultiBufferRow(selection.end.row - 1) + } else { + MultiBufferRow(selection.end.row) + }; + last_toggled_row = Some(end_row); + + if start_row > end_row { + continue; + } + + // If the language has line comments, toggle those. + let full_comment_prefixes = language.line_comment_prefixes(); + if !full_comment_prefixes.is_empty() { + let first_prefix = full_comment_prefixes + .first() + .expect("prefixes is non-empty"); + let prefix_trimmed_lengths = full_comment_prefixes + .iter() + .map(|p| p.trim_end_matches(' ').len()) + .collect::>(); + + let mut all_selection_lines_are_comments = true; + + for row in start_row.0..=end_row.0 { + let row = MultiBufferRow(row); + if start_row < end_row && snapshot.is_line_blank(row) { + continue; + } + + let prefix_range = full_comment_prefixes + .iter() + .zip(prefix_trimmed_lengths.iter().copied()) + .map(|(prefix, trimmed_prefix_len)| { + comment_prefix_range( + snapshot.deref(), + row, + &prefix[..trimmed_prefix_len], + &prefix[trimmed_prefix_len..], + ) + }) + .max_by_key(|range| range.end.column - range.start.column) + .expect("prefixes is non-empty"); + + if prefix_range.is_empty() { + all_selection_lines_are_comments = false; + } + + selection_edit_ranges.push(prefix_range); + } + + if all_selection_lines_are_comments { + edits.extend( + selection_edit_ranges + .iter() + .cloned() + .map(|range| (range, empty_str.clone())), + ); + } else { + let min_column = selection_edit_ranges + .iter() + .map(|range| range.start.column) + .min() + .unwrap_or(0); + edits.extend(selection_edit_ranges.iter().map(|range| { + let position = Point::new(range.start.row, min_column); + (position..position, first_prefix.clone()) + })); + } + } else if let Some((full_comment_prefix, comment_suffix)) = + language.block_comment_delimiters() + { + let comment_prefix = full_comment_prefix.trim_end_matches(' '); + let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; + let prefix_range = comment_prefix_range( + snapshot.deref(), + start_row, + comment_prefix, + comment_prefix_whitespace, + ); + let suffix_range = comment_suffix_range( + snapshot.deref(), + end_row, + comment_suffix.trim_start_matches(' '), + comment_suffix.starts_with(' '), + ); + + if prefix_range.is_empty() || suffix_range.is_empty() { + edits.push(( + prefix_range.start..prefix_range.start, + full_comment_prefix.clone(), + )); + edits.push((suffix_range.end..suffix_range.end, comment_suffix.clone())); + suffixes_inserted.push((end_row, comment_suffix.len())); + } else { + edits.push((prefix_range, empty_str.clone())); + edits.push((suffix_range, empty_str.clone())); + } + } else { + continue; + } + } + + 
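+            // Apply all of the collected comment insertions and removals in a single buffer edit.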
drop(snapshot); + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + + // Adjust selections so that they end before any comment suffixes that + // were inserted. + let mut suffixes_inserted = suffixes_inserted.into_iter().peekable(); + let mut selections = this.selections.all::(cx); + let snapshot = this.buffer.read(cx).read(cx); + for selection in &mut selections { + while let Some((row, suffix_len)) = suffixes_inserted.peek().copied() { + match row.cmp(&MultiBufferRow(selection.end.row)) { + Ordering::Less => { + suffixes_inserted.next(); + continue; + } + Ordering::Greater => break, + Ordering::Equal => { + if selection.end.column == snapshot.line_len(row) { + if selection.is_empty() { + selection.start.column -= suffix_len as u32; + } + selection.end.column -= suffix_len as u32; + } + break; + } + } + } + } + + drop(snapshot); + this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections)); + + let selections = this.selections.all::(cx); + let selections_on_single_row = selections.windows(2).all(|selections| { + selections[0].start.row == selections[1].start.row + && selections[0].end.row == selections[1].end.row + && selections[0].start.row == selections[0].end.row + }); + let selections_selecting = selections + .iter() + .any(|selection| selection.start != selection.end); + let advance_downwards = action.advance_downwards + && selections_on_single_row + && !selections_selecting + && this.mode != EditorMode::SingleLine; + + if advance_downwards { + let snapshot = this.buffer.read(cx).snapshot(cx); + + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|display_snapshot, display_point, _| { + let mut point = display_point.to_point(display_snapshot); + point.row += 1; + point = snapshot.clip_point(point, Bias::Left); + let display_point = point.to_display_point(display_snapshot); + let goal = SelectionGoal::HorizontalPosition( + display_snapshot + .x_for_display_point(display_point, &text_layout_details) + .into(), + ); + (display_point, goal) + }) + }); + } + }); + } + + pub fn select_larger_syntax_node( + &mut self, + _: &SelectLargerSyntaxNode, + cx: &mut ViewContext, + ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = self.buffer.read(cx).snapshot(cx); + let old_selections = self.selections.all::(cx).into_boxed_slice(); + + let mut stack = mem::take(&mut self.select_larger_syntax_node_stack); + let mut selected_larger_node = false; + let new_selections = old_selections + .iter() + .map(|selection| { + let old_range = selection.start..selection.end; + let mut new_range = old_range.clone(); + while let Some(containing_range) = + buffer.range_for_syntax_ancestor(new_range.clone()) + { + new_range = containing_range; + if !display_map.intersects_fold(new_range.start) + && !display_map.intersects_fold(new_range.end) + { + break; + } + } + + selected_larger_node |= new_range != old_range; + Selection { + id: selection.id, + start: new_range.start, + end: new_range.end, + goal: SelectionGoal::None, + reversed: selection.reversed, + } + }) + .collect::>(); + + if selected_larger_node { + stack.push(old_selections); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(new_selections); + }); + } + self.select_larger_syntax_node_stack = stack; + } + + pub fn select_smaller_syntax_node( + &mut self, + _: &SelectSmallerSyntaxNode, + cx: &mut ViewContext, + ) { + let mut stack = mem::take(&mut self.select_larger_syntax_node_stack); + if let Some(selections) = 
stack.pop() { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections.to_vec()); + }); + } + self.select_larger_syntax_node_stack = stack; + } + + fn refresh_runnables(&mut self, cx: &mut ViewContext) -> Task<()> { + let project = self.project.clone(); + cx.spawn(|this, mut cx| async move { + let Ok(display_snapshot) = this.update(&mut cx, |this, cx| { + this.display_map.update(cx, |map, cx| map.snapshot(cx)) + }) else { + return; + }; + + let Some(project) = project else { + return; + }; + if project + .update(&mut cx, |this, _| this.is_remote()) + .unwrap_or(true) + { + // Do not display any test indicators in remote projects. + return; + } + let new_rows = + cx.background_executor() + .spawn({ + let snapshot = display_snapshot.clone(); + async move { + Self::fetch_runnable_ranges(&snapshot, Anchor::min()..Anchor::max()) + } + }) + .await; + let rows = Self::runnable_rows(project, display_snapshot, new_rows, cx.clone()); + + this.update(&mut cx, |this, _| { + this.clear_tasks(); + for (key, value) in rows { + this.insert_tasks(key, value); + } + }) + .ok(); + }) + } + fn fetch_runnable_ranges( + snapshot: &DisplaySnapshot, + range: Range, + ) -> Vec { + snapshot.buffer_snapshot.runnable_ranges(range).collect() + } + + fn runnable_rows( + project: Model, + snapshot: DisplaySnapshot, + runnable_ranges: Vec, + mut cx: AsyncWindowContext, + ) -> Vec<((BufferId, u32), (usize, RunnableTasks))> { + runnable_ranges + .into_iter() + .filter_map(|mut runnable| { + let (tasks, _) = cx + .update(|cx| { + Self::resolve_runnable(project.clone(), &mut runnable.runnable, cx) + }) + .ok()?; + if tasks.is_empty() { + return None; + } + + let point = runnable.run_range.start.to_point(&snapshot.buffer_snapshot); + + let row = snapshot + .buffer_snapshot + .buffer_line_for_row(MultiBufferRow(point.row))? + .1 + .start + .row; + + Some(( + (runnable.buffer_id, row), + ( + runnable.run_range.start, + RunnableTasks { + templates: tasks, + column: point.column, + extra_variables: runnable.extra_captures, + }, + ), + )) + }) + .collect() + } + + fn resolve_runnable( + project: Model, + runnable: &mut Runnable, + cx: &WindowContext<'_>, + ) -> (Vec<(TaskSourceKind, TaskTemplate)>, Option) { + let (inventory, worktree_id) = project.read_with(cx, |project, cx| { + let worktree_id = project + .buffer_for_id(runnable.buffer) + .and_then(|buffer| buffer.read(cx).file()) + .map(|file| WorktreeId::from_usize(file.worktree_id())); + + (project.task_inventory().clone(), worktree_id) + }); + + let inventory = inventory.read(cx); + let tags = mem::take(&mut runnable.tags); + let mut tags: Vec<_> = tags + .into_iter() + .flat_map(|tag| { + let tag = tag.0.clone(); + inventory + .list_tasks(Some(runnable.language.clone()), worktree_id) + .into_iter() + .filter(move |(_, template)| { + template.tags.iter().any(|source_tag| source_tag == &tag) + }) + }) + .sorted_by_key(|(kind, _)| kind.to_owned()) + .collect(); + if let Some((leading_tag_source, _)) = tags.first() { + // Strongest source wins; if we have worktree tag binding, prefer that to + // global and language bindings; + // if we have a global binding, prefer that to language binding. 
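+            // `tags` was sorted by task source kind above, so truncating at the first
+            // entry whose source differs from the leading one keeps only tasks from the
+            // highest-priority source.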
+ let first_mismatch = tags + .iter() + .position(|(tag_source, _)| tag_source != leading_tag_source); + if let Some(index) = first_mismatch { + tags.truncate(index); + } + } + + (tags, worktree_id) + } + + pub fn move_to_enclosing_bracket( + &mut self, + _: &MoveToEnclosingBracket, + cx: &mut ViewContext, + ) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_offsets_with(|snapshot, selection| { + let Some(enclosing_bracket_ranges) = + snapshot.enclosing_bracket_ranges(selection.start..selection.end) + else { + return; + }; + + let mut best_length = usize::MAX; + let mut best_inside = false; + let mut best_in_bracket_range = false; + let mut best_destination = None; + for (open, close) in enclosing_bracket_ranges { + let close = close.to_inclusive(); + let length = close.end() - open.start; + let inside = selection.start >= open.end && selection.end <= *close.start(); + let in_bracket_range = open.to_inclusive().contains(&selection.head()) + || close.contains(&selection.head()); + + // If best is next to a bracket and current isn't, skip + if !in_bracket_range && best_in_bracket_range { + continue; + } + + // Prefer smaller lengths unless best is inside and current isn't + if length > best_length && (best_inside || !inside) { + continue; + } + + best_length = length; + best_inside = inside; + best_in_bracket_range = in_bracket_range; + best_destination = Some( + if close.contains(&selection.start) && close.contains(&selection.end) { + if inside { + open.end + } else { + open.start + } + } else { + if inside { + *close.start() + } else { + *close.end() + } + }, + ); + } + + if let Some(destination) = best_destination { + selection.collapse_to(destination, SelectionGoal::None); + } + }) + }); + } + + pub fn undo_selection(&mut self, _: &UndoSelection, cx: &mut ViewContext) { + self.end_selection(cx); + self.selection_history.mode = SelectionHistoryMode::Undoing; + if let Some(entry) = self.selection_history.undo_stack.pop_back() { + self.change_selections(None, cx, |s| s.select_anchors(entry.selections.to_vec())); + self.select_next_state = entry.select_next_state; + self.select_prev_state = entry.select_prev_state; + self.add_selections_state = entry.add_selections_state; + self.request_autoscroll(Autoscroll::newest(), cx); + } + self.selection_history.mode = SelectionHistoryMode::Normal; + } + + pub fn redo_selection(&mut self, _: &RedoSelection, cx: &mut ViewContext) { + self.end_selection(cx); + self.selection_history.mode = SelectionHistoryMode::Redoing; + if let Some(entry) = self.selection_history.redo_stack.pop_back() { + self.change_selections(None, cx, |s| s.select_anchors(entry.selections.to_vec())); + self.select_next_state = entry.select_next_state; + self.select_prev_state = entry.select_prev_state; + self.add_selections_state = entry.add_selections_state; + self.request_autoscroll(Autoscroll::newest(), cx); + } + self.selection_history.mode = SelectionHistoryMode::Normal; + } + + pub fn expand_excerpts(&mut self, action: &ExpandExcerpts, cx: &mut ViewContext) { + let selections = self.selections.disjoint_anchors(); + + let lines = if action.lines == 0 { 3 } else { action.lines }; + + self.buffer.update(cx, |buffer, cx| { + buffer.expand_excerpts( + selections + .into_iter() + .map(|selection| selection.head().excerpt_id) + .dedup(), + lines, + cx, + ) + }) + } + + pub fn expand_excerpt(&mut self, excerpt: ExcerptId, cx: &mut ViewContext) { + self.buffer + .update(cx, |buffer, cx| buffer.expand_excerpts([excerpt], 3, cx)) + } + + fn go_to_diagnostic(&mut 
self, _: &GoToDiagnostic, cx: &mut ViewContext) { + self.go_to_diagnostic_impl(Direction::Next, cx) + } + + fn go_to_prev_diagnostic(&mut self, _: &GoToPrevDiagnostic, cx: &mut ViewContext) { + self.go_to_diagnostic_impl(Direction::Prev, cx) + } + + pub fn go_to_diagnostic_impl(&mut self, direction: Direction, cx: &mut ViewContext) { + let buffer = self.buffer.read(cx).snapshot(cx); + let selection = self.selections.newest::(cx); + + // If there is an active Diagnostic Popover jump to its diagnostic instead. + if direction == Direction::Next { + if let Some(popover) = self.hover_state.diagnostic_popover.as_ref() { + let (group_id, jump_to) = popover.activation_info(); + if self.activate_diagnostics(group_id, cx) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let mut new_selection = s.newest_anchor().clone(); + new_selection.collapse_to(jump_to, SelectionGoal::None); + s.select_anchors(vec![new_selection.clone()]); + }); + } + return; + } + } + + let mut active_primary_range = self.active_diagnostics.as_ref().map(|active_diagnostics| { + active_diagnostics + .primary_range + .to_offset(&buffer) + .to_inclusive() + }); + let mut search_start = if let Some(active_primary_range) = active_primary_range.as_ref() { + if active_primary_range.contains(&selection.head()) { + *active_primary_range.start() + } else { + selection.head() + } + } else { + selection.head() + }; + let snapshot = self.snapshot(cx); + loop { + let diagnostics = if direction == Direction::Prev { + buffer.diagnostics_in_range::<_, usize>(0..search_start, true) + } else { + buffer.diagnostics_in_range::<_, usize>(search_start..buffer.len(), false) + } + .filter(|diagnostic| !snapshot.intersects_fold(diagnostic.range.start)); + let group = diagnostics + // relies on diagnostics_in_range to return diagnostics with the same starting range to + // be sorted in a stable way + // skip until we are at current active diagnostic, if it exists + .skip_while(|entry| { + (match direction { + Direction::Prev => entry.range.start >= search_start, + Direction::Next => entry.range.start <= search_start, + }) && self + .active_diagnostics + .as_ref() + .is_some_and(|a| a.group_id != entry.diagnostic.group_id) + }) + .find_map(|entry| { + if entry.diagnostic.is_primary + && entry.diagnostic.severity <= DiagnosticSeverity::WARNING + && !entry.range.is_empty() + // if we match with the active diagnostic, skip it + && Some(entry.diagnostic.group_id) + != self.active_diagnostics.as_ref().map(|d| d.group_id) + { + Some((entry.range, entry.diagnostic.group_id)) + } else { + None + } + }); + + if let Some((primary_range, group_id)) = group { + if self.activate_diagnostics(group_id, cx) { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(vec![Selection { + id: selection.id, + start: primary_range.start, + end: primary_range.start, + reversed: false, + goal: SelectionGoal::None, + }]); + }); + } + break; + } else { + // Cycle around to the start of the buffer, potentially moving back to the start of + // the currently active diagnostic. 
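+                // Wrap around: reset `search_start` so the next pass covers the rest of
+                // the buffer, or stop if the whole buffer has already been searched.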
+ active_primary_range.take(); + if direction == Direction::Prev { + if search_start == buffer.len() { + break; + } else { + search_start = buffer.len(); + } + } else if search_start == 0 { + break; + } else { + search_start = 0; + } + } + } + } + + fn go_to_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext) { + let snapshot = self + .display_map + .update(cx, |display_map, cx| display_map.snapshot(cx)); + let selection = self.selections.newest::(cx); + + if !self.seek_in_direction( + &snapshot, + selection.head(), + false, + snapshot.buffer_snapshot.git_diff_hunks_in_range( + MultiBufferRow(selection.head().row + 1)..MultiBufferRow::MAX, + ), + cx, + ) { + let wrapped_point = Point::zero(); + self.seek_in_direction( + &snapshot, + wrapped_point, + true, + snapshot.buffer_snapshot.git_diff_hunks_in_range( + MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX, + ), + cx, + ); + } + } + + fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext) { + let snapshot = self + .display_map + .update(cx, |display_map, cx| display_map.snapshot(cx)); + let selection = self.selections.newest::(cx); + + if !self.seek_in_direction( + &snapshot, + selection.head(), + false, + snapshot.buffer_snapshot.git_diff_hunks_in_range_rev( + MultiBufferRow(0)..MultiBufferRow(selection.head().row), + ), + cx, + ) { + let wrapped_point = snapshot.buffer_snapshot.max_point(); + self.seek_in_direction( + &snapshot, + wrapped_point, + true, + snapshot.buffer_snapshot.git_diff_hunks_in_range_rev( + MultiBufferRow(0)..MultiBufferRow(wrapped_point.row), + ), + cx, + ); + } + } + + fn seek_in_direction( + &mut self, + snapshot: &DisplaySnapshot, + initial_point: Point, + is_wrapped: bool, + hunks: impl Iterator>, + cx: &mut ViewContext, + ) -> bool { + let display_point = initial_point.to_display_point(snapshot); + let mut hunks = hunks + .map(|hunk| diff_hunk_to_display(&hunk, &snapshot)) + .filter(|hunk| { + if is_wrapped { + true + } else { + !hunk.contains_display_row(display_point.row()) + } + }) + .dedup(); + + if let Some(hunk) = hunks.next() { + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + let row = hunk.start_display_row(); + let point = DisplayPoint::new(row, 0); + s.select_display_ranges([point..point]); + }); + + true + } else { + false + } + } + + pub fn go_to_definition( + &mut self, + _: &GoToDefinition, + cx: &mut ViewContext, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, cx) + } + + pub fn go_to_implementation( + &mut self, + _: &GoToImplementation, + cx: &mut ViewContext, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Implementation, false, cx) + } + + pub fn go_to_implementation_split( + &mut self, + _: &GoToImplementationSplit, + cx: &mut ViewContext, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Implementation, true, cx) + } + + pub fn go_to_type_definition( + &mut self, + _: &GoToTypeDefinition, + cx: &mut ViewContext, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Type, false, cx) + } + + pub fn go_to_definition_split( + &mut self, + _: &GoToDefinitionSplit, + cx: &mut ViewContext, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, true, cx) + } + + pub fn go_to_type_definition_split( + &mut self, + _: &GoToTypeDefinitionSplit, + cx: &mut ViewContext, + ) -> Task> { + self.go_to_definition_of_kind(GotoDefinitionKind::Type, true, cx) + } + + fn go_to_definition_of_kind( + &mut self, + kind: GotoDefinitionKind, + split: bool, + cx: &mut ViewContext, 
+ ) -> Task> { + let Some(workspace) = self.workspace() else { + return Task::ready(Ok(false)); + }; + let buffer = self.buffer.read(cx); + let head = self.selections.newest::(cx).head(); + let (buffer, head) = if let Some(text_anchor) = buffer.text_anchor_for_position(head, cx) { + text_anchor + } else { + return Task::ready(Ok(false)); + }; + + let project = workspace.read(cx).project().clone(); + let definitions = project.update(cx, |project, cx| match kind { + GotoDefinitionKind::Symbol => project.definition(&buffer, head, cx), + GotoDefinitionKind::Type => project.type_definition(&buffer, head, cx), + GotoDefinitionKind::Implementation => project.implementation(&buffer, head, cx), + }); + + cx.spawn(|editor, mut cx| async move { + let definitions = definitions.await?; + let navigated = editor + .update(&mut cx, |editor, cx| { + editor.navigate_to_hover_links( + Some(kind), + definitions + .into_iter() + .filter(|location| { + hover_links::exclude_link_to_position(&buffer, &head, location, cx) + }) + .map(HoverLink::Text) + .collect::>(), + split, + cx, + ) + })? + .await?; + anyhow::Ok(navigated) + }) + } + + pub fn open_url(&mut self, _: &OpenUrl, cx: &mut ViewContext) { + let position = self.selections.newest_anchor().head(); + let Some((buffer, buffer_position)) = + self.buffer.read(cx).text_anchor_for_position(position, cx) + else { + return; + }; + + cx.spawn(|editor, mut cx| async move { + if let Some((_, url)) = find_url(&buffer, buffer_position, cx.clone()) { + editor.update(&mut cx, |_, cx| { + cx.open_url(&url); + }) + } else { + Ok(()) + } + }) + .detach(); + } + + pub(crate) fn navigate_to_hover_links( + &mut self, + kind: Option, + mut definitions: Vec, + split: bool, + cx: &mut ViewContext, + ) -> Task> { + // If there is one definition, just open it directly + if definitions.len() == 1 { + let definition = definitions.pop().unwrap(); + let target_task = match definition { + HoverLink::Text(link) => Task::Ready(Some(Ok(Some(link.target)))), + HoverLink::InlayHint(lsp_location, server_id) => { + self.compute_target_location(lsp_location, server_id, cx) + } + HoverLink::Url(url) => { + cx.open_url(&url); + Task::ready(Ok(None)) + } + }; + cx.spawn(|editor, mut cx| async move { + let target = target_task.await.context("target resolution task")?; + if let Some(target) = target { + editor.update(&mut cx, |editor, cx| { + let Some(workspace) = editor.workspace() else { + return false; + }; + let pane = workspace.read(cx).active_pane().clone(); + + let range = target.range.to_offset(target.buffer.read(cx)); + let range = editor.range_for_match(&range); + + /// If select range has more than one line, we + /// just point the cursor to range.start. 
+ fn check_multiline_range( + buffer: &Buffer, + range: Range, + ) -> Range { + if buffer.offset_to_point(range.start).row + == buffer.offset_to_point(range.end).row + { + range + } else { + range.start..range.start + } + } + + if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() { + let buffer = target.buffer.read(cx); + let range = check_multiline_range(buffer, range); + editor.change_selections(Some(Autoscroll::focused()), cx, |s| { + s.select_ranges([range]); + }); + } else { + cx.window_context().defer(move |cx| { + let target_editor: View = + workspace.update(cx, |workspace, cx| { + let pane = if split { + workspace.adjacent_pane(cx) + } else { + workspace.active_pane().clone() + }; + + workspace.open_project_item(pane, target.buffer.clone(), cx) + }); + target_editor.update(cx, |target_editor, cx| { + // When selecting a definition in a different buffer, disable the nav history + // to avoid creating a history entry at the previous cursor location. + pane.update(cx, |pane, _| pane.disable_history()); + let buffer = target.buffer.read(cx); + let range = check_multiline_range(buffer, range); + target_editor.change_selections( + Some(Autoscroll::focused()), + cx, + |s| { + s.select_ranges([range]); + }, + ); + pane.update(cx, |pane, _| pane.enable_history()); + }); + }); + } + true + }) + } else { + Ok(false) + } + }) + } else if !definitions.is_empty() { + let replica_id = self.replica_id(cx); + cx.spawn(|editor, mut cx| async move { + let (title, location_tasks, workspace) = editor + .update(&mut cx, |editor, cx| { + let tab_kind = match kind { + Some(GotoDefinitionKind::Implementation) => "Implementations", + _ => "Definitions", + }; + let title = definitions + .iter() + .find_map(|definition| match definition { + HoverLink::Text(link) => link.origin.as_ref().map(|origin| { + let buffer = origin.buffer.read(cx); + format!( + "{} for {}", + tab_kind, + buffer + .text_for_range(origin.range.clone()) + .collect::() + ) + }), + HoverLink::InlayHint(_, _) => None, + HoverLink::Url(_) => None, + }) + .unwrap_or(tab_kind.to_string()); + let location_tasks = definitions + .into_iter() + .map(|definition| match definition { + HoverLink::Text(link) => Task::Ready(Some(Ok(Some(link.target)))), + HoverLink::InlayHint(lsp_location, server_id) => { + editor.compute_target_location(lsp_location, server_id, cx) + } + HoverLink::Url(_) => Task::ready(Ok(None)), + }) + .collect::>(); + (title, location_tasks, editor.workspace().clone()) + }) + .context("location tasks preparation")?; + + let locations = futures::future::join_all(location_tasks) + .await + .into_iter() + .filter_map(|location| location.transpose()) + .collect::>() + .context("location tasks")?; + + let Some(workspace) = workspace else { + return Ok(false); + }; + let opened = workspace + .update(&mut cx, |workspace, cx| { + Self::open_locations_in_multibuffer( + workspace, locations, replica_id, title, split, cx, + ) + }) + .ok(); + + anyhow::Ok(opened.is_some()) + }) + } else { + Task::ready(Ok(false)) + } + } + + fn compute_target_location( + &self, + lsp_location: lsp::Location, + server_id: LanguageServerId, + cx: &mut ViewContext, + ) -> Task>> { + let Some(project) = self.project.clone() else { + return Task::Ready(Some(Ok(None))); + }; + + cx.spawn(move |editor, mut cx| async move { + let location_task = editor.update(&mut cx, |editor, cx| { + project.update(cx, |project, cx| { + let language_server_name = + editor.buffer.read(cx).as_singleton().and_then(|buffer| { + project + 
.language_server_for_buffer(buffer.read(cx), server_id, cx) + .map(|(lsp_adapter, _)| lsp_adapter.name.clone()) + }); + language_server_name.map(|language_server_name| { + project.open_local_buffer_via_lsp( + lsp_location.uri.clone(), + server_id, + language_server_name, + cx, + ) + }) + }) + })?; + let location = match location_task { + Some(task) => Some({ + let target_buffer_handle = task.await.context("open local buffer")?; + let range = target_buffer_handle.update(&mut cx, |target_buffer, _| { + let target_start = target_buffer + .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left); + let target_end = target_buffer + .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left); + target_buffer.anchor_after(target_start) + ..target_buffer.anchor_before(target_end) + })?; + Location { + buffer: target_buffer_handle, + range, + } + }), + None => None, + }; + Ok(location) + }) + } + + pub fn find_all_references( + &mut self, + _: &FindAllReferences, + cx: &mut ViewContext, + ) -> Option>> { + let multi_buffer = self.buffer.read(cx); + let selection = self.selections.newest::(cx); + let head = selection.head(); + + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let head_anchor = multi_buffer_snapshot.anchor_at( + head, + if head < selection.tail() { + Bias::Right + } else { + Bias::Left + }, + ); + + match self + .find_all_references_task_sources + .binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot)) + { + Ok(_) => { + log::info!( + "Ignoring repeated FindAllReferences invocation with the position of already running task" + ); + return None; + } + Err(i) => { + self.find_all_references_task_sources.insert(i, head_anchor); + } + } + + let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?; + let replica_id = self.replica_id(cx); + let workspace = self.workspace()?; + let project = workspace.read(cx).project().clone(); + let references = project.update(cx, |project, cx| project.references(&buffer, head, cx)); + Some(cx.spawn(|editor, mut cx| async move { + let _cleanup = defer({ + let mut cx = cx.clone(); + move || { + let _ = editor.update(&mut cx, |editor, _| { + if let Ok(i) = + editor + .find_all_references_task_sources + .binary_search_by(|anchor| { + anchor.cmp(&head_anchor, &multi_buffer_snapshot) + }) + { + editor.find_all_references_task_sources.remove(i); + } + }); + } + }); + + let locations = references.await?; + if locations.is_empty() { + return anyhow::Ok(()); + } + + workspace.update(&mut cx, |workspace, cx| { + let title = locations + .first() + .as_ref() + .map(|location| { + let buffer = location.buffer.read(cx); + format!( + "References to `{}`", + buffer + .text_for_range(location.range.clone()) + .collect::() + ) + }) + .unwrap(); + Self::open_locations_in_multibuffer( + workspace, locations, replica_id, title, false, cx, + ); + }) + })) + } + + /// Opens a multibuffer with the given project locations in it + pub fn open_locations_in_multibuffer( + workspace: &mut Workspace, + mut locations: Vec, + replica_id: ReplicaId, + title: String, + split: bool, + cx: &mut ViewContext, + ) { + // If there are multiple definitions, open them in a multibuffer + locations.sort_by_key(|location| location.buffer.read(cx).remote_id()); + let mut locations = locations.into_iter().peekable(); + let mut ranges_to_highlight = Vec::new(); + let capability = workspace.project().read(cx).capability(); + + let excerpt_buffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(replica_id, capability); + 
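+            // Locations were sorted by buffer id above, so consecutive entries that share
+            // a buffer can be grouped and pushed as one batch of excerpts per buffer.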
while let Some(location) = locations.next() { + let buffer = location.buffer.read(cx); + let mut ranges_for_buffer = Vec::new(); + let range = location.range.to_offset(buffer); + ranges_for_buffer.push(range.clone()); + + while let Some(next_location) = locations.peek() { + if next_location.buffer == location.buffer { + ranges_for_buffer.push(next_location.range.to_offset(buffer)); + locations.next(); + } else { + break; + } + } + + ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end))); + ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines( + location.buffer.clone(), + ranges_for_buffer, + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + )) + } + + multibuffer.with_title(title) + }); + + let editor = cx.new_view(|cx| { + Editor::for_multibuffer(excerpt_buffer, Some(workspace.project().clone()), cx) + }); + editor.update(cx, |editor, cx| { + editor.highlight_background::( + &ranges_to_highlight, + |theme| theme.editor_highlighted_line_background, + cx, + ); + }); + + let item = Box::new(editor); + let item_id = item.item_id(); + + if split { + workspace.split_item(SplitDirection::Right, item.clone(), cx); + } else { + let destination_index = workspace.active_pane().update(cx, |pane, cx| { + if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation { + pane.close_current_preview_item(cx) + } else { + None + } + }); + workspace.add_item_to_active_pane(item.clone(), destination_index, cx); + } + workspace.active_pane().update(cx, |pane, cx| { + pane.set_preview_item_id(Some(item_id), cx); + }); + } + + pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext) -> Option>> { + use language::ToOffset as _; + + let project = self.project.clone()?; + let selection = self.selections.newest_anchor().clone(); + let (cursor_buffer, cursor_buffer_position) = self + .buffer + .read(cx) + .text_anchor_for_position(selection.head(), cx)?; + let (tail_buffer, cursor_buffer_position_end) = self + .buffer + .read(cx) + .text_anchor_for_position(selection.tail(), cx)?; + if tail_buffer != cursor_buffer { + return None; + } + + let snapshot = cursor_buffer.read(cx).snapshot(); + let cursor_buffer_offset = cursor_buffer_position.to_offset(&snapshot); + let cursor_buffer_offset_end = cursor_buffer_position_end.to_offset(&snapshot); + let prepare_rename = project.update(cx, |project, cx| { + project.prepare_rename(cursor_buffer.clone(), cursor_buffer_offset, cx) + }); + drop(snapshot); + + Some(cx.spawn(|this, mut cx| async move { + let rename_range = if let Some(range) = prepare_rename.await? { + Some(range) + } else { + this.update(&mut cx, |this, cx| { + let buffer = this.buffer.read(cx).snapshot(cx); + let mut buffer_highlights = this + .document_highlights_for_position(selection.head(), &buffer) + .filter(|highlight| { + highlight.start.excerpt_id == selection.head().excerpt_id + && highlight.end.excerpt_id == selection.head().excerpt_id + }); + buffer_highlights + .next() + .map(|highlight| highlight.start.text_anchor..highlight.end.text_anchor) + })? 
+ }; + if let Some(rename_range) = rename_range { + this.update(&mut cx, |this, cx| { + let snapshot = cursor_buffer.read(cx).snapshot(); + let rename_buffer_range = rename_range.to_offset(&snapshot); + let cursor_offset_in_rename_range = + cursor_buffer_offset.saturating_sub(rename_buffer_range.start); + let cursor_offset_in_rename_range_end = + cursor_buffer_offset_end.saturating_sub(rename_buffer_range.start); + + this.take_rename(false, cx); + let buffer = this.buffer.read(cx).read(cx); + let cursor_offset = selection.head().to_offset(&buffer); + let rename_start = cursor_offset.saturating_sub(cursor_offset_in_rename_range); + let rename_end = rename_start + rename_buffer_range.len(); + let range = buffer.anchor_before(rename_start)..buffer.anchor_after(rename_end); + let mut old_highlight_id = None; + let old_name: Arc = buffer + .chunks(rename_start..rename_end, true) + .map(|chunk| { + if old_highlight_id.is_none() { + old_highlight_id = chunk.syntax_highlight_id; + } + chunk.text + }) + .collect::() + .into(); + + drop(buffer); + + // Position the selection in the rename editor so that it matches the current selection. + this.show_local_selections = false; + let rename_editor = cx.new_view(|cx| { + let mut editor = Editor::single_line(cx); + editor.buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, old_name.clone())], None, cx) + }); + let rename_selection_range = match cursor_offset_in_rename_range + .cmp(&cursor_offset_in_rename_range_end) + { + Ordering::Equal => { + editor.select_all(&SelectAll, cx); + return editor; + } + Ordering::Less => { + cursor_offset_in_rename_range..cursor_offset_in_rename_range_end + } + Ordering::Greater => { + cursor_offset_in_rename_range_end..cursor_offset_in_rename_range + } + }; + if rename_selection_range.end > old_name.len() { + editor.select_all(&SelectAll, cx); + } else { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges([rename_selection_range]); + }); + } + editor + }); + + let write_highlights = + this.clear_background_highlights::(cx); + let read_highlights = + this.clear_background_highlights::(cx); + let ranges = write_highlights + .iter() + .flat_map(|(_, ranges)| ranges.iter()) + .chain(read_highlights.iter().flat_map(|(_, ranges)| ranges.iter())) + .cloned() + .collect(); + + this.highlight_text::( + ranges, + HighlightStyle { + fade_out: Some(0.6), + ..Default::default() + }, + cx, + ); + let rename_focus_handle = rename_editor.focus_handle(cx); + cx.focus(&rename_focus_handle); + let block_id = this.insert_blocks( + [BlockProperties { + style: BlockStyle::Flex, + position: range.start, + height: 1, + render: Box::new({ + let rename_editor = rename_editor.clone(); + move |cx: &mut BlockContext| { + let mut text_style = cx.editor_style.text.clone(); + if let Some(highlight_style) = old_highlight_id + .and_then(|h| h.style(&cx.editor_style.syntax)) + { + text_style = text_style.highlight(highlight_style); + } + div() + .pl(cx.anchor_x) + .child(EditorElement::new( + &rename_editor, + EditorStyle { + background: cx.theme().system().transparent, + local_player: cx.editor_style.local_player, + text: text_style, + scrollbar_width: cx.editor_style.scrollbar_width, + syntax: cx.editor_style.syntax.clone(), + status: cx.editor_style.status.clone(), + inlay_hints_style: HighlightStyle { + color: Some(cx.theme().status().hint), + font_weight: Some(FontWeight::BOLD), + ..HighlightStyle::default() + }, + suggestions_style: HighlightStyle { + color: Some(cx.theme().status().predictive), + 
..HighlightStyle::default() + }, + }, + )) + .into_any_element() + } + }), + disposition: BlockDisposition::Below, + }], + Some(Autoscroll::fit()), + cx, + )[0]; + this.pending_rename = Some(RenameState { + range, + old_name, + editor: rename_editor, + block_id, + }); + })?; + } + + Ok(()) + })) + } + + pub fn confirm_rename( + &mut self, + _: &ConfirmRename, + cx: &mut ViewContext, + ) -> Option>> { + let rename = self.take_rename(false, cx)?; + let workspace = self.workspace()?; + let (start_buffer, start) = self + .buffer + .read(cx) + .text_anchor_for_position(rename.range.start, cx)?; + let (end_buffer, end) = self + .buffer + .read(cx) + .text_anchor_for_position(rename.range.end, cx)?; + if start_buffer != end_buffer { + return None; + } + + let buffer = start_buffer; + let range = start..end; + let old_name = rename.old_name; + let new_name = rename.editor.read(cx).text(cx); + + let rename = workspace + .read(cx) + .project() + .clone() + .update(cx, |project, cx| { + project.perform_rename(buffer.clone(), range.start, new_name.clone(), true, cx) + }); + let workspace = workspace.downgrade(); + + Some(cx.spawn(|editor, mut cx| async move { + let project_transaction = rename.await?; + Self::open_project_transaction( + &editor, + workspace, + project_transaction, + format!("Rename: {} → {}", old_name, new_name), + cx.clone(), + ) + .await?; + + editor.update(&mut cx, |editor, cx| { + editor.refresh_document_highlights(cx); + })?; + Ok(()) + })) + } + + fn take_rename( + &mut self, + moving_cursor: bool, + cx: &mut ViewContext, + ) -> Option { + let rename = self.pending_rename.take()?; + if rename.editor.focus_handle(cx).is_focused(cx) { + cx.focus(&self.focus_handle); + } + + self.remove_blocks( + [rename.block_id].into_iter().collect(), + Some(Autoscroll::fit()), + cx, + ); + self.clear_highlights::(cx); + self.show_local_selections = true; + + if moving_cursor { + let rename_editor = rename.editor.read(cx); + let cursor_in_rename_editor = rename_editor.selections.newest::(cx).head(); + + // Update the selection to match the position of the selection inside + // the rename editor. + let snapshot = self.buffer.read(cx).read(cx); + let rename_range = rename.range.to_offset(&snapshot); + let cursor_in_editor = snapshot + .clip_offset(rename_range.start + cursor_in_rename_editor, Bias::Left) + .min(rename_range.end); + drop(snapshot); + + self.change_selections(None, cx, |s| { + s.select_ranges(vec![cursor_in_editor..cursor_in_editor]) + }); + } else { + self.refresh_document_highlights(cx); + } + + Some(rename) + } + + pub fn pending_rename(&self) -> Option<&RenameState> { + self.pending_rename.as_ref() + } + + fn format(&mut self, _: &Format, cx: &mut ViewContext) -> Option>> { + let project = match &self.project { + Some(project) => project.clone(), + None => return None, + }; + + Some(self.perform_format(project, FormatTrigger::Manual, cx)) + } + + fn perform_format( + &mut self, + project: Model, + trigger: FormatTrigger, + cx: &mut ViewContext, + ) -> Task> { + let buffer = self.buffer().clone(); + let mut buffers = buffer.read(cx).all_buffers(); + if trigger == FormatTrigger::Save { + buffers.retain(|buffer| buffer.read(cx).is_dirty()); + } + + let mut timeout = cx.background_executor().timer(FORMAT_TIMEOUT).fuse(); + let format = project.update(cx, |project, cx| project.format(buffers, true, trigger, cx)); + + cx.spawn(|_, mut cx| async move { + let transaction = futures::select_biased! 
{ + () = timeout => { + log::warn!("timed out waiting for formatting"); + None + } + transaction = format.log_err().fuse() => transaction, + }; + + buffer + .update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !buffer.is_singleton() { + buffer.push_transaction(&transaction.0, cx); + } + } + + cx.notify(); + }) + .ok(); + + Ok(()) + }) + } + + fn restart_language_server(&mut self, _: &RestartLanguageServer, cx: &mut ViewContext) { + if let Some(project) = self.project.clone() { + self.buffer.update(cx, |multi_buffer, cx| { + project.update(cx, |project, cx| { + project.restart_language_servers_for_buffers(multi_buffer.all_buffers(), cx); + }); + }) + } + } + + fn show_character_palette(&mut self, _: &ShowCharacterPalette, cx: &mut ViewContext) { + cx.show_character_palette(); + } + + fn refresh_active_diagnostics(&mut self, cx: &mut ViewContext) { + if let Some(active_diagnostics) = self.active_diagnostics.as_mut() { + let buffer = self.buffer.read(cx).snapshot(cx); + let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer); + let is_valid = buffer + .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone(), false) + .any(|entry| { + entry.diagnostic.is_primary + && !entry.range.is_empty() + && entry.range.start == primary_range_start + && entry.diagnostic.message == active_diagnostics.primary_message + }); + + if is_valid != active_diagnostics.is_valid { + active_diagnostics.is_valid = is_valid; + let mut new_styles = HashMap::default(); + for (block_id, diagnostic) in &active_diagnostics.blocks { + new_styles.insert( + *block_id, + diagnostic_block_renderer(diagnostic.clone(), is_valid), + ); + } + self.display_map + .update(cx, |display_map, _| display_map.replace_blocks(new_styles)); + } + } + } + + fn activate_diagnostics(&mut self, group_id: usize, cx: &mut ViewContext) -> bool { + self.dismiss_diagnostics(cx); + let snapshot = self.snapshot(cx); + self.active_diagnostics = self.display_map.update(cx, |display_map, cx| { + let buffer = self.buffer.read(cx).snapshot(cx); + + let mut primary_range = None; + let mut primary_message = None; + let mut group_end = Point::zero(); + let diagnostic_group = buffer + .diagnostic_group::(group_id) + .filter_map(|entry| { + if snapshot.is_line_folded(MultiBufferRow(entry.range.start.row)) + && (entry.range.start.row == entry.range.end.row + || snapshot.is_line_folded(MultiBufferRow(entry.range.end.row))) + { + return None; + } + if entry.range.end > group_end { + group_end = entry.range.end; + } + if entry.diagnostic.is_primary { + primary_range = Some(entry.range.clone()); + primary_message = Some(entry.diagnostic.message.clone()); + } + Some(entry) + }) + .collect::>(); + let primary_range = primary_range?; + let primary_message = primary_message?; + let primary_range = + buffer.anchor_after(primary_range.start)..buffer.anchor_before(primary_range.end); + + let blocks = display_map + .insert_blocks( + diagnostic_group.iter().map(|entry| { + let diagnostic = entry.diagnostic.clone(); + let message_height = diagnostic.message.matches('\n').count() as u8 + 1; + BlockProperties { + style: BlockStyle::Fixed, + position: buffer.anchor_after(entry.range.start), + height: message_height, + render: diagnostic_block_renderer(diagnostic, true), + disposition: BlockDisposition::Below, + } + }), + cx, + ) + .into_iter() + .zip(diagnostic_group.into_iter().map(|entry| entry.diagnostic)) + .collect(); + + Some(ActiveDiagnosticGroup { + primary_range, + primary_message, + group_id, + blocks, 
+ is_valid: true, + }) + }); + self.active_diagnostics.is_some() + } + + fn dismiss_diagnostics(&mut self, cx: &mut ViewContext) { + if let Some(active_diagnostic_group) = self.active_diagnostics.take() { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(active_diagnostic_group.blocks.into_keys().collect(), cx); + }); + cx.notify(); + } + } + + pub fn set_selections_from_remote( + &mut self, + selections: Vec>, + pending_selection: Option>, + cx: &mut ViewContext, + ) { + let old_cursor_position = self.selections.newest_anchor().head(); + self.selections.change_with(cx, |s| { + s.select_anchors(selections); + if let Some(pending_selection) = pending_selection { + s.set_pending(pending_selection, SelectMode::Character); + } else { + s.clear_pending(); + } + }); + self.selections_did_change(false, &old_cursor_position, true, cx); + } + + fn push_to_selection_history(&mut self) { + self.selection_history.push(SelectionHistoryEntry { + selections: self.selections.disjoint_anchors(), + select_next_state: self.select_next_state.clone(), + select_prev_state: self.select_prev_state.clone(), + add_selections_state: self.add_selections_state.clone(), + }); + } + + pub fn transact( + &mut self, + cx: &mut ViewContext, + update: impl FnOnce(&mut Self, &mut ViewContext), + ) -> Option { + self.start_transaction_at(Instant::now(), cx); + update(self, cx); + self.end_transaction_at(Instant::now(), cx) + } + + fn start_transaction_at(&mut self, now: Instant, cx: &mut ViewContext) { + self.end_selection(cx); + if let Some(tx_id) = self + .buffer + .update(cx, |buffer, cx| buffer.start_transaction_at(now, cx)) + { + self.selection_history + .insert_transaction(tx_id, self.selections.disjoint_anchors()); + cx.emit(EditorEvent::TransactionBegun { + transaction_id: tx_id, + }) + } + } + + fn end_transaction_at( + &mut self, + now: Instant, + cx: &mut ViewContext, + ) -> Option { + if let Some(tx_id) = self + .buffer + .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + { + if let Some((_, end_selections)) = self.selection_history.transaction_mut(tx_id) { + *end_selections = Some(self.selections.disjoint_anchors()); + } else { + log::error!("unexpectedly ended a transaction that wasn't started by this editor"); + } + + cx.emit(EditorEvent::Edited); + Some(tx_id) + } else { + None + } + } + + pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + let selections = self.selections.all_adjusted(cx); + for selection in selections { + let range = selection.range().sorted(); + let buffer_start_row = range.start.row; + + for row in (0..=range.end.row).rev() { + let fold_range = display_map.foldable_range(MultiBufferRow(row)); + + if let Some(fold_range) = fold_range { + if fold_range.end.row >= buffer_start_row { + fold_ranges.push(fold_range); + if row <= range.start.row { + break; + } + } + } + } + } + + self.fold_ranges(fold_ranges, true, cx); + } + + pub fn fold_at(&mut self, fold_at: &FoldAt, cx: &mut ViewContext) { + let buffer_row = fold_at.buffer_row; + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + if let Some(fold_range) = display_map.foldable_range(buffer_row) { + let autoscroll = self + .selections + .all::(cx) + .iter() + .any(|selection| fold_range.overlaps(&selection.range())); + + self.fold_ranges(std::iter::once(fold_range), autoscroll, cx); + } + } + + pub fn unfold_lines(&mut self, _: &UnfoldLines, cx: 
&mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let selections = self.selections.all::(cx); + let ranges = selections + .iter() + .map(|s| { + let range = s.display_range(&display_map).sorted(); + let mut start = range.start.to_point(&display_map); + let mut end = range.end.to_point(&display_map); + start.column = 0; + end.column = buffer.line_len(MultiBufferRow(end.row)); + start..end + }) + .collect::>(); + + self.unfold_ranges(ranges, true, true, cx); + } + + pub fn unfold_at(&mut self, unfold_at: &UnfoldAt, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + + let intersection_range = Point::new(unfold_at.buffer_row.0, 0) + ..Point::new( + unfold_at.buffer_row.0, + display_map.buffer_snapshot.line_len(unfold_at.buffer_row), + ); + + let autoscroll = self + .selections + .all::(cx) + .iter() + .any(|selection| selection.range().overlaps(&intersection_range)); + + self.unfold_ranges(std::iter::once(intersection_range), true, autoscroll, cx) + } + + pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext) { + let selections = self.selections.all::(cx); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let line_mode = self.selections.line_mode; + let ranges = selections.into_iter().map(|s| { + if line_mode { + let start = Point::new(s.start.row, 0); + let end = Point::new( + s.end.row, + display_map + .buffer_snapshot + .line_len(MultiBufferRow(s.end.row)), + ); + start..end + } else { + s.start..s.end + } + }); + self.fold_ranges(ranges, true, cx); + } + + pub fn fold_ranges( + &mut self, + ranges: impl IntoIterator>, + auto_scroll: bool, + cx: &mut ViewContext, + ) { + let mut fold_ranges = Vec::new(); + let mut buffers_affected = HashMap::default(); + let multi_buffer = self.buffer().read(cx); + for range in ranges { + if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) { + buffers_affected.insert(buffer.read(cx).remote_id(), buffer); + }; + fold_ranges.push(range); + } + + let mut ranges = fold_ranges.into_iter().peekable(); + if ranges.peek().is_some() { + self.display_map.update(cx, |map, cx| map.fold(ranges, cx)); + + if auto_scroll { + self.request_autoscroll(Autoscroll::fit(), cx); + } + + for buffer in buffers_affected.into_values() { + self.sync_expanded_diff_hunks(buffer, cx); + } + + cx.notify(); + + if let Some(active_diagnostics) = self.active_diagnostics.take() { + // Clear diagnostics block when folding a range that contains it. 
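+                // The active group is taken out of `self`, compared against the new fold
+                // state, and then either restored as-is or restored and dismissed below.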
+ let snapshot = self.snapshot(cx); + if snapshot.intersects_fold(active_diagnostics.primary_range.start) { + drop(snapshot); + self.active_diagnostics = Some(active_diagnostics); + self.dismiss_diagnostics(cx); + } else { + self.active_diagnostics = Some(active_diagnostics); + } + } + + self.scrollbar_marker_state.dirty = true; + } + } + + pub fn unfold_ranges( + &mut self, + ranges: impl IntoIterator>, + inclusive: bool, + auto_scroll: bool, + cx: &mut ViewContext, + ) { + let mut unfold_ranges = Vec::new(); + let mut buffers_affected = HashMap::default(); + let multi_buffer = self.buffer().read(cx); + for range in ranges { + if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) { + buffers_affected.insert(buffer.read(cx).remote_id(), buffer); + }; + unfold_ranges.push(range); + } + + let mut ranges = unfold_ranges.into_iter().peekable(); + if ranges.peek().is_some() { + self.display_map + .update(cx, |map, cx| map.unfold(ranges, inclusive, cx)); + if auto_scroll { + self.request_autoscroll(Autoscroll::fit(), cx); + } + + for buffer in buffers_affected.into_values() { + self.sync_expanded_diff_hunks(buffer, cx); + } + + cx.notify(); + self.scrollbar_marker_state.dirty = true; + } + } + + pub fn set_gutter_hovered(&mut self, hovered: bool, cx: &mut ViewContext) { + if hovered != self.gutter_hovered { + self.gutter_hovered = hovered; + cx.notify(); + } + } + + pub fn insert_blocks( + &mut self, + blocks: impl IntoIterator>, + autoscroll: Option, + cx: &mut ViewContext, + ) -> Vec { + let blocks = self + .display_map + .update(cx, |display_map, cx| display_map.insert_blocks(blocks, cx)); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + blocks + } + + pub fn replace_blocks( + &mut self, + blocks: HashMap, + autoscroll: Option, + cx: &mut ViewContext, + ) { + self.display_map + .update(cx, |display_map, _| display_map.replace_blocks(blocks)); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + } + + pub fn remove_blocks( + &mut self, + block_ids: HashSet, + autoscroll: Option, + cx: &mut ViewContext, + ) { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(block_ids, cx) + }); + if let Some(autoscroll) = autoscroll { + self.request_autoscroll(autoscroll, cx); + } + } + + pub fn longest_row(&self, cx: &mut AppContext) -> DisplayRow { + self.display_map + .update(cx, |map, cx| map.snapshot(cx)) + .longest_row() + } + + pub fn max_point(&self, cx: &mut AppContext) -> DisplayPoint { + self.display_map + .update(cx, |map, cx| map.snapshot(cx)) + .max_point() + } + + pub fn text(&self, cx: &AppContext) -> String { + self.buffer.read(cx).read(cx).text() + } + + pub fn text_option(&self, cx: &AppContext) -> Option { + let text = self.text(cx); + let text = text.trim(); + + if text.is_empty() { + return None; + } + + Some(text.to_string()) + } + + pub fn set_text(&mut self, text: impl Into>, cx: &mut ViewContext) { + self.transact(cx, |this, cx| { + this.buffer + .read(cx) + .as_singleton() + .expect("you can only call set_text on editors for singleton buffers") + .update(cx, |buffer, cx| buffer.set_text(text, cx)); + }); + } + + pub fn display_text(&self, cx: &mut AppContext) -> String { + self.display_map + .update(cx, |map, cx| map.snapshot(cx)) + .text() + } + + pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> { + let mut wrap_guides = smallvec::smallvec![]; + + if self.show_wrap_guides == Some(false) { + return wrap_guides; + } + + 
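+        // Wrap guides come from two sources: the soft wrap column (when soft wrap is
+        // column based) and the explicit `wrap_guides` values in the language settings.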
let settings = self.buffer.read(cx).settings_at(0, cx); + if settings.show_wrap_guides { + if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) { + wrap_guides.push((soft_wrap as usize, true)); + } + wrap_guides.extend(settings.wrap_guides.iter().map(|guide| (*guide, false))) + } + + wrap_guides + } + + pub fn soft_wrap_mode(&self, cx: &AppContext) -> SoftWrap { + let settings = self.buffer.read(cx).settings_at(0, cx); + let mode = self + .soft_wrap_mode_override + .unwrap_or_else(|| settings.soft_wrap); + match mode { + language_settings::SoftWrap::None => SoftWrap::None, + language_settings::SoftWrap::PreferLine => SoftWrap::PreferLine, + language_settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth, + language_settings::SoftWrap::PreferredLineLength => { + SoftWrap::Column(settings.preferred_line_length) + } + } + } + + pub fn set_soft_wrap_mode( + &mut self, + mode: language_settings::SoftWrap, + cx: &mut ViewContext, + ) { + self.soft_wrap_mode_override = Some(mode); + cx.notify(); + } + + pub fn set_style(&mut self, style: EditorStyle, cx: &mut ViewContext) { + let rem_size = cx.rem_size(); + self.display_map.update(cx, |map, cx| { + map.set_font( + style.text.font(), + style.text.font_size.to_pixels(rem_size), + cx, + ) + }); + self.style = Some(style); + } + + pub fn style(&self) -> Option<&EditorStyle> { + self.style.as_ref() + } + + // Called by the element. This method is not designed to be called outside of the editor + // element's layout code because it does not notify when rewrapping is computed synchronously. + pub(crate) fn set_wrap_width(&self, width: Option, cx: &mut AppContext) -> bool { + self.display_map + .update(cx, |map, cx| map.set_wrap_width(width, cx)) + } + + pub fn toggle_soft_wrap(&mut self, _: &ToggleSoftWrap, cx: &mut ViewContext) { + if self.soft_wrap_mode_override.is_some() { + self.soft_wrap_mode_override.take(); + } else { + let soft_wrap = match self.soft_wrap_mode(cx) { + SoftWrap::None | SoftWrap::PreferLine => language_settings::SoftWrap::EditorWidth, + SoftWrap::EditorWidth | SoftWrap::Column(_) => { + language_settings::SoftWrap::PreferLine + } + }; + self.soft_wrap_mode_override = Some(soft_wrap); + } + cx.notify(); + } + + pub fn toggle_line_numbers(&mut self, _: &ToggleLineNumbers, cx: &mut ViewContext) { + let mut editor_settings = EditorSettings::get_global(cx).clone(); + editor_settings.gutter.line_numbers = !editor_settings.gutter.line_numbers; + EditorSettings::override_global(editor_settings, cx); + } + + pub fn set_show_gutter(&mut self, show_gutter: bool, cx: &mut ViewContext) { + self.show_gutter = show_gutter; + cx.notify(); + } + + pub fn set_show_wrap_guides(&mut self, show_gutter: bool, cx: &mut ViewContext) { + self.show_wrap_guides = Some(show_gutter); + cx.notify(); + } + + pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext) { + if let Some(buffer) = self.buffer().read(cx).as_singleton() { + if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { + cx.reveal_path(&file.abs_path(cx)); + } + } + } + + pub fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext) { + if let Some(buffer) = self.buffer().read(cx).as_singleton() { + if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { + if let Some(path) = file.abs_path(cx).to_str() { + cx.write_to_clipboard(ClipboardItem::new(path.to_string())); + } + } + } + } + + pub fn copy_relative_path(&mut self, _: &CopyRelativePath, cx: &mut ViewContext) { + if let Some(buffer) = self.buffer().read(cx).as_singleton() { + 
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { + if let Some(path) = file.path().to_str() { + cx.write_to_clipboard(ClipboardItem::new(path.to_string())); + } + } + } + } + + pub fn toggle_git_blame(&mut self, _: &ToggleGitBlame, cx: &mut ViewContext) { + self.show_git_blame_gutter = !self.show_git_blame_gutter; + + if self.show_git_blame_gutter && !self.has_blame_entries(cx) { + self.start_git_blame(true, cx); + } + + cx.notify(); + } + + pub fn toggle_git_blame_inline( + &mut self, + _: &ToggleGitBlameInline, + cx: &mut ViewContext, + ) { + self.toggle_git_blame_inline_internal(true, cx); + cx.notify(); + } + + pub fn git_blame_inline_enabled(&self) -> bool { + self.git_blame_inline_enabled + } + + fn start_git_blame(&mut self, user_triggered: bool, cx: &mut ViewContext) { + if let Some(project) = self.project.as_ref() { + let Some(buffer) = self.buffer().read(cx).as_singleton() else { + return; + }; + + if buffer.read(cx).file().is_none() { + return; + } + + let focused = self.focus_handle(cx).contains_focused(cx); + + let project = project.clone(); + let blame = + cx.new_model(|cx| GitBlame::new(buffer, project, user_triggered, focused, cx)); + self.blame_subscription = Some(cx.observe(&blame, |_, _, cx| cx.notify())); + self.blame = Some(blame); + } + } + + fn toggle_git_blame_inline_internal( + &mut self, + user_triggered: bool, + cx: &mut ViewContext, + ) { + if self.git_blame_inline_enabled { + self.git_blame_inline_enabled = false; + self.show_git_blame_inline = false; + self.show_git_blame_inline_delay_task.take(); + } else { + self.git_blame_inline_enabled = true; + self.start_git_blame_inline(user_triggered, cx); + } + + cx.notify(); + } + + fn start_git_blame_inline(&mut self, user_triggered: bool, cx: &mut ViewContext) { + self.start_git_blame(user_triggered, cx); + + if ProjectSettings::get_global(cx) + .git + .inline_blame_delay() + .is_some() + { + self.start_inline_blame_timer(cx); + } else { + self.show_git_blame_inline = true + } + } + + pub fn blame(&self) -> Option<&Model> { + self.blame.as_ref() + } + + pub fn render_git_blame_gutter(&mut self, cx: &mut WindowContext) -> bool { + self.show_git_blame_gutter && self.has_blame_entries(cx) + } + + pub fn render_git_blame_inline(&mut self, cx: &mut WindowContext) -> bool { + self.show_git_blame_inline + && self.focus_handle.is_focused(cx) + && !self.newest_selection_head_on_empty_line(cx) + && self.has_blame_entries(cx) + } + + fn has_blame_entries(&self, cx: &mut WindowContext) -> bool { + self.blame() + .map_or(false, |blame| blame.read(cx).has_generated_entries()) + } + + fn newest_selection_head_on_empty_line(&mut self, cx: &mut WindowContext) -> bool { + let cursor_anchor = self.selections.newest_anchor().head(); + + let snapshot = self.buffer.read(cx).snapshot(cx); + let buffer_row = MultiBufferRow(cursor_anchor.to_point(&snapshot).row); + + snapshot.line_len(buffer_row) == 0 + } + + fn get_permalink_to_line(&mut self, cx: &mut ViewContext) -> Result { + let (path, repo) = maybe!({ + let project_handle = self.project.as_ref()?.clone(); + let project = project_handle.read(cx); + let buffer = self.buffer().read(cx).as_singleton()?; + let path = buffer + .read(cx) + .file()? + .as_local()? + .path() + .to_str()? 
+ .to_string(); + let repo = project.get_repo(&buffer.read(cx).project_path(cx)?, cx)?; + Some((path, repo)) + }) + .ok_or_else(|| anyhow!("unable to open git repository"))?; + + const REMOTE_NAME: &str = "origin"; + let origin_url = repo + .lock() + .remote_url(REMOTE_NAME) + .ok_or_else(|| anyhow!("remote \"{REMOTE_NAME}\" not found"))?; + let sha = repo + .lock() + .head_sha() + .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?; + let selections = self.selections.all::(cx); + let selection = selections.iter().peekable().next(); + + let (provider, remote) = + parse_git_remote_url(GitHostingProviderRegistry::default_global(cx), &origin_url) + .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?; + + Ok(provider.build_permalink( + remote, + BuildPermalinkParams { + sha: &sha, + path: &path, + selection: selection.map(|selection| { + let range = selection.range(); + let start = range.start.row; + let end = range.end.row; + start..end + }), + }, + )) + } + + pub fn copy_permalink_to_line(&mut self, _: &CopyPermalinkToLine, cx: &mut ViewContext) { + let permalink = self.get_permalink_to_line(cx); + + match permalink { + Ok(permalink) => { + cx.write_to_clipboard(ClipboardItem::new(permalink.to_string())); + } + Err(err) => { + let message = format!("Failed to copy permalink: {err}"); + + Err::<(), anyhow::Error>(err).log_err(); + + if let Some(workspace) = self.workspace() { + workspace.update(cx, |workspace, cx| { + struct CopyPermalinkToLine; + + workspace.show_toast( + Toast::new(NotificationId::unique::(), message), + cx, + ) + }) + } + } + } + } + + pub fn open_permalink_to_line(&mut self, _: &OpenPermalinkToLine, cx: &mut ViewContext) { + let permalink = self.get_permalink_to_line(cx); + + match permalink { + Ok(permalink) => { + cx.open_url(permalink.as_ref()); + } + Err(err) => { + let message = format!("Failed to open permalink: {err}"); + + Err::<(), anyhow::Error>(err).log_err(); + + if let Some(workspace) = self.workspace() { + workspace.update(cx, |workspace, cx| { + struct OpenPermalinkToLine; + + workspace.show_toast( + Toast::new(NotificationId::unique::(), message), + cx, + ) + }) + } + } + } + } + + /// Adds or removes (on `None` color) a highlight for the rows corresponding to the anchor range given. + /// On matching anchor range, replaces the old highlight; does not clear the other existing highlights. + /// If multiple anchor ranges will produce highlights for the same row, the last range added will be used. + pub fn highlight_rows( + &mut self, + rows: RangeInclusive, + color: Option, + should_autoscroll: bool, + cx: &mut ViewContext, + ) { + let snapshot = self.buffer().read(cx).snapshot(cx); + let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); + let existing_highlight_index = row_highlights.binary_search_by(|highlight| { + highlight + .range + .start() + .cmp(&rows.start(), &snapshot) + .then(highlight.range.end().cmp(&rows.end(), &snapshot)) + }); + match (color, existing_highlight_index) { + (Some(_), Ok(ix)) | (_, Err(ix)) => row_highlights.insert( + ix, + RowHighlight { + index: post_inc(&mut self.highlight_order), + range: rows, + should_autoscroll, + color, + }, + ), + (None, Ok(i)) => { + row_highlights.remove(i); + } + } + } + + /// Clear all anchor ranges for a certain highlight context type, so no corresponding rows will be highlighted. 
+ pub fn clear_row_highlights(&mut self) { + self.highlighted_rows.remove(&TypeId::of::()); + } + + /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. + pub fn highlighted_rows( + &self, + ) -> Option, Option<&Hsla>)>> { + Some( + self.highlighted_rows + .get(&TypeId::of::())? + .iter() + .map(|highlight| (&highlight.range, highlight.color.as_ref())), + ) + } + + /// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict. + /// Rerturns a map of display rows that are highlighted and their corresponding highlight color. + /// Allows to ignore certain kinds of highlights. + pub fn highlighted_display_rows( + &mut self, + cx: &mut WindowContext, + ) -> BTreeMap { + let snapshot = self.snapshot(cx); + let mut used_highlight_orders = HashMap::default(); + self.highlighted_rows + .iter() + .flat_map(|(_, highlighted_rows)| highlighted_rows.iter()) + .fold( + BTreeMap::::new(), + |mut unique_rows, highlight| { + let start_row = highlight.range.start().to_display_point(&snapshot).row(); + let end_row = highlight.range.end().to_display_point(&snapshot).row(); + for row in start_row.0..=end_row.0 { + let used_index = + used_highlight_orders.entry(row).or_insert(highlight.index); + if highlight.index >= *used_index { + *used_index = highlight.index; + match highlight.color { + Some(hsla) => unique_rows.insert(DisplayRow(row), hsla), + None => unique_rows.remove(&DisplayRow(row)), + }; + } + } + unique_rows + }, + ) + } + + pub fn highlighted_display_row_for_autoscroll( + &self, + snapshot: &DisplaySnapshot, + ) -> Option { + self.highlighted_rows + .values() + .flat_map(|highlighted_rows| highlighted_rows.iter()) + .filter_map(|highlight| { + if highlight.color.is_none() || !highlight.should_autoscroll { + return None; + } + Some(highlight.range.start().to_display_point(&snapshot).row()) + }) + .min() + } + + pub fn set_search_within_ranges( + &mut self, + ranges: &[Range], + cx: &mut ViewContext, + ) { + self.highlight_background::( + ranges, + |colors| colors.editor_document_highlight_read_background, + cx, + ) + } + + pub fn highlight_background( + &mut self, + ranges: &[Range], + color_fetcher: fn(&ThemeColors) -> Hsla, + cx: &mut ViewContext, + ) { + let snapshot = self.snapshot(cx); + // this is to try and catch a panic sooner + for range in ranges { + snapshot + .buffer_snapshot + .summary_for_anchor::(&range.start); + snapshot + .buffer_snapshot + .summary_for_anchor::(&range.end); + } + + self.background_highlights + .insert(TypeId::of::(), (color_fetcher, Arc::from(ranges))); + self.scrollbar_marker_state.dirty = true; + cx.notify(); + } + + pub fn clear_background_highlights( + &mut self, + cx: &mut ViewContext, + ) -> Option { + let text_highlights = self.background_highlights.remove(&TypeId::of::())?; + if !text_highlights.1.is_empty() { + self.scrollbar_marker_state.dirty = true; + cx.notify(); + } + Some(text_highlights) + } + + #[cfg(feature = "test-support")] + pub fn all_text_background_highlights( + &mut self, + cx: &mut ViewContext, + ) -> Vec<(Range, Hsla)> { + let snapshot = self.snapshot(cx); + let buffer = &snapshot.buffer_snapshot; + let start = buffer.anchor_before(0); + let end = buffer.anchor_after(buffer.len()); + let theme = cx.theme().colors(); + self.background_highlights_in_range(start..end, &snapshot, theme) + } + + fn document_highlights_for_position<'a>( + &'a self, + position: Anchor, + buffer: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator> { + let 
read_highlights = self + .background_highlights + .get(&TypeId::of::()) + .map(|h| &h.1); + let write_highlights = self + .background_highlights + .get(&TypeId::of::()) + .map(|h| &h.1); + let left_position = position.bias_left(buffer); + let right_position = position.bias_right(buffer); + read_highlights + .into_iter() + .chain(write_highlights) + .flat_map(move |ranges| { + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe.end.cmp(&left_position, buffer); + if cmp.is_ge() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + + ranges[start_ix..] + .iter() + .take_while(move |range| range.start.cmp(&right_position, buffer).is_le()) + }) + } + + pub fn has_background_highlights(&self) -> bool { + self.background_highlights + .get(&TypeId::of::()) + .map_or(false, |(_, highlights)| !highlights.is_empty()) + } + + pub fn background_highlights_in_range( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + theme: &ThemeColors, + ) -> Vec<(Range, Hsla)> { + let mut results = Vec::new(); + for (color_fetcher, ranges) in self.background_highlights.values() { + let color = color_fetcher(theme); + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe + .end + .cmp(&search_range.start, &display_snapshot.buffer_snapshot); + if cmp.is_gt() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + for range in &ranges[start_ix..] { + if range + .start + .cmp(&search_range.end, &display_snapshot.buffer_snapshot) + .is_ge() + { + break; + } + + let start = range.start.to_display_point(&display_snapshot); + let end = range.end.to_display_point(&display_snapshot); + results.push((start..end, color)) + } + } + results + } + + pub fn background_highlight_row_ranges( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + count: usize, + ) -> Vec> { + let mut results = Vec::new(); + let Some((_, ranges)) = self.background_highlights.get(&TypeId::of::()) else { + return vec![]; + }; + + let start_ix = match ranges.binary_search_by(|probe| { + let cmp = probe + .end + .cmp(&search_range.start, &display_snapshot.buffer_snapshot); + if cmp.is_gt() { + Ordering::Greater + } else { + Ordering::Less + } + }) { + Ok(i) | Err(i) => i, + }; + let mut push_region = |start: Option, end: Option| { + if let (Some(start_display), Some(end_display)) = (start, end) { + results.push( + start_display.to_display_point(display_snapshot) + ..=end_display.to_display_point(display_snapshot), + ); + } + }; + let mut start_row: Option = None; + let mut end_row: Option = None; + if ranges.len() > count { + return Vec::new(); + } + for range in &ranges[start_ix..] { + if range + .start + .cmp(&search_range.end, &display_snapshot.buffer_snapshot) + .is_ge() + { + break; + } + let end = range.end.to_point(&display_snapshot.buffer_snapshot); + if let Some(current_row) = &end_row { + if end.row == current_row.row { + continue; + } + } + let start = range.start.to_point(&display_snapshot.buffer_snapshot); + if start_row.is_none() { + assert_eq!(end_row, None); + start_row = Some(start); + end_row = Some(end); + continue; + } + if let Some(current_end) = end_row.as_mut() { + if start.row > current_end.row + 1 { + push_region(start_row, end_row); + start_row = Some(start); + end_row = Some(end); + } else { + // Merge two hunks. 
+ *current_end = end; + } + } else { + unreachable!(); + } + } + // We might still have a hunk that was not rendered (if there was a search hit on the last line) + push_region(start_row, end_row); + results + } + + /// Get the text ranges corresponding to the redaction query + pub fn redacted_ranges( + &self, + search_range: Range, + display_snapshot: &DisplaySnapshot, + cx: &WindowContext, + ) -> Vec> { + display_snapshot + .buffer_snapshot + .redacted_ranges(search_range, |file| { + if let Some(file) = file { + file.is_private() + && EditorSettings::get(Some(file.as_ref().into()), cx).redact_private_values + } else { + false + } + }) + .map(|range| { + range.start.to_display_point(display_snapshot) + ..range.end.to_display_point(display_snapshot) + }) + .collect() + } + + pub fn highlight_text( + &mut self, + ranges: Vec>, + style: HighlightStyle, + cx: &mut ViewContext, + ) { + self.display_map.update(cx, |map, _| { + map.highlight_text(TypeId::of::(), ranges, style) + }); + cx.notify(); + } + + pub(crate) fn highlight_inlays( + &mut self, + highlights: Vec, + style: HighlightStyle, + cx: &mut ViewContext, + ) { + self.display_map.update(cx, |map, _| { + map.highlight_inlays(TypeId::of::(), highlights, style) + }); + cx.notify(); + } + + pub fn text_highlights<'a, T: 'static>( + &'a self, + cx: &'a AppContext, + ) -> Option<(HighlightStyle, &'a [Range])> { + self.display_map.read(cx).text_highlights(TypeId::of::()) + } + + pub fn clear_highlights(&mut self, cx: &mut ViewContext) { + let cleared = self + .display_map + .update(cx, |map, _| map.clear_highlights(TypeId::of::())); + if cleared { + cx.notify(); + } + } + + pub fn show_local_cursors(&self, cx: &WindowContext) -> bool { + (self.read_only(cx) || self.blink_manager.read(cx).visible()) + && self.focus_handle.is_focused(cx) + } + + fn on_buffer_changed(&mut self, _: Model, cx: &mut ViewContext) { + cx.notify(); + } + + fn on_buffer_event( + &mut self, + multibuffer: Model, + event: &multi_buffer::Event, + cx: &mut ViewContext, + ) { + match event { + multi_buffer::Event::Edited { + singleton_buffer_edited, + } => { + self.scrollbar_marker_state.dirty = true; + self.refresh_active_diagnostics(cx); + self.refresh_code_actions(cx); + if self.has_active_inline_completion(cx) { + self.update_visible_inline_completion(cx); + } + cx.emit(EditorEvent::BufferEdited); + cx.emit(SearchEvent::MatchesInvalidated); + + if *singleton_buffer_edited { + if let Some(project) = &self.project { + let project = project.read(cx); + let languages_affected = multibuffer + .read(cx) + .all_buffers() + .into_iter() + .filter_map(|buffer| { + let buffer = buffer.read(cx); + let language = buffer.language()?; + if project.is_local() + && project.language_servers_for_buffer(buffer, cx).count() == 0 + { + None + } else { + Some(language) + } + }) + .cloned() + .collect::>(); + if !languages_affected.is_empty() { + self.refresh_inlay_hints( + InlayHintRefreshReason::BufferEdited(languages_affected), + cx, + ); + } + } + } + + let Some(project) = &self.project else { return }; + let telemetry = project.read(cx).client().telemetry().clone(); + telemetry.log_edit_event("editor"); + } + multi_buffer::Event::ExcerptsAdded { + buffer, + predecessor, + excerpts, + } => { + self.tasks_update_task = Some(self.refresh_runnables(cx)); + cx.emit(EditorEvent::ExcerptsAdded { + buffer: buffer.clone(), + predecessor: *predecessor, + excerpts: excerpts.clone(), + }); + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + } + 
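The background-highlight queries above (`document_highlights_for_position`, `background_highlights_in_range`, `background_highlight_row_ranges`) all rely on the same trick: the stored ranges are kept in order, so a `binary_search_by` whose comparator never returns `Equal` yields the partition point (the first range that could still overlap the query), and the scan then stops as soon as a range starts past the query's end. A minimal standalone sketch of that pattern over plain `usize` ranges (the helper name and types here are illustrative, not part of this patch):

```rust
use std::cmp::Ordering;
use std::ops::Range;

/// Yields every range in `ranges` (sorted by end) that overlaps `query`.
/// Same partition-point + early-exit scan as the highlight lookups above.
fn overlapping<'a>(
    ranges: &'a [Range<usize>],
    query: Range<usize>,
) -> impl Iterator<Item = &'a Range<usize>> {
    // The comparator never returns Equal, so `Ok(i) | Err(i)` is the index of
    // the first range whose end lies past the query start.
    let start_ix = match ranges.binary_search_by(|probe| {
        if probe.end > query.start {
            Ordering::Greater
        } else {
            Ordering::Less
        }
    }) {
        Ok(i) | Err(i) => i,
    };

    ranges[start_ix..]
        .iter()
        .take_while(move |range| range.start < query.end)
}

fn main() {
    // Kept sorted by range end, as the background-highlight maps are.
    let ranges = vec![0..2, 1..4, 6..9, 10..12];
    let hits: Vec<_> = overlapping(&ranges, 3..11).cloned().collect();
    assert_eq!(hits, vec![1..4, 6..9, 10..12]);
}
```

Because the comparator collapses `Equal` into `Greater`/`Less`, both arms of the `binary_search_by` result carry the same partition index, which is why the original code matches `Ok(i) | Err(i)` rather than treating the two cases differently.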
multi_buffer::Event::ExcerptsRemoved { ids } => { + self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); + cx.emit(EditorEvent::ExcerptsRemoved { ids: ids.clone() }) + } + multi_buffer::Event::Reparsed => { + self.tasks_update_task = Some(self.refresh_runnables(cx)); + + cx.emit(EditorEvent::Reparsed); + } + multi_buffer::Event::LanguageChanged => { + cx.emit(EditorEvent::Reparsed); + cx.notify(); + } + multi_buffer::Event::DirtyChanged => cx.emit(EditorEvent::DirtyChanged), + multi_buffer::Event::Saved => cx.emit(EditorEvent::Saved), + multi_buffer::Event::FileHandleChanged | multi_buffer::Event::Reloaded => { + cx.emit(EditorEvent::TitleChanged) + } + multi_buffer::Event::DiffBaseChanged => { + self.scrollbar_marker_state.dirty = true; + cx.emit(EditorEvent::DiffBaseChanged); + cx.notify(); + } + multi_buffer::Event::DiffUpdated { buffer } => { + self.sync_expanded_diff_hunks(buffer.clone(), cx); + cx.notify(); + } + multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed), + multi_buffer::Event::DiagnosticsUpdated => { + self.refresh_active_diagnostics(cx); + self.scrollbar_marker_state.dirty = true; + cx.notify(); + } + _ => {} + }; + } + + fn on_display_map_changed(&mut self, _: Model, cx: &mut ViewContext) { + cx.notify(); + } + + fn settings_changed(&mut self, cx: &mut ViewContext) { + self.refresh_inline_completion(true, cx); + self.refresh_inlay_hints( + InlayHintRefreshReason::SettingsChange(inlay_hint_settings( + self.selections.newest_anchor().head(), + &self.buffer.read(cx).snapshot(cx), + cx, + )), + cx, + ); + let editor_settings = EditorSettings::get_global(cx); + self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; + self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; + self.current_line_highlight = editor_settings.current_line_highlight; + + if self.mode == EditorMode::Full { + let inline_blame_enabled = ProjectSettings::get_global(cx).git.inline_blame_enabled(); + if self.git_blame_inline_enabled != inline_blame_enabled { + self.toggle_git_blame_inline_internal(false, cx); + } + } + + cx.notify(); + } + + pub fn set_searchable(&mut self, searchable: bool) { + self.searchable = searchable; + } + + pub fn searchable(&self) -> bool { + self.searchable + } + + fn open_excerpts_in_split(&mut self, _: &OpenExcerptsSplit, cx: &mut ViewContext) { + self.open_excerpts_common(true, cx) + } + + fn open_excerpts(&mut self, _: &OpenExcerpts, cx: &mut ViewContext) { + self.open_excerpts_common(false, cx) + } + + fn open_excerpts_common(&mut self, split: bool, cx: &mut ViewContext) { + let buffer = self.buffer.read(cx); + if buffer.is_singleton() { + cx.propagate(); + return; + } + + let Some(workspace) = self.workspace() else { + cx.propagate(); + return; + }; + + let mut new_selections_by_buffer = HashMap::default(); + for selection in self.selections.all::(cx) { + for (buffer, mut range, _) in + buffer.range_to_buffer_ranges(selection.start..selection.end, cx) + { + if selection.reversed { + mem::swap(&mut range.start, &mut range.end); + } + new_selections_by_buffer + .entry(buffer) + .or_insert(Vec::new()) + .push(range) + } + } + + // We defer the pane interaction because we ourselves are a workspace item + // and activating a new item causes the pane to call a method on us reentrantly, + // which panics if we're on the stack. 
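The comment just above explains why the pane interaction is pushed into the `cx.window_context().defer(...)` call that follows instead of being run inline: the editor is itself a workspace item, so activating a new item re-enters the editor while it is already borrowed on the stack. A tiny standalone sketch of the same defer-instead-of-reenter pattern, using `RefCell` and an explicit queue in place of the real GPUI context types (all names here are invented for illustration):

```rust
use std::cell::RefCell;
use std::collections::VecDeque;

// Work that must not run while the workspace is mutably borrowed.
type Deferred = Box<dyn FnOnce(&mut Workspace)>;

struct Workspace {
    items: Vec<String>,
    deferred: VecDeque<Deferred>,
}

impl Workspace {
    // Queue the callback instead of calling back into ourselves immediately.
    fn defer(&mut self, f: impl FnOnce(&mut Workspace) + 'static) {
        self.deferred.push_back(Box::new(f));
    }

    // Runs once the current update (and its borrow) has finished.
    fn flush_deferred(&mut self) {
        while let Some(f) = self.deferred.pop_front() {
            f(self);
        }
    }
}

fn main() {
    let workspace = RefCell::new(Workspace {
        items: vec!["editor".into()],
        deferred: VecDeque::new(),
    });

    {
        // While this mutable borrow is live, calling back into `workspace`
        // would panic with a RefCell double-borrow -- so we defer instead.
        let mut ws = workspace.borrow_mut();
        ws.defer(|ws| ws.items.push("new item".into()));
    } // borrow released here

    workspace.borrow_mut().flush_deferred();
    assert_eq!(workspace.borrow().items, vec!["editor", "new item"]);
}
```

GPUI's `defer` plays the same role: the closure runs only after the current update has finished, so by the time it executes no editor borrow is left on the stack.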
+ cx.window_context().defer(move |cx| { + workspace.update(cx, |workspace, cx| { + let pane = if split { + workspace.adjacent_pane(cx) + } else { + workspace.active_pane().clone() + }; + + for (buffer, ranges) in new_selections_by_buffer { + let editor = workspace.open_project_item::(pane.clone(), buffer, cx); + editor.update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::newest()), cx, |s| { + s.select_ranges(ranges); + }); + }); + } + }) + }); + } + + fn jump( + &mut self, + path: ProjectPath, + position: Point, + anchor: language::Anchor, + offset_from_top: u32, + cx: &mut ViewContext, + ) { + let workspace = self.workspace(); + cx.spawn(|_, mut cx| async move { + let workspace = workspace.ok_or_else(|| anyhow!("cannot jump without workspace"))?; + let editor = workspace.update(&mut cx, |workspace, cx| { + // Reset the preview item id before opening the new item + workspace.active_pane().update(cx, |pane, cx| { + pane.set_preview_item_id(None, cx); + }); + workspace.open_path_preview(path, None, true, true, cx) + })?; + let editor = editor + .await? + .downcast::() + .ok_or_else(|| anyhow!("opened item was not an editor"))? + .downgrade(); + editor.update(&mut cx, |editor, cx| { + let buffer = editor + .buffer() + .read(cx) + .as_singleton() + .ok_or_else(|| anyhow!("cannot jump in a multi-buffer"))?; + let buffer = buffer.read(cx); + let cursor = if buffer.can_resolve(&anchor) { + language::ToPoint::to_point(&anchor, buffer) + } else { + buffer.clip_point(position, Bias::Left) + }; + + let nav_history = editor.nav_history.take(); + editor.change_selections( + Some(Autoscroll::top_relative(offset_from_top as usize)), + cx, + |s| { + s.select_ranges([cursor..cursor]); + }, + ); + editor.nav_history = nav_history; + + anyhow::Ok(()) + })??; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn marked_text_ranges(&self, cx: &AppContext) -> Option>> { + let snapshot = self.buffer.read(cx).read(cx); + let (_, ranges) = self.text_highlights::(cx)?; + Some( + ranges + .iter() + .map(move |range| { + range.start.to_offset_utf16(&snapshot)..range.end.to_offset_utf16(&snapshot) + }) + .collect(), + ) + } + + fn selection_replacement_ranges( + &self, + range: Range, + cx: &AppContext, + ) -> Vec> { + let selections = self.selections.all::(cx); + let newest_selection = selections + .iter() + .max_by_key(|selection| selection.id) + .unwrap(); + let start_delta = range.start.0 as isize - newest_selection.start.0 as isize; + let end_delta = range.end.0 as isize - newest_selection.end.0 as isize; + let snapshot = self.buffer.read(cx).read(cx); + selections + .into_iter() + .map(|mut selection| { + selection.start.0 = + (selection.start.0 as isize).saturating_add(start_delta) as usize; + selection.end.0 = (selection.end.0 as isize).saturating_add(end_delta) as usize; + snapshot.clip_offset_utf16(selection.start, Bias::Left) + ..snapshot.clip_offset_utf16(selection.end, Bias::Right) + }) + .collect() + } + + fn report_editor_event( + &self, + operation: &'static str, + file_extension: Option, + cx: &AppContext, + ) { + if cfg!(any(test, feature = "test-support")) { + return; + } + + let Some(project) = &self.project else { return }; + + // If None, we are in a file without an extension + let file = self + .buffer + .read(cx) + .as_singleton() + .and_then(|b| b.read(cx).file()); + let file_extension = file_extension.or(file + .as_ref() + .and_then(|file| Path::new(file.file_name(cx)).extension()) + .and_then(|e| e.to_str()) + .map(|a| a.to_string())); + + let vim_mode = cx + 
.global::() + .raw_user_settings() + .get("vim_mode") + == Some(&serde_json::Value::Bool(true)); + + let copilot_enabled = all_language_settings(file, cx).inline_completions.provider + == language::language_settings::InlineCompletionProvider::Copilot; + let copilot_enabled_for_language = self + .buffer + .read(cx) + .settings_at(0, cx) + .show_inline_completions; + + let telemetry = project.read(cx).client().telemetry().clone(); + telemetry.report_editor_event( + file_extension, + vim_mode, + operation, + copilot_enabled, + copilot_enabled_for_language, + ) + } + + /// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines, + /// with each line being an array of {text, highlight} objects. + fn copy_highlight_json(&mut self, _: &CopyHighlightJson, cx: &mut ViewContext) { + let Some(buffer) = self.buffer.read(cx).as_singleton() else { + return; + }; + + #[derive(Serialize)] + struct Chunk<'a> { + text: String, + highlight: Option<&'a str>, + } + + let snapshot = buffer.read(cx).snapshot(); + let range = self + .selected_text_range(cx) + .and_then(|selected_range| { + if selected_range.is_empty() { + None + } else { + Some(selected_range) + } + }) + .unwrap_or_else(|| 0..snapshot.len()); + + let chunks = snapshot.chunks(range, true); + let mut lines = Vec::new(); + let mut line: VecDeque = VecDeque::new(); + + let Some(style) = self.style.as_ref() else { + return; + }; + + for chunk in chunks { + let highlight = chunk + .syntax_highlight_id + .and_then(|id| id.name(&style.syntax)); + let mut chunk_lines = chunk.text.split('\n').peekable(); + while let Some(text) = chunk_lines.next() { + let mut merged_with_last_token = false; + if let Some(last_token) = line.back_mut() { + if last_token.highlight == highlight { + last_token.text.push_str(text); + merged_with_last_token = true; + } + } + + if !merged_with_last_token { + line.push_back(Chunk { + text: text.into(), + highlight, + }); + } + + if chunk_lines.peek().is_some() { + if line.len() > 1 && line.front().unwrap().text.is_empty() { + line.pop_front(); + } + if line.len() > 1 && line.back().unwrap().text.is_empty() { + line.pop_back(); + } + + lines.push(mem::take(&mut line)); + } + } + } + + let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else { + return; + }; + cx.write_to_clipboard(ClipboardItem::new(lines)); + } + + pub fn inlay_hint_cache(&self) -> &InlayHintCache { + &self.inlay_hint_cache + } + + pub fn replay_insert_event( + &mut self, + text: &str, + relative_utf16_range: Option>, + cx: &mut ViewContext, + ) { + if !self.input_enabled { + cx.emit(EditorEvent::InputIgnored { text: text.into() }); + return; + } + if let Some(relative_utf16_range) = relative_utf16_range { + let selections = self.selections.all::(cx); + self.change_selections(None, cx, |s| { + let new_ranges = selections.into_iter().map(|range| { + let start = OffsetUtf16( + range + .head() + .0 + .saturating_add_signed(relative_utf16_range.start), + ); + let end = OffsetUtf16( + range + .head() + .0 + .saturating_add_signed(relative_utf16_range.end), + ); + start..end + }); + s.select_ranges(new_ranges); + }); + } + + self.handle_input(text, cx); + } + + pub fn supports_inlay_hints(&self, cx: &AppContext) -> bool { + let Some(project) = self.project.as_ref() else { + return false; + }; + let project = project.read(cx); + + let mut supports = false; + self.buffer().read(cx).for_each_buffer(|buffer| { + if !supports { + supports = project + .language_servers_for_buffer(buffer.read(cx), cx) + .any( + |(_, server)| match 
server.capabilities().inlay_hint_provider { + Some(lsp::OneOf::Left(enabled)) => enabled, + Some(lsp::OneOf::Right(_)) => true, + None => false, + }, + ) + } + }); + supports + } + + pub fn focus(&self, cx: &mut WindowContext) { + cx.focus(&self.focus_handle) + } + + pub fn is_focused(&self, cx: &WindowContext) -> bool { + self.focus_handle.is_focused(cx) + } + + fn handle_focus(&mut self, cx: &mut ViewContext) { + cx.emit(EditorEvent::Focused); + + if let Some(rename) = self.pending_rename.as_ref() { + let rename_editor_focus_handle = rename.editor.read(cx).focus_handle.clone(); + cx.focus(&rename_editor_focus_handle); + } else { + if let Some(blame) = self.blame.as_ref() { + blame.update(cx, GitBlame::focus) + } + + self.blink_manager.update(cx, BlinkManager::enable); + self.show_cursor_names(cx); + self.buffer.update(cx, |buffer, cx| { + buffer.finalize_last_transaction(cx); + if self.leader_peer_id.is_none() { + buffer.set_active_selections( + &self.selections.disjoint_anchors(), + self.selections.line_mode, + self.cursor_shape, + cx, + ); + } + }); + } + } + + pub fn handle_blur(&mut self, cx: &mut ViewContext) { + self.blink_manager.update(cx, BlinkManager::disable); + self.buffer + .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); + + if let Some(blame) = self.blame.as_ref() { + blame.update(cx, GitBlame::blur) + } + self.hide_context_menu(cx); + hide_hover(self, cx); + cx.emit(EditorEvent::Blurred); + cx.notify(); + } + + pub fn register_action( + &mut self, + listener: impl Fn(&A, &mut WindowContext) + 'static, + ) -> &mut Self { + let listener = Arc::new(listener); + + self.editor_actions.push(Box::new(move |cx| { + let _view = cx.view().clone(); + let cx = cx.window_context(); + let listener = listener.clone(); + cx.on_action(TypeId::of::(), move |action, phase, cx| { + let action = action.downcast_ref().unwrap(); + if phase == DispatchPhase::Bubble { + listener(action, cx) + } + }) + })); + self + } +} + +fn hunks_for_selections( + multi_buffer_snapshot: &MultiBufferSnapshot, + selections: &[Selection], +) -> Vec> { + let mut hunks = Vec::with_capacity(selections.len()); + let mut processed_buffer_rows: HashMap>> = + HashMap::default(); + let buffer_rows_for_selections = selections.iter().map(|selection| { + let head = selection.head(); + let tail = selection.tail(); + let start = MultiBufferRow(tail.to_point(&multi_buffer_snapshot).row); + let end = MultiBufferRow(head.to_point(&multi_buffer_snapshot).row); + if start > end { + end..start + } else { + start..end + } + }); + + for selected_multi_buffer_rows in buffer_rows_for_selections { + let query_rows = + selected_multi_buffer_rows.start..selected_multi_buffer_rows.end.next_row(); + for hunk in multi_buffer_snapshot.git_diff_hunks_in_range(query_rows.clone()) { + // Deleted hunk is an empty row range, no caret can be placed there and Zed allows to revert it + // when the caret is just above or just below the deleted hunk. + let allow_adjacent = hunk_status(&hunk) == DiffHunkStatus::Removed; + let related_to_selection = if allow_adjacent { + hunk.associated_range.overlaps(&query_rows) + || hunk.associated_range.start == query_rows.end + || hunk.associated_range.end == query_rows.start + } else { + // `selected_multi_buffer_rows` are inclusive (e.g. [2..2] means 2nd row is selected) + // `hunk.associated_range` is exclusive (e.g. 
[2..3] means 2nd row is selected) + hunk.associated_range.overlaps(&selected_multi_buffer_rows) + || selected_multi_buffer_rows.end == hunk.associated_range.start + }; + if related_to_selection { + if !processed_buffer_rows + .entry(hunk.buffer_id) + .or_default() + .insert(hunk.buffer_range.start..hunk.buffer_range.end) + { + continue; + } + hunks.push(hunk); + } + } + } + + hunks +} + +pub trait CollaborationHub { + fn collaborators<'a>(&self, cx: &'a AppContext) -> &'a HashMap; + fn user_participant_indices<'a>( + &self, + cx: &'a AppContext, + ) -> &'a HashMap; + fn user_names(&self, cx: &AppContext) -> HashMap; +} + +impl CollaborationHub for Model { + fn collaborators<'a>(&self, cx: &'a AppContext) -> &'a HashMap { + self.read(cx).collaborators() + } + + fn user_participant_indices<'a>( + &self, + cx: &'a AppContext, + ) -> &'a HashMap { + self.read(cx).user_store().read(cx).participant_indices() + } + + fn user_names(&self, cx: &AppContext) -> HashMap { + let this = self.read(cx); + let user_ids = this.collaborators().values().map(|c| c.user_id); + this.user_store().read_with(cx, |user_store, cx| { + user_store.participant_names(user_ids, cx) + }) + } +} + +pub trait CompletionProvider { + fn completions( + &self, + buffer: &Model, + buffer_position: text::Anchor, + cx: &mut ViewContext, + ) -> Task>>; + + fn resolve_completions( + &self, + buffer: Model, + completion_indices: Vec, + completions: Arc>>, + cx: &mut ViewContext, + ) -> Task>; + + fn apply_additional_edits_for_completion( + &self, + buffer: Model, + completion: Completion, + push_to_history: bool, + cx: &mut ViewContext, + ) -> Task>>; +} + +impl CompletionProvider for Model { + fn completions( + &self, + buffer: &Model, + buffer_position: text::Anchor, + cx: &mut ViewContext, + ) -> Task>> { + self.update(cx, |project, cx| { + project.completions(&buffer, buffer_position, cx) + }) + } + + fn resolve_completions( + &self, + buffer: Model, + completion_indices: Vec, + completions: Arc>>, + cx: &mut ViewContext, + ) -> Task> { + self.update(cx, |project, cx| { + project.resolve_completions(buffer, completion_indices, completions, cx) + }) + } + + fn apply_additional_edits_for_completion( + &self, + buffer: Model, + completion: Completion, + push_to_history: bool, + cx: &mut ViewContext, + ) -> Task>> { + self.update(cx, |project, cx| { + project.apply_additional_edits_for_completion(buffer, completion, push_to_history, cx) + }) + } +} + +fn inlay_hint_settings( + location: Anchor, + snapshot: &MultiBufferSnapshot, + cx: &mut ViewContext<'_, Editor>, +) -> InlayHintSettings { + let file = snapshot.file_at(location); + let language = snapshot.language_at(location); + let settings = all_language_settings(file, cx); + settings + .language(language.map(|l| l.name()).as_deref()) + .inlay_hints +} + +fn consume_contiguous_rows( + contiguous_row_selections: &mut Vec>, + selection: &Selection, + display_map: &DisplaySnapshot, + selections: &mut std::iter::Peekable>>, +) -> (MultiBufferRow, MultiBufferRow) { + contiguous_row_selections.push(selection.clone()); + let start_row = MultiBufferRow(selection.start.row); + let mut end_row = ending_row(selection, display_map); + + while let Some(next_selection) = selections.peek() { + if next_selection.start.row <= end_row.0 { + end_row = ending_row(next_selection, display_map); + contiguous_row_selections.push(selections.next().unwrap().clone()); + } else { + break; + } + } + (start_row, end_row) +} + +fn ending_row(next_selection: &Selection, display_map: &DisplaySnapshot) -> 
MultiBufferRow { + if next_selection.end.column > 0 || next_selection.is_empty() { + MultiBufferRow(display_map.next_line_boundary(next_selection.end).0.row + 1) + } else { + MultiBufferRow(next_selection.end.row) + } +} + +impl EditorSnapshot { + pub fn remote_selections_in_range<'a>( + &'a self, + range: &'a Range, + collaboration_hub: &dyn CollaborationHub, + cx: &'a AppContext, + ) -> impl 'a + Iterator { + let participant_names = collaboration_hub.user_names(cx); + let participant_indices = collaboration_hub.user_participant_indices(cx); + let collaborators_by_peer_id = collaboration_hub.collaborators(cx); + let collaborators_by_replica_id = collaborators_by_peer_id + .iter() + .map(|(_, collaborator)| (collaborator.replica_id, collaborator)) + .collect::>(); + self.buffer_snapshot + .remote_selections_in_range(range) + .filter_map(move |(replica_id, line_mode, cursor_shape, selection)| { + let collaborator = collaborators_by_replica_id.get(&replica_id)?; + let participant_index = participant_indices.get(&collaborator.user_id).copied(); + let user_name = participant_names.get(&collaborator.user_id).cloned(); + Some(RemoteSelection { + replica_id, + selection, + cursor_shape, + line_mode, + participant_index, + peer_id: collaborator.peer_id, + user_name, + }) + }) + } + + pub fn language_at(&self, position: T) -> Option<&Arc> { + self.display_snapshot.buffer_snapshot.language_at(position) + } + + pub fn is_focused(&self) -> bool { + self.is_focused + } + + pub fn placeholder_text(&self) -> Option<&Arc> { + self.placeholder_text.as_ref() + } + + pub fn scroll_position(&self) -> gpui::Point { + self.scroll_anchor.scroll_position(&self.display_snapshot) + } + + pub fn gutter_dimensions( + &self, + font_id: FontId, + font_size: Pixels, + em_width: Pixels, + max_line_number_width: Pixels, + cx: &AppContext, + ) -> GutterDimensions { + if !self.show_gutter { + return GutterDimensions::default(); + } + let descent = cx.text_system().descent(font_id, font_size); + + let show_git_gutter = matches!( + ProjectSettings::get_global(cx).git.git_gutter, + Some(GitGutterSetting::TrackedFiles) + ); + let gutter_settings = EditorSettings::get_global(cx).gutter; + let gutter_lines_enabled = gutter_settings.line_numbers; + let line_gutter_width = if gutter_lines_enabled { + // Avoid flicker-like gutter resizes when the line number gains another digit and only resize the gutter on files with N*10^5 lines. + let min_width_for_number_on_gutter = em_width * 4.0; + max_line_number_width.max(min_width_for_number_on_gutter) + } else { + 0.0.into() + }; + + let git_blame_entries_width = self + .render_git_blame_gutter + .then_some(em_width * GIT_BLAME_GUTTER_WIDTH_CHARS); + + let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); + left_padding += if gutter_settings.code_actions { + em_width * 3.0 + } else if show_git_gutter && gutter_lines_enabled { + em_width * 2.0 + } else if show_git_gutter || gutter_lines_enabled { + em_width + } else { + px(0.) + }; + + let right_padding = if gutter_settings.folds && gutter_lines_enabled { + em_width * 4.0 + } else if gutter_settings.folds { + em_width * 3.0 + } else if gutter_lines_enabled { + em_width + } else { + px(0.) 
+ }; + + GutterDimensions { + left_padding, + right_padding, + width: line_gutter_width + left_padding + right_padding, + margin: -descent, + git_blame_entries_width, + } + } +} + +impl Deref for EditorSnapshot { + type Target = DisplaySnapshot; + + fn deref(&self) -> &Self::Target { + &self.display_snapshot + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum EditorEvent { + InputIgnored { + text: Arc, + }, + InputHandled { + utf16_range_to_replace: Option>, + text: Arc, + }, + ExcerptsAdded { + buffer: Model, + predecessor: ExcerptId, + excerpts: Vec<(ExcerptId, ExcerptRange)>, + }, + ExcerptsRemoved { + ids: Vec, + }, + BufferEdited, + Edited, + Reparsed, + Focused, + Blurred, + DirtyChanged, + Saved, + TitleChanged, + DiffBaseChanged, + SelectionsChanged { + local: bool, + }, + ScrollPositionChanged { + local: bool, + autoscroll: bool, + }, + Closed, + TransactionUndone { + transaction_id: clock::Lamport, + }, + TransactionBegun { + transaction_id: clock::Lamport, + }, +} + +impl EventEmitter for Editor {} + +impl FocusableView for Editor { + fn focus_handle(&self, _cx: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for Editor { + fn render<'a>(&mut self, cx: &mut ViewContext<'a, Self>) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + + let text_style = match self.mode { + EditorMode::SingleLine | EditorMode::AutoHeight { .. } => TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: rems(0.875).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: relative(settings.buffer_line_height.value()), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }, + EditorMode::Full => TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.buffer_font.family.clone(), + font_features: settings.buffer_font.features.clone(), + font_size: settings.buffer_font_size(cx).into(), + font_weight: FontWeight::NORMAL, + font_style: FontStyle::Normal, + line_height: relative(settings.buffer_line_height.value()), + background_color: None, + underline: None, + strikethrough: None, + white_space: WhiteSpace::Normal, + }, + }; + + let background = match self.mode { + EditorMode::SingleLine => cx.theme().system().transparent, + EditorMode::AutoHeight { max_lines: _ } => cx.theme().system().transparent, + EditorMode::Full => cx.theme().colors().editor_background, + }; + + EditorElement::new( + cx.view(), + EditorStyle { + background, + local_player: cx.theme().players().local(), + text: text_style, + scrollbar_width: EditorElement::SCROLLBAR_WIDTH, + syntax: cx.theme().syntax().clone(), + status: cx.theme().status().clone(), + inlay_hints_style: HighlightStyle { + color: Some(cx.theme().status().hint), + ..HighlightStyle::default() + }, + suggestions_style: HighlightStyle { + color: Some(cx.theme().status().predictive), + ..HighlightStyle::default() + }, + }, + ) + } +} + +impl ViewInputHandler for Editor { + fn text_for_range( + &mut self, + range_utf16: Range, + cx: &mut ViewContext, + ) -> Option { + Some( + self.buffer + .read(cx) + .read(cx) + .text_for_range(OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end)) + .collect(), + ) + } + + fn selected_text_range(&mut self, cx: &mut ViewContext) -> Option> { + // Prevent the IME menu from appearing when holding down an alphabetic key + // while input is disabled. 
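`text_for_range` above, and the input-handler methods that continue below (`selected_text_range`, `replace_text_in_range`, and so on), all take UTF-16 code-unit offsets, because that is what the platform IME layer speaks, and convert them into buffer coordinates via `OffsetUtf16`. A standalone sketch of the underlying unit conversion, assuming offsets that land on character boundaries (the helper is hypothetical, not the multi-buffer's own API):

```rust
use std::ops::Range;

/// Map a range of UTF-16 code units onto the corresponding UTF-8 byte range.
/// Out-of-range offsets fall back to the end of the text.
fn utf16_to_byte_range(text: &str, range_utf16: Range<usize>) -> Range<usize> {
    let (mut byte_ix, mut utf16_ix) = (0, 0);
    let (mut start, mut end) = (text.len(), text.len());
    for ch in text.chars() {
        if utf16_ix == range_utf16.start {
            start = byte_ix;
        }
        if utf16_ix == range_utf16.end {
            end = byte_ix;
        }
        byte_ix += ch.len_utf8();
        utf16_ix += ch.len_utf16();
    }
    if utf16_ix == range_utf16.start {
        start = byte_ix;
    }
    if utf16_ix == range_utf16.end {
        end = byte_ix;
    }
    start..end
}

fn main() {
    // "é" is one UTF-16 code unit but two UTF-8 bytes; "🦀" is two and four.
    let text = "é🦀ab";
    let bytes = utf16_to_byte_range(text, 1..3); // the "🦀"
    assert_eq!(&text[bytes], "🦀");
}
```

The real buffer resolves these offsets without a linear scan; the sketch only illustrates the unit mismatch the handlers are bridging, not the lookup strategy.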
+ if !self.input_enabled { + return None; + } + + let range = self.selections.newest::(cx).range(); + Some(range.start.0..range.end.0) + } + + fn marked_text_range(&self, cx: &mut ViewContext) -> Option> { + let snapshot = self.buffer.read(cx).read(cx); + let range = self.text_highlights::(cx)?.1.get(0)?; + Some(range.start.to_offset_utf16(&snapshot).0..range.end.to_offset_utf16(&snapshot).0) + } + + fn unmark_text(&mut self, cx: &mut ViewContext) { + self.clear_highlights::(cx); + self.ime_transaction.take(); + } + + fn replace_text_in_range( + &mut self, + range_utf16: Option>, + text: &str, + cx: &mut ViewContext, + ) { + if !self.input_enabled { + cx.emit(EditorEvent::InputIgnored { text: text.into() }); + return; + } + + self.transact(cx, |this, cx| { + let new_selected_ranges = if let Some(range_utf16) = range_utf16 { + let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); + Some(this.selection_replacement_ranges(range_utf16, cx)) + } else { + this.marked_text_ranges(cx) + }; + + let range_to_replace = new_selected_ranges.as_ref().and_then(|ranges_to_replace| { + let newest_selection_id = this.selections.newest_anchor().id; + this.selections + .all::(cx) + .iter() + .zip(ranges_to_replace.iter()) + .find_map(|(selection, range)| { + if selection.id == newest_selection_id { + Some( + (range.start.0 as isize - selection.head().0 as isize) + ..(range.end.0 as isize - selection.head().0 as isize), + ) + } else { + None + } + }) + }); + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: range_to_replace, + text: text.into(), + }); + + if let Some(new_selected_ranges) = new_selected_ranges { + this.change_selections(None, cx, |selections| { + selections.select_ranges(new_selected_ranges) + }); + this.backspace(&Default::default(), cx); + } + + this.handle_input(text, cx); + }); + + if let Some(transaction) = self.ime_transaction { + self.buffer.update(cx, |buffer, cx| { + buffer.group_until_transaction(transaction, cx); + }); + } + + self.unmark_text(cx); + } + + fn replace_and_mark_text_in_range( + &mut self, + range_utf16: Option>, + text: &str, + new_selected_range_utf16: Option>, + cx: &mut ViewContext, + ) { + if !self.input_enabled { + cx.emit(EditorEvent::InputIgnored { text: text.into() }); + return; + } + + let transaction = self.transact(cx, |this, cx| { + let ranges_to_replace = if let Some(mut marked_ranges) = this.marked_text_ranges(cx) { + let snapshot = this.buffer.read(cx).read(cx); + if let Some(relative_range_utf16) = range_utf16.as_ref() { + for marked_range in &mut marked_ranges { + marked_range.end.0 = marked_range.start.0 + relative_range_utf16.end; + marked_range.start.0 += relative_range_utf16.start; + marked_range.start = + snapshot.clip_offset_utf16(marked_range.start, Bias::Left); + marked_range.end = + snapshot.clip_offset_utf16(marked_range.end, Bias::Right); + } + } + Some(marked_ranges) + } else if let Some(range_utf16) = range_utf16 { + let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); + Some(this.selection_replacement_ranges(range_utf16, cx)) + } else { + None + }; + + let range_to_replace = ranges_to_replace.as_ref().and_then(|ranges_to_replace| { + let newest_selection_id = this.selections.newest_anchor().id; + this.selections + .all::(cx) + .iter() + .zip(ranges_to_replace.iter()) + .find_map(|(selection, range)| { + if selection.id == newest_selection_id { + Some( + (range.start.0 as isize - selection.head().0 as isize) + ..(range.end.0 as isize - selection.head().0 as isize), + ) + } 
else { + None + } + }) + }); + + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: range_to_replace, + text: text.into(), + }); + + if let Some(ranges) = ranges_to_replace { + this.change_selections(None, cx, |s| s.select_ranges(ranges)); + } + + let marked_ranges = { + let snapshot = this.buffer.read(cx).read(cx); + this.selections + .disjoint_anchors() + .iter() + .map(|selection| { + selection.start.bias_left(&snapshot)..selection.end.bias_right(&snapshot) + }) + .collect::>() + }; + + if text.is_empty() { + this.unmark_text(cx); + } else { + this.highlight_text::( + marked_ranges.clone(), + HighlightStyle { + underline: Some(UnderlineStyle { + thickness: px(1.), + color: None, + wavy: false, + }), + ..Default::default() + }, + cx, + ); + } + + // Disable auto-closing when composing text (i.e. typing a `"` on a Brazilian keyboard) + let use_autoclose = this.use_autoclose; + this.set_use_autoclose(false); + this.handle_input(text, cx); + this.set_use_autoclose(use_autoclose); + + if let Some(new_selected_range) = new_selected_range_utf16 { + let snapshot = this.buffer.read(cx).read(cx); + let new_selected_ranges = marked_ranges + .into_iter() + .map(|marked_range| { + let insertion_start = marked_range.start.to_offset_utf16(&snapshot).0; + let new_start = OffsetUtf16(new_selected_range.start + insertion_start); + let new_end = OffsetUtf16(new_selected_range.end + insertion_start); + snapshot.clip_offset_utf16(new_start, Bias::Left) + ..snapshot.clip_offset_utf16(new_end, Bias::Right) + }) + .collect::>(); + + drop(snapshot); + this.change_selections(None, cx, |selections| { + selections.select_ranges(new_selected_ranges) + }); + } + }); + + self.ime_transaction = self.ime_transaction.or(transaction); + if let Some(transaction) = self.ime_transaction { + self.buffer.update(cx, |buffer, cx| { + buffer.group_until_transaction(transaction, cx); + }); + } + + if self.text_highlights::(cx).is_none() { + self.ime_transaction.take(); + } + } + + fn bounds_for_range( + &mut self, + range_utf16: Range, + element_bounds: gpui::Bounds, + cx: &mut ViewContext, + ) -> Option> { + let text_layout_details = self.text_layout_details(cx); + let style = &text_layout_details.editor_style; + let font_id = cx.text_system().resolve_font(&style.text.font()); + let font_size = style.text.font_size.to_pixels(cx.rem_size()); + let line_height = style.text.line_height_in_pixels(cx.rem_size()); + let em_width = cx + .text_system() + .typographic_bounds(font_id, font_size, 'm') + .unwrap() + .size + .width; + + let snapshot = self.snapshot(cx); + let scroll_position = snapshot.scroll_position(); + let scroll_left = scroll_position.x * em_width; + + let start = OffsetUtf16(range_utf16.start).to_display_point(&snapshot); + let x = snapshot.x_for_display_point(start, &text_layout_details) - scroll_left + + self.gutter_dimensions.width; + let y = line_height * (start.row().as_f32() - scroll_position.y); + + Some(Bounds { + origin: element_bounds.origin + point(x, y), + size: size(em_width, line_height), + }) + } +} + +trait SelectionExt { + fn display_range(&self, map: &DisplaySnapshot) -> Range; + fn spanned_rows( + &self, + include_end_if_at_line_start: bool, + map: &DisplaySnapshot, + ) -> Range; +} + +impl SelectionExt for Selection { + fn display_range(&self, map: &DisplaySnapshot) -> Range { + let start = self + .start + .to_point(&map.buffer_snapshot) + .to_display_point(map); + let end = self + .end + .to_point(&map.buffer_snapshot) + .to_display_point(map); + if self.reversed { + end..start + } else 
{ + start..end + } + } + + fn spanned_rows( + &self, + include_end_if_at_line_start: bool, + map: &DisplaySnapshot, + ) -> Range { + let start = self.start.to_point(&map.buffer_snapshot); + let mut end = self.end.to_point(&map.buffer_snapshot); + if !include_end_if_at_line_start && start.row != end.row && end.column == 0 { + end.row -= 1; + } + + let buffer_start = map.prev_line_boundary(start).0; + let buffer_end = map.next_line_boundary(end).0; + MultiBufferRow(buffer_start.row)..MultiBufferRow(buffer_end.row + 1) + } +} + +impl InvalidationStack { + fn invalidate(&mut self, selections: &[Selection], buffer: &MultiBufferSnapshot) + where + S: Clone + ToOffset, + { + while let Some(region) = self.last() { + let all_selections_inside_invalidation_ranges = + if selections.len() == region.ranges().len() { + selections + .iter() + .zip(region.ranges().iter().map(|r| r.to_offset(buffer))) + .all(|(selection, invalidation_range)| { + let head = selection.head().to_offset(buffer); + invalidation_range.start <= head && invalidation_range.end >= head + }) + } else { + false + }; + + if all_selections_inside_invalidation_ranges { + break; + } else { + self.pop(); + } + } + } +} + +impl Default for InvalidationStack { + fn default() -> Self { + Self(Default::default()) + } +} + +impl Deref for InvalidationStack { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for InvalidationStack { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl InvalidationRegion for SnippetState { + fn ranges(&self) -> &[Range] { + &self.ranges[self.active_index] + } +} + +pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> RenderBlock { + let (text_without_backticks, code_ranges) = highlight_diagnostic_message(&diagnostic); + + Box::new(move |cx: &mut BlockContext| { + let group_id: SharedString = cx.block_id.to_string().into(); + + let mut text_style = cx.text_style().clone(); + text_style.color = diagnostic_style(diagnostic.severity, true, cx.theme().status()); + let theme_settings = ThemeSettings::get_global(cx); + text_style.font_family = theme_settings.buffer_font.family.clone(); + text_style.font_style = theme_settings.buffer_font.style; + text_style.font_features = theme_settings.buffer_font.features.clone(); + text_style.font_weight = theme_settings.buffer_font.weight; + + let multi_line_diagnostic = diagnostic.message.contains('\n'); + + let buttons = |diagnostic: &Diagnostic, block_id: usize| { + if multi_line_diagnostic { + v_flex() + } else { + h_flex() + } + .children(diagnostic.is_primary.then(|| { + IconButton::new(("close-block", block_id), IconName::XCircle) + .icon_color(Color::Muted) + .size(ButtonSize::Compact) + .style(ButtonStyle::Transparent) + .visible_on_hover(group_id.clone()) + .on_click(move |_click, cx| cx.dispatch_action(Box::new(Cancel))) + .tooltip(|cx| Tooltip::for_action("Close Diagnostics", &Cancel, cx)) + })) + .child( + IconButton::new(("copy-block", block_id), IconName::Copy) + .icon_color(Color::Muted) + .size(ButtonSize::Compact) + .style(ButtonStyle::Transparent) + .visible_on_hover(group_id.clone()) + .on_click({ + let message = diagnostic.message.clone(); + move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone())) + }) + .tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)), + ) + }; + + let icon_size = buttons(&diagnostic, cx.block_id) + .into_any_element() + .layout_as_root(AvailableSpace::min_size(), cx); + + h_flex() + .id(cx.block_id) + 
.group(group_id.clone()) + .relative() + .size_full() + .pl(cx.gutter_dimensions.width) + .w(cx.max_width + cx.gutter_dimensions.width) + .child( + div() + .flex() + .w(cx.anchor_x - cx.gutter_dimensions.width - icon_size.width) + .flex_shrink(), + ) + .child(buttons(&diagnostic, cx.block_id)) + .child(div().flex().flex_shrink_0().child( + StyledText::new(text_without_backticks.clone()).with_highlights( + &text_style, + code_ranges.iter().map(|range| { + ( + range.clone(), + HighlightStyle { + font_weight: Some(FontWeight::BOLD), + ..Default::default() + }, + ) + }), + ), + )) + .into_any_element() + }) +} + +pub fn highlight_diagnostic_message(diagnostic: &Diagnostic) -> (SharedString, Vec>) { + let mut text_without_backticks = String::new(); + let mut code_ranges = Vec::new(); + + if let Some(source) = &diagnostic.source { + text_without_backticks.push_str(&source); + code_ranges.push(0..source.len()); + text_without_backticks.push_str(": "); + } + + let mut prev_offset = 0; + let mut in_code_block = false; + for (ix, _) in diagnostic + .message + .match_indices('`') + .chain([(diagnostic.message.len(), "")]) + { + let prev_len = text_without_backticks.len(); + text_without_backticks.push_str(&diagnostic.message[prev_offset..ix]); + prev_offset = ix + 1; + if in_code_block { + code_ranges.push(prev_len..text_without_backticks.len()); + in_code_block = false; + } else { + in_code_block = true; + } + } + + (text_without_backticks.into(), code_ranges) +} + +fn diagnostic_style(severity: DiagnosticSeverity, valid: bool, colors: &StatusColors) -> Hsla { + match (severity, valid) { + (DiagnosticSeverity::ERROR, true) => colors.error, + (DiagnosticSeverity::ERROR, false) => colors.error, + (DiagnosticSeverity::WARNING, true) => colors.warning, + (DiagnosticSeverity::WARNING, false) => colors.warning, + (DiagnosticSeverity::INFORMATION, true) => colors.info, + (DiagnosticSeverity::INFORMATION, false) => colors.info, + (DiagnosticSeverity::HINT, true) => colors.info, + (DiagnosticSeverity::HINT, false) => colors.info, + _ => colors.ignored, + } +} + +pub fn styled_runs_for_code_label<'a>( + label: &'a CodeLabel, + syntax_theme: &'a theme::SyntaxTheme, +) -> impl 'a + Iterator, HighlightStyle)> { + let fade_out = HighlightStyle { + fade_out: Some(0.35), + ..Default::default() + }; + + let mut prev_end = label.filter_range.end; + label + .runs + .iter() + .enumerate() + .flat_map(move |(ix, (range, highlight_id))| { + let style = if let Some(style) = highlight_id.style(syntax_theme) { + style + } else { + return Default::default(); + }; + let mut muted_style = style; + muted_style.highlight(fade_out); + + let mut runs = SmallVec::<[(Range, HighlightStyle); 3]>::new(); + if range.start >= label.filter_range.end { + if range.start > prev_end { + runs.push((prev_end..range.start, fade_out)); + } + runs.push((range.clone(), muted_style)); + } else if range.end <= label.filter_range.end { + runs.push((range.clone(), style)); + } else { + runs.push((range.start..label.filter_range.end, style)); + runs.push((label.filter_range.end..range.end, muted_style)); + } + prev_end = cmp::max(prev_end, range.end); + + if ix + 1 == label.runs.len() && label.text.len() > prev_end { + runs.push((prev_end..label.text.len(), fade_out)); + } + + runs + }) +} + +pub(crate) fn split_words(text: &str) -> impl std::iter::Iterator + '_ { + let mut prev_index = 0; + let mut prev_codepoint: Option = None; + text.char_indices() + .chain([(text.len(), '\0')]) + .filter_map(move |(index, codepoint)| { + let prev_codepoint = 
prev_codepoint.replace(codepoint)?; + let is_boundary = index == text.len() + || !prev_codepoint.is_uppercase() && codepoint.is_uppercase() + || !prev_codepoint.is_alphanumeric() && codepoint.is_alphanumeric(); + if is_boundary { + let chunk = &text[prev_index..index]; + prev_index = index; + Some(chunk) + } else { + None + } + }) +} + +trait RangeToAnchorExt { + fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range; +} + +impl RangeToAnchorExt for Range { + fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range { + let start_offset = self.start.to_offset(snapshot); + let end_offset = self.end.to_offset(snapshot); + if start_offset == end_offset { + snapshot.anchor_before(start_offset)..snapshot.anchor_before(end_offset) + } else { + snapshot.anchor_after(self.start)..snapshot.anchor_before(self.end) + } + } +} + +pub trait RowExt { + fn as_f32(&self) -> f32; + + fn next_row(&self) -> Self; + + fn previous_row(&self) -> Self; + + fn minus(&self, other: Self) -> u32; +} + +impl RowExt for DisplayRow { + fn as_f32(&self) -> f32 { + self.0 as f32 + } + + fn next_row(&self) -> Self { + Self(self.0 + 1) + } + + fn previous_row(&self) -> Self { + Self(self.0.saturating_sub(1)) + } + + fn minus(&self, other: Self) -> u32 { + self.0 - other.0 + } +} + +impl RowExt for MultiBufferRow { + fn as_f32(&self) -> f32 { + self.0 as f32 + } + + fn next_row(&self) -> Self { + Self(self.0 + 1) + } + + fn previous_row(&self) -> Self { + Self(self.0.saturating_sub(1)) + } + + fn minus(&self, other: Self) -> u32 { + self.0 - other.0 + } +} + +trait RowRangeExt { + type Row; + + fn len(&self) -> usize; + + fn iter_rows(&self) -> impl DoubleEndedIterator; +} + +impl RowRangeExt for Range { + type Row = MultiBufferRow; + + fn len(&self) -> usize { + (self.end.0 - self.start.0) as usize + } + + fn iter_rows(&self) -> impl DoubleEndedIterator { + (self.start.0..self.end.0).map(MultiBufferRow) + } +} + +impl RowRangeExt for Range { + type Row = DisplayRow; + + fn len(&self) -> usize { + (self.end.0 - self.start.0) as usize + } + + fn iter_rows(&self) -> impl DoubleEndedIterator { + (self.start.0..self.end.0).map(DisplayRow) + } +} + +fn hunk_status(hunk: &DiffHunk) -> DiffHunkStatus { + if hunk.diff_base_byte_range.is_empty() { + DiffHunkStatus::Added + } else if hunk.associated_range.is_empty() { + DiffHunkStatus::Removed + } else { + DiffHunkStatus::Modified + } +} diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs new file mode 100644 index 0000000..a09d1f1 --- /dev/null +++ b/crates/editor/src/editor_settings.rs @@ -0,0 +1,262 @@ +use gpui::AppContext; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Clone)] +pub struct EditorSettings { + pub cursor_blink: bool, + pub current_line_highlight: CurrentLineHighlight, + pub hover_popover_enabled: bool, + pub show_completions_on_input: bool, + pub show_completion_documentation: bool, + pub completion_documentation_secondary_query_debounce: u64, + pub use_on_type_format: bool, + pub toolbar: Toolbar, + pub scrollbar: Scrollbar, + pub gutter: Gutter, + pub vertical_scroll_margin: f32, + pub scroll_sensitivity: f32, + pub relative_line_numbers: bool, + pub seed_search_query_from_cursor: SeedQuerySetting, + pub multi_cursor_modifier: MultiCursorModifier, + pub redact_private_values: bool, + #[serde(default)] + pub double_click_in_multibuffer: DoubleClickInMultibuffer, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, 
Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum CurrentLineHighlight { + // Don't highlight the current line. + None, + // Highlight the gutter area. + Gutter, + // Highlight the editor area. + Line, + // Highlight the full line. + All, +} + +/// When to populate a new search's query based on the text under the cursor. +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum SeedQuerySetting { + /// Always populate the search query with the word under the cursor. + Always, + /// Only populate the search query when there is text selected. + Selection, + /// Never populate the search query + Never, +} + +/// What to do when multibuffer is double clicked in some of its excerpts (parts of singleton buffers). +#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum DoubleClickInMultibuffer { + /// Behave as a regular buffer and select the whole word. + #[default] + Select, + /// Open the excerpt clicked as a new buffer in the new tab, if no `alt` modifier was pressed during double click. + /// Otherwise, behave as a regular buffer and select the whole word. + Open, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct Toolbar { + pub breadcrumbs: bool, + pub quick_actions: bool, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct Scrollbar { + pub show: ShowScrollbar, + pub git_diff: bool, + pub selected_symbol: bool, + pub search_results: bool, + pub diagnostics: bool, + pub cursors: bool, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct Gutter { + pub line_numbers: bool, + pub code_actions: bool, + pub folds: bool, +} + +/// When to show the scrollbar in the editor. +/// +/// Default: auto +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ShowScrollbar { + /// Show the scrollbar if there's important information or + /// follow the system's configured behavior. + Auto, + /// Match the system's configured behavior. + System, + /// Always show the scrollbar. + Always, + /// Never show the scrollbar. + Never, +} + +/// The key to use for adding multiple cursors +/// +/// Default: alt +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum MultiCursorModifier { + Alt, + #[serde(alias = "cmd", alias = "ctrl")] + CmdOrCtrl, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct EditorSettingsContent { + /// Whether the cursor blinks in the editor. + /// + /// Default: true + pub cursor_blink: Option, + /// How to highlight the current line in the editor. + /// + /// Default: all + pub current_line_highlight: Option, + /// Whether to show the informational hover box when moving the mouse + /// over symbols in the editor. + /// + /// Default: true + pub hover_popover_enabled: Option, + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. + /// + /// Default: true + pub show_completions_on_input: Option, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. 
+ /// + /// Default: true + pub show_completion_documentation: Option, + /// The debounce delay before re-querying the language server for completion + /// documentation when not included in original completion list. + /// + /// Default: 300 ms + pub completion_documentation_secondary_query_debounce: Option, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + /// + /// Default: true + pub use_on_type_format: Option, + /// Toolbar related settings + pub toolbar: Option, + /// Scrollbar related settings + pub scrollbar: Option, + /// Gutter related settings + pub gutter: Option, + /// The number of lines to keep above/below the cursor when auto-scrolling. + /// + /// Default: 3. + pub vertical_scroll_margin: Option, + /// Scroll sensitivity multiplier. This multiplier is applied + /// to both the horizontal and vertical delta values while scrolling. + /// + /// Default: 1.0 + pub scroll_sensitivity: Option, + /// Whether the line numbers on editors gutter are relative or not. + /// + /// Default: false + pub relative_line_numbers: Option, + /// When to populate a new search's query based on the text under the cursor. + /// + /// Default: always + pub seed_search_query_from_cursor: Option, + /// The key to use for adding multiple cursors + /// + /// Default: alt + pub multi_cursor_modifier: Option, + /// Hide the values of variables in `private` files, as defined by the + /// private_files setting. This only changes the visual representation, + /// the values are still present in the file and can be selected / copied / pasted + /// + /// Default: false + pub redact_private_values: Option, + + /// What to do when multibuffer is double clicked in some of its excerpts + /// (parts of singleton buffers). + /// + /// Default: select + pub double_click_in_multibuffer: Option, +} + +// Toolbar related settings +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ToolbarContent { + /// Whether to display breadcrumbs in the editor toolbar. + /// + /// Default: true + pub breadcrumbs: Option, + /// Whether to display quik action buttons in the editor toolbar. + /// + /// Default: true + pub quick_actions: Option, +} + +/// Scrollbar related settings +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct ScrollbarContent { + /// When to show the scrollbar in the editor. + /// + /// Default: auto + pub show: Option, + /// Whether to show git diff indicators in the scrollbar. + /// + /// Default: true + pub git_diff: Option, + /// Whether to show buffer search result indicators in the scrollbar. + /// + /// Default: true + pub search_results: Option, + /// Whether to show selected symbol occurrences in the scrollbar. + /// + /// Default: true + pub selected_symbol: Option, + /// Whether to show diagnostic indicators in the scrollbar. + /// + /// Default: true + pub diagnostics: Option, + /// Whether to show cursor positions in the scrollbar. + /// + /// Default: true + pub cursors: Option, +} + +/// Gutter related settings +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct GutterContent { + /// Whether to show line numbers in the gutter. + /// + /// Default: true + pub line_numbers: Option, + /// Whether to show code action buttons in the gutter. + /// + /// Default: true + pub code_actions: Option, + /// Whether to show fold buttons in the gutter. 
+ /// + /// Default: true + pub folds: Option, +} + +impl Settings for EditorSettings { + const KEY: Option<&'static str> = None; + + type FileContent = EditorSettingsContent; + + fn load( + sources: SettingsSources, + _: &mut AppContext, + ) -> anyhow::Result { + sources.json_merge() + } +} diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs new file mode 100644 index 0000000..61d7b17 --- /dev/null +++ b/crates/editor/src/editor_tests.rs @@ -0,0 +1,11635 @@ +use super::*; +use crate::{ + scroll::scroll_amount::ScrollAmount, + test::{ + assert_text_with_selections, build_editor, editor_hunks, + editor_lsp_test_context::EditorLspTestContext, editor_test_context::EditorTestContext, + expanded_hunks, expanded_hunks_background_highlights, select_ranges, + }, + JoinLines, +}; +use futures::StreamExt; +use gpui::{div, TestAppContext, UpdateGlobal, VisualTestContext, WindowBounds, WindowOptions}; +use indoc::indoc; +use language::{ + language_settings::{ + AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent, PrettierSettings, + }, + BracketPairConfig, + Capability::ReadWrite, + FakeLspAdapter, LanguageConfig, LanguageConfigOverride, LanguageMatcher, Override, Point, +}; +use parking_lot::Mutex; +use project::project_settings::{LspSettings, ProjectSettings}; +use project::FakeFs; +use serde_json::{self, json}; +use std::sync::atomic; +use std::sync::atomic::AtomicUsize; +use std::{cell::RefCell, future::Future, rc::Rc, time::Instant}; +use unindent::Unindent; +use util::{ + assert_set_eq, + test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker}, +}; +use workspace::{ + item::{FollowEvent, FollowableItem, Item, ItemHandle}, + NavigationEntry, ViewId, +}; + +#[gpui::test] +fn test_edit_events(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer = cx.new_model(|cx| { + let mut buffer = language::Buffer::local("123456", cx); + buffer.set_group_interval(Duration::from_secs(1)); + buffer + }); + + let events = Rc::new(RefCell::new(Vec::new())); + let editor1 = cx.add_window({ + let events = events.clone(); + |cx| { + let view = cx.view().clone(); + cx.subscribe(&view, move |_, _, event: &EditorEvent, _| { + if matches!(event, EditorEvent::Edited | EditorEvent::BufferEdited) { + events.borrow_mut().push(("editor1", event.clone())); + } + }) + .detach(); + Editor::for_buffer(buffer.clone(), None, cx) + } + }); + + let editor2 = cx.add_window({ + let events = events.clone(); + |cx| { + cx.subscribe(&cx.view().clone(), move |_, _, event: &EditorEvent, _| { + if matches!(event, EditorEvent::Edited | EditorEvent::BufferEdited) { + events.borrow_mut().push(("editor2", event.clone())); + } + }) + .detach(); + Editor::for_buffer(buffer.clone(), None, cx) + } + }); + + assert_eq!(mem::take(&mut *events.borrow_mut()), []); + + // Mutating editor 1 will emit an `Edited` event only for that editor. + _ = editor1.update(cx, |editor, cx| editor.insert("X", cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor1", EditorEvent::Edited), + ("editor1", EditorEvent::BufferEdited), + ("editor2", EditorEvent::BufferEdited), + ] + ); + + // Mutating editor 2 will emit an `Edited` event only for that editor. 
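`test_edit_events` above uses a test pattern worth calling out: each event callback gets a cloned `Rc<RefCell<Vec<_>>>` so that every subscriber appends to one shared log, and each assertion drains that log with `mem::take` so later assertions start from a clean slate. A stripped-down standalone version of just that collection pattern (the emitter type is made up; it is not GPUI's subscription API):

```rust
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;

/// Minimal stand-in for the editor events observed in the test above.
#[derive(Clone, Debug, PartialEq)]
enum Event {
    Edited,
    BufferEdited,
}

/// A toy emitter: subscribers are boxed callbacks, roughly like the test's
/// `cx.subscribe(...)` closures.
struct Emitter {
    subscribers: Vec<Box<dyn Fn(&Event)>>,
}

impl Emitter {
    fn subscribe(&mut self, f: impl Fn(&Event) + 'static) {
        self.subscribers.push(Box::new(f));
    }

    fn emit(&self, event: Event) {
        for subscriber in &self.subscribers {
            subscriber(&event);
        }
    }
}

fn main() {
    // Shared, interiorly mutable log: each subscriber clones the `Rc` and
    // pushes into the same Vec, as `events` does in `test_edit_events`.
    let events = Rc::new(RefCell::new(Vec::new()));
    let mut emitter = Emitter { subscribers: Vec::new() };

    for name in ["editor1", "editor2"] {
        let events = events.clone();
        emitter.subscribe(move |event: &Event| {
            events.borrow_mut().push((name, event.clone()));
        });
    }

    emitter.emit(Event::Edited);
    emitter.emit(Event::BufferEdited);

    // `mem::take` drains and resets the log between assertions.
    assert_eq!(
        mem::take(&mut *events.borrow_mut()),
        [
            ("editor1", Event::Edited),
            ("editor2", Event::Edited),
            ("editor1", Event::BufferEdited),
            ("editor2", Event::BufferEdited),
        ]
    );
    assert!(events.borrow().is_empty());
}
```

Draining with `mem::take` rather than clearing a clone keeps each assertion self-contained, which is why the test repeats the same `mem::take(&mut *events.borrow_mut())` incantation before every comparison.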
+ _ = editor2.update(cx, |editor, cx| editor.delete(&Delete, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor2", EditorEvent::Edited), + ("editor1", EditorEvent::BufferEdited), + ("editor2", EditorEvent::BufferEdited), + ] + ); + + // Undoing on editor 1 will emit an `Edited` event only for that editor. + _ = editor1.update(cx, |editor, cx| editor.undo(&Undo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor1", EditorEvent::Edited), + ("editor1", EditorEvent::BufferEdited), + ("editor2", EditorEvent::BufferEdited), + ] + ); + + // Redoing on editor 1 will emit an `Edited` event only for that editor. + _ = editor1.update(cx, |editor, cx| editor.redo(&Redo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor1", EditorEvent::Edited), + ("editor1", EditorEvent::BufferEdited), + ("editor2", EditorEvent::BufferEdited), + ] + ); + + // Undoing on editor 2 will emit an `Edited` event only for that editor. + _ = editor2.update(cx, |editor, cx| editor.undo(&Undo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor2", EditorEvent::Edited), + ("editor1", EditorEvent::BufferEdited), + ("editor2", EditorEvent::BufferEdited), + ] + ); + + // Redoing on editor 2 will emit an `Edited` event only for that editor. + _ = editor2.update(cx, |editor, cx| editor.redo(&Redo, cx)); + assert_eq!( + mem::take(&mut *events.borrow_mut()), + [ + ("editor2", EditorEvent::Edited), + ("editor1", EditorEvent::BufferEdited), + ("editor2", EditorEvent::BufferEdited), + ] + ); + + // No event is emitted when the mutation is a no-op. + _ = editor2.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([0..0])); + + editor.backspace(&Backspace, cx); + }); + assert_eq!(mem::take(&mut *events.borrow_mut()), []); +} + +#[gpui::test] +fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut now = Instant::now(); + let buffer = cx.new_model(|cx| language::Buffer::local("123456", cx)); + let group_interval = buffer.update(cx, |buffer, _| buffer.transaction_group_interval()); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let editor = cx.add_window(|cx| build_editor(buffer.clone(), cx)); + + _ = editor.update(cx, |editor, cx| { + editor.start_transaction_at(now, cx); + editor.change_selections(None, cx, |s| s.select_ranges([2..4])); + + editor.insert("cd", cx); + editor.end_transaction_at(now, cx); + assert_eq!(editor.text(cx), "12cd56"); + assert_eq!(editor.selections.ranges(cx), vec![4..4]); + + editor.start_transaction_at(now, cx); + editor.change_selections(None, cx, |s| s.select_ranges([4..5])); + editor.insert("e", cx); + editor.end_transaction_at(now, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selections.ranges(cx), vec![5..5]); + + now += group_interval + Duration::from_millis(1); + editor.change_selections(None, cx, |s| s.select_ranges([2..2])); + + // Simulate an edit in another editor + _ = buffer.update(cx, |buffer, cx| { + buffer.start_transaction_at(now, cx); + buffer.edit([(0..1, "a")], None, cx); + buffer.edit([(1..1, "b")], None, cx); + buffer.end_transaction_at(now, cx); + }); + + assert_eq!(editor.text(cx), "ab2cde6"); + assert_eq!(editor.selections.ranges(cx), vec![3..3]); + + // Last transaction happened past the group interval in a different editor. + // Undo it individually and don't restore selections. 
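The undo grouping exercised below comes down to a time threshold: edits made within the buffer's transaction group interval merge into one undoable transaction, while an edit made after the interval starts a new one. Here is a simplified, self-contained sketch of that rule; the `History` and `Transaction` types are illustrative, not Zed's actual implementation.

use std::time::{Duration, Instant};

// One undoable unit: a batch of edits that were made close together in time.
#[derive(Debug)]
struct Transaction {
    edits: Vec<String>,
    last_edit_at: Instant,
}

struct History {
    group_interval: Duration,
    transactions: Vec<Transaction>,
}

impl History {
    fn new(group_interval: Duration) -> Self {
        Self { group_interval, transactions: Vec::new() }
    }

    // Record an edit at `now`. If it happened within `group_interval` of the
    // previous edit, it joins the current transaction; otherwise a new one starts.
    fn push_edit(&mut self, edit: &str, now: Instant) {
        let start_new = match self.transactions.last() {
            Some(last) => now.duration_since(last.last_edit_at) > self.group_interval,
            None => true,
        };
        if start_new {
            self.transactions.push(Transaction { edits: Vec::new(), last_edit_at: now });
        }
        let current = self.transactions.last_mut().expect("just ensured non-empty");
        current.edits.push(edit.to_string());
        current.last_edit_at = now;
    }

    // Undo removes one whole transaction, i.e. all edits grouped into it.
    fn undo(&mut self) -> Option<Transaction> {
        self.transactions.pop()
    }
}

fn main() {
    let group_interval = Duration::from_secs(1);
    let mut history = History::new(group_interval);
    let mut now = Instant::now();

    history.push_edit("insert cd", now);
    history.push_edit("insert e", now);

    // A later edit, past the group interval, starts its own transaction.
    now += group_interval + Duration::from_millis(1);
    history.push_edit("insert ab", now);

    // The most recent transaction contains only the late edit...
    assert_eq!(history.undo().unwrap().edits, ["insert ab"]);
    // ...while the two earlier edits undo together.
    assert_eq!(history.undo().unwrap().edits, ["insert cd", "insert e"]);
    assert!(history.undo().is_none());
}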
+ editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selections.ranges(cx), vec![2..2]); + + // First two transactions happened within the group interval in this editor. + // Undo them together and restore selections. + editor.undo(&Undo, cx); + editor.undo(&Undo, cx); // Undo stack is empty here, so this is a no-op. + assert_eq!(editor.text(cx), "123456"); + assert_eq!(editor.selections.ranges(cx), vec![0..0]); + + // Redo the first two transactions together. + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selections.ranges(cx), vec![5..5]); + + // Redo the last transaction on its own. + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "ab2cde6"); + assert_eq!(editor.selections.ranges(cx), vec![6..6]); + + // Test empty transactions. + editor.start_transaction_at(now, cx); + editor.end_transaction_at(now, cx); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "12cde6"); + }); +} + +#[gpui::test] +fn test_ime_composition(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer = cx.new_model(|cx| { + let mut buffer = language::Buffer::local("abcde", cx); + // Ensure automatic grouping doesn't occur. + buffer.set_group_interval(Duration::ZERO); + buffer + }); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + cx.add_window(|cx| { + let mut editor = build_editor(buffer.clone(), cx); + + // Start a new IME composition. + editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx); + editor.replace_and_mark_text_in_range(Some(0..1), "á", None, cx); + editor.replace_and_mark_text_in_range(Some(0..1), "ä", None, cx); + assert_eq!(editor.text(cx), "äbcde"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) + ); + + // Finalize IME composition. + editor.replace_text_in_range(None, "ā", cx); + assert_eq!(editor.text(cx), "ābcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // IME composition edits are grouped and are undone/redone at once. + editor.undo(&Default::default(), cx); + assert_eq!(editor.text(cx), "abcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + editor.redo(&Default::default(), cx); + assert_eq!(editor.text(cx), "ābcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // Start a new IME composition. + editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) + ); + + // Undoing during an IME composition cancels it. + editor.undo(&Default::default(), cx); + assert_eq!(editor.text(cx), "ābcde"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // Start a new IME composition with an invalid marked range, ensuring it gets clipped. + editor.replace_and_mark_text_in_range(Some(4..999), "è", None, cx); + assert_eq!(editor.text(cx), "ābcdè"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![OffsetUtf16(4)..OffsetUtf16(5)]) + ); + + // Finalize IME composition with an invalid replacement range, ensuring it gets clipped. + editor.replace_text_in_range(Some(4..999), "ę", cx); + assert_eq!(editor.text(cx), "ābcdę"); + assert_eq!(editor.marked_text_ranges(cx), None); + + // Start a new IME composition with multiple cursors. 
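At its core, the IME behavior tested here is a small state machine: a marked (provisional) range is repeatedly replaced in place while composing, finalizing commits the text and clears the mark, and out-of-bounds ranges are clipped rather than rejected. A stripped-down sketch over a plain `String`, using byte offsets and a single cursor (illustrative only, not Zed's API):

use std::ops::Range;

struct Composition {
    text: String,
    marked: Option<Range<usize>>,
}

impl Composition {
    fn new(text: &str) -> Self {
        Self { text: text.to_string(), marked: None }
    }

    // Clip a requested range to the current text length so stale or bogus
    // ranges cannot cause an out-of-bounds replacement.
    fn clip(&self, range: Range<usize>) -> Range<usize> {
        let len = self.text.len();
        range.start.min(len)..range.end.min(len)
    }

    // Replace the existing marked range (or `range`, if nothing is marked) with
    // provisional text, and mark the inserted span as the active composition.
    fn replace_and_mark(&mut self, range: Range<usize>, new_text: &str) {
        let target = self.clip(self.marked.clone().unwrap_or(range));
        self.text.replace_range(target.clone(), new_text);
        self.marked = Some(target.start..target.start + new_text.len());
    }

    // Replace the marked range with final text and end the composition.
    fn finalize(&mut self, new_text: &str) {
        let target = self.clip(self.marked.take().unwrap_or(0..0));
        self.text.replace_range(target, new_text);
    }
}

fn main() {
    let mut composition = Composition::new("abcde");

    // Repeated provisional replacements reuse the marked range.
    composition.replace_and_mark(0..1, "a\u{300}"); // "a" + combining grave accent
    composition.replace_and_mark(0..1, "a\u{301}"); // "a" + combining acute accent
    assert!(composition.marked.is_some());

    // Finalizing commits the text and clears the mark.
    composition.finalize("a");
    assert_eq!(composition.text, "abcde");
    assert!(composition.marked.is_none());

    // An out-of-range request is clipped instead of panicking.
    composition.replace_and_mark(4..999, "e");
    assert_eq!(composition.text, "abcde");
}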
+ editor.change_selections(None, cx, |s| { + s.select_ranges([ + OffsetUtf16(1)..OffsetUtf16(1), + OffsetUtf16(3)..OffsetUtf16(3), + OffsetUtf16(5)..OffsetUtf16(5), + ]) + }); + editor.replace_and_mark_text_in_range(Some(4..5), "XYZ", None, cx); + assert_eq!(editor.text(cx), "XYZbXYZdXYZ"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![ + OffsetUtf16(0)..OffsetUtf16(3), + OffsetUtf16(4)..OffsetUtf16(7), + OffsetUtf16(8)..OffsetUtf16(11) + ]) + ); + + // Ensure the newly-marked range gets treated as relative to the previously-marked ranges. + editor.replace_and_mark_text_in_range(Some(1..2), "1", None, cx); + assert_eq!(editor.text(cx), "X1ZbX1ZdX1Z"); + assert_eq!( + editor.marked_text_ranges(cx), + Some(vec![ + OffsetUtf16(1)..OffsetUtf16(2), + OffsetUtf16(5)..OffsetUtf16(6), + OffsetUtf16(9)..OffsetUtf16(10) + ]) + ); + + // Finalize IME composition with multiple cursors. + editor.replace_text_in_range(Some(9..10), "2", cx); + assert_eq!(editor.text(cx), "X2ZbX2ZdX2Z"); + assert_eq!(editor.marked_text_ranges(cx), None); + + editor + }); +} + +#[gpui::test] +fn test_selection_with_mouse(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\nddddddd\n", cx); + build_editor(buffer, cx) + }); + + _ = editor.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(DisplayRow(2), 2), false, 1, cx); + }); + assert_eq!( + editor + .update(cx, |view, cx| view.selections.display_ranges(cx)) + .unwrap(), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(2), 2)] + ); + + _ = editor.update(cx, |view, cx| { + view.update_selection( + DisplayPoint::new(DisplayRow(3), 3), + 0, + gpui::Point::::default(), + cx, + ); + }); + + assert_eq!( + editor + .update(cx, |view, cx| view.selections.display_ranges(cx)) + .unwrap(), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3)] + ); + + _ = editor.update(cx, |view, cx| { + view.update_selection( + DisplayPoint::new(DisplayRow(1), 1), + 0, + gpui::Point::::default(), + cx, + ); + }); + + assert_eq!( + editor + .update(cx, |view, cx| view.selections.display_ranges(cx)) + .unwrap(), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(1), 1)] + ); + + _ = editor.update(cx, |view, cx| { + view.end_selection(cx); + view.update_selection( + DisplayPoint::new(DisplayRow(3), 3), + 0, + gpui::Point::::default(), + cx, + ); + }); + + assert_eq!( + editor + .update(cx, |view, cx| view.selections.display_ranges(cx)) + .unwrap(), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(1), 1)] + ); + + _ = editor.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(DisplayRow(3), 3), true, 1, cx); + view.update_selection( + DisplayPoint::new(DisplayRow(0), 0), + 0, + gpui::Point::::default(), + cx, + ); + }); + + assert_eq!( + editor + .update(cx, |view, cx| view.selections.display_ranges(cx)) + .unwrap(), + [ + DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(3), 3)..DisplayPoint::new(DisplayRow(0), 0) + ] + ); + + _ = editor.update(cx, |view, cx| { + view.end_selection(cx); + }); + + assert_eq!( + editor + .update(cx, |view, cx| view.selections.display_ranges(cx)) + .unwrap(), + [DisplayPoint::new(DisplayRow(3), 3)..DisplayPoint::new(DisplayRow(0), 0)] + ); +} + +#[gpui::test] +fn test_canceling_pending_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = 
MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); + build_editor(buffer, cx) + }); + + _ = view.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(DisplayRow(2), 2), false, 1, cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(2), 2)] + ); + }); + + _ = view.update(cx, |view, cx| { + view.update_selection( + DisplayPoint::new(DisplayRow(3), 3), + 0, + gpui::Point::::default(), + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3)] + ); + }); + + _ = view.update(cx, |view, cx| { + view.cancel(&Cancel, cx); + view.update_selection( + DisplayPoint::new(DisplayRow(1), 1), + 0, + gpui::Point::::default(), + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3)] + ); + }); +} + +#[gpui::test] +fn test_clone(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let (text, selection_ranges) = marked_text_ranges( + indoc! {" + one + two + threeˇ + four + fiveˇ + "}, + true, + ); + + let editor = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&text, cx); + build_editor(buffer, cx) + }); + + _ = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges(selection_ranges.clone())); + editor.fold_ranges( + [ + Point::new(1, 0)..Point::new(2, 0), + Point::new(3, 0)..Point::new(4, 0), + ], + true, + cx, + ); + }); + + let cloned_editor = editor + .update(cx, |editor, cx| { + cx.open_window(Default::default(), |cx| cx.new_view(|cx| editor.clone(cx))) + }) + .unwrap(); + + let snapshot = editor.update(cx, |e, cx| e.snapshot(cx)).unwrap(); + let cloned_snapshot = cloned_editor.update(cx, |e, cx| e.snapshot(cx)).unwrap(); + + assert_eq!( + cloned_editor + .update(cx, |e, cx| e.display_text(cx)) + .unwrap(), + editor.update(cx, |e, cx| e.display_text(cx)).unwrap() + ); + assert_eq!( + cloned_snapshot + .folds_in_range(0..text.len()) + .collect::>(), + snapshot.folds_in_range(0..text.len()).collect::>(), + ); + assert_set_eq!( + cloned_editor + .update(cx, |editor, cx| editor.selections.ranges::(cx)) + .unwrap(), + editor + .update(cx, |editor, cx| editor.selections.ranges(cx)) + .unwrap() + ); + assert_set_eq!( + cloned_editor + .update(cx, |e, cx| e.selections.display_ranges(cx)) + .unwrap(), + editor + .update(cx, |e, cx| e.selections.display_ranges(cx)) + .unwrap() + ); +} + +#[gpui::test] +async fn test_navigation_history(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + use workspace::item::Item; + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project, cx)); + let pane = workspace + .update(cx, |workspace, _| workspace.active_pane().clone()) + .unwrap(); + + _ = workspace.update(cx, |_v, cx| { + cx.new_view(|cx| { + let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx); + let mut editor = build_editor(buffer.clone(), cx); + let handle = cx.view(); + editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle))); + + fn pop_history(editor: &mut Editor, cx: &mut WindowContext) -> Option { + editor.nav_history.as_mut().unwrap().pop_backward(cx) + } + + // Move the cursor a small distance. + // Nothing is added to the navigation history. 
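The behavior asserted in this test is a distance threshold: small cursor movements do not create navigation entries, while larger jumps record the position being left so it can be returned to. Below is a minimal sketch of such a history over plain line numbers; the `MIN_NAVIGATION_DISTANCE` constant and its value are assumptions for illustration, not Zed's actual threshold.

// Record the previous cursor line only when the cursor jumps "far enough".
const MIN_NAVIGATION_DISTANCE: u32 = 10; // illustrative threshold, not Zed's value

struct NavHistory {
    backward: Vec<u32>,
}

impl NavHistory {
    fn new() -> Self {
        Self { backward: Vec::new() }
    }

    // Called whenever the cursor moves from `from` to `to`.
    fn cursor_moved(&mut self, from: u32, to: u32) {
        if from.abs_diff(to) >= MIN_NAVIGATION_DISTANCE {
            self.backward.push(from);
        }
    }

    // Pop the most recent recorded position, if any.
    fn pop_backward(&mut self) -> Option<u32> {
        self.backward.pop()
    }
}

fn main() {
    let mut history = NavHistory::new();

    // Small movements leave the history empty.
    history.cursor_moved(1, 3);
    assert!(history.pop_backward().is_none());

    // A large jump records the position we came from.
    history.cursor_moved(3, 13);
    assert_eq!(history.pop_backward(), Some(3));
    assert!(history.pop_backward().is_none());
}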
+ editor.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0) + ]) + }); + editor.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0) + ]) + }); + assert!(pop_history(&mut editor, cx).is_none()); + + // Move the cursor a large distance. + // The history can jump back to the previous position. + editor.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(13), 0)..DisplayPoint::new(DisplayRow(13), 3) + ]) + }); + let nav_entry = pop_history(&mut editor, cx).unwrap(); + editor.navigate(nav_entry.data.unwrap(), cx); + assert_eq!(nav_entry.item.id(), cx.entity_id()); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0)] + ); + assert!(pop_history(&mut editor, cx).is_none()); + + // Move the cursor a small distance via the mouse. + // Nothing is added to the navigation history. + editor.begin_selection(DisplayPoint::new(DisplayRow(5), 0), false, 1, cx); + editor.end_selection(cx); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 0)] + ); + assert!(pop_history(&mut editor, cx).is_none()); + + // Move the cursor a large distance via the mouse. + // The history can jump back to the previous position. + editor.begin_selection(DisplayPoint::new(DisplayRow(15), 0), false, 1, cx); + editor.end_selection(cx); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(15), 0)..DisplayPoint::new(DisplayRow(15), 0)] + ); + let nav_entry = pop_history(&mut editor, cx).unwrap(); + editor.navigate(nav_entry.data.unwrap(), cx); + assert_eq!(nav_entry.item.id(), cx.entity_id()); + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 0)] + ); + assert!(pop_history(&mut editor, cx).is_none()); + + // Set scroll position to check later + editor.set_scroll_position(gpui::Point::::new(5.5, 5.5), cx); + let original_scroll_position = editor.scroll_manager.anchor(); + + // Jump to the end of the document and adjust scroll + editor.move_to_end(&MoveToEnd, cx); + editor.set_scroll_position(gpui::Point::::new(-2.5, -0.5), cx); + assert_ne!(editor.scroll_manager.anchor(), original_scroll_position); + + let nav_entry = pop_history(&mut editor, cx).unwrap(); + editor.navigate(nav_entry.data.unwrap(), cx); + assert_eq!(editor.scroll_manager.anchor(), original_scroll_position); + + // Ensure we don't panic when navigation data contains invalid anchors *and* points. 
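The defensive behavior checked below boils down to clamping: when a stored navigation entry points at a position that no longer exists, the restored cursor is clipped to the buffer's current bounds instead of panicking. A small sketch of that clipping, assuming a simple row/column position type rather than Zed's anchor resolution:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Position {
    row: u32,
    column: u32,
}

// Clip a possibly-stale position to the bounds of the current buffer contents.
fn clip_position(lines: &[&str], target: Position) -> Position {
    if lines.is_empty() {
        return Position { row: 0, column: 0 };
    }
    let row = target.row.min(lines.len() as u32 - 1);
    let max_column = lines[row as usize].len() as u32;
    Position { row, column: target.column.min(max_column) }
}

fn main() {
    let lines = ["alpha", "beta", "gamma"];

    // A stale entry pointing far past the end of the buffer...
    let stale = Position { row: 9999, column: 42 };

    // ...is clamped to the last valid position instead of causing a panic.
    assert_eq!(clip_position(&lines, stale), Position { row: 2, column: 5 });

    // In-bounds positions pass through unchanged.
    let valid = Position { row: 1, column: 2 };
    assert_eq!(clip_position(&lines, valid), valid);
}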
+ let mut invalid_anchor = editor.scroll_manager.anchor().anchor; + invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok(); + let invalid_point = Point::new(9999, 0); + editor.navigate( + Box::new(NavigationData { + cursor_anchor: invalid_anchor, + cursor_position: invalid_point, + scroll_anchor: ScrollAnchor { + anchor: invalid_anchor, + offset: Default::default(), + }, + scroll_top_row: invalid_point.row, + }), + cx, + ); + assert_eq!( + editor.selections.display_ranges(cx), + &[editor.max_point(cx)..editor.max_point(cx)] + ); + assert_eq!( + editor.scroll_position(cx), + gpui::Point::new(0., editor.max_point(cx).row().as_f32()) + ); + + editor + }) + }); +} + +#[gpui::test] +fn test_cancel(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); + build_editor(buffer, cx) + }); + + _ = view.update(cx, |view, cx| { + view.begin_selection(DisplayPoint::new(DisplayRow(3), 4), false, 1, cx); + view.update_selection( + DisplayPoint::new(DisplayRow(1), 1), + 0, + gpui::Point::::default(), + cx, + ); + view.end_selection(cx); + + view.begin_selection(DisplayPoint::new(DisplayRow(0), 1), true, 1, cx); + view.update_selection( + DisplayPoint::new(DisplayRow(0), 3), + 0, + gpui::Point::::default(), + cx, + ); + view.end_selection(cx); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 3), + DisplayPoint::new(DisplayRow(3), 4)..DisplayPoint::new(DisplayRow(1), 1), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.cancel(&Cancel, cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(3), 4)..DisplayPoint::new(DisplayRow(1), 1)] + ); + }); + + _ = view.update(cx, |view, cx| { + view.cancel(&Cancel, cx); + assert_eq!( + view.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1)] + ); + }); +} + +#[gpui::test] +fn test_fold_action(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple( + &" + impl Foo { + // Hello! + + fn a() { + 1 + } + + fn b() { + 2 + } + + fn c() { + 3 + } + } + " + .unindent(), + cx, + ); + build_editor(buffer.clone(), cx) + }); + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(8), 0)..DisplayPoint::new(DisplayRow(12), 0) + ]); + }); + view.fold(&Fold, cx); + assert_eq!( + view.display_text(cx), + " + impl Foo { + // Hello! + + fn a() { + 1 + } + + fn b() {⋯ + } + + fn c() {⋯ + } + } + " + .unindent(), + ); + + view.fold(&Fold, cx); + assert_eq!( + view.display_text(cx), + " + impl Foo {⋯ + } + " + .unindent(), + ); + + view.unfold_lines(&UnfoldLines, cx); + assert_eq!( + view.display_text(cx), + " + impl Foo { + // Hello! 
+ + fn a() { + 1 + } + + fn b() {⋯ + } + + fn c() {⋯ + } + } + " + .unindent(), + ); + + view.unfold_lines(&UnfoldLines, cx); + assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text()); + }); +} + +#[gpui::test] +fn test_move_cursor(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer = cx.update(|cx| MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx)); + let view = cx.add_window(|cx| build_editor(buffer.clone(), cx)); + + _ = buffer.update(cx, |buffer, cx| { + buffer.edit( + vec![ + (Point::new(1, 0)..Point::new(1, 0), "\t"), + (Point::new(1, 1)..Point::new(1, 1), "\t"), + ], + None, + cx, + ); + }); + _ = view.update(cx, |view, cx| { + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)] + ); + + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 4)] + ); + + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + view.move_to_end(&MoveToEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(5), 6)] + ); + + view.move_to_beginning(&MoveToBeginning, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 2) + ]); + }); + view.select_to_beginning(&SelectToBeginning, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + view.select_to_end(&SelectToEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(5), 6)] + ); + }); +} + +#[gpui::test] +fn test_move_cursor_multibyte(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcde\nαβγδε", cx); + build_editor(buffer.clone(), cx) + }); + + assert_eq!('ⓐ'.len_utf8(), 3); + assert_eq!('α'.len_utf8(), 2); + + _ = view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 6)..Point::new(0, 12), + Point::new(1, 2)..Point::new(1, 4), + Point::new(2, 4)..Point::new(2, 8), + ], + true, + cx, + ); + assert_eq!(view.display_text(cx), "ⓐⓑ⋯ⓔ\nab⋯e\nαβ⋯ε"); + + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐ".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ⋯".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab⋯e".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + 
&[empty_range(1, "ab⋯".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "a".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "α".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ⋯".len())] + ); + view.move_right(&MoveRight, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ⋯ε".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab⋯e".len())] + ); + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβ⋯ε".len())] + ); + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "ab⋯e".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐⓑ".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "ⓐ".len())] + ); + view.move_left(&MoveLeft, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(0, "".len())] + ); + }); +} + +#[gpui::test] +fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx); + build_editor(buffer.clone(), cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]); + }); + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(1, "abcd".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβγ".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(3, "abcd".len())] + ); + + view.move_down(&MoveDown, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(3, "abcd".len())] + ); + + view.move_up(&MoveUp, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[empty_range(2, "αβγ".len())] + ); + }); +} + +#[gpui::test] +fn test_beginning_end_of_line(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let move_to_beg = MoveToBeginningOfLine { + stop_at_soft_wraps: true, + }; + + let move_to_end = MoveToEndOfLine { + stop_at_soft_wraps: true, + }; + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\n def", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 4), + ]); + }); + }); + + _ = view.update(cx, |view, cx| { + view.move_to_beginning_of_line(&move_to_beg, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), 
+ DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_to_beginning_of_line(&move_to_beg, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_to_beginning_of_line(&move_to_beg, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_to_end_of_line(&move_to_end, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3), + DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(1), 5), + ] + ); + }); + + // Moving to the end of line again is a no-op. + _ = view.update(cx, |view, cx| { + view.move_to_end_of_line(&move_to_end, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3), + DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(1), 5), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_left(&MoveLeft, cx); + view.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 2), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 0), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 2), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.select_to_end_of_line( + &SelectToEndOfLine { + stop_at_soft_wraps: true, + }, + cx, + ); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 3), + DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 5), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.delete_to_end_of_line(&DeleteToEndOfLine, cx); + assert_eq!(view.display_text(cx), "ab\n de"); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 4), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); + assert_eq!(view.display_text(cx), "\n"); + assert_eq!( + view.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), + 
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + ] + ); + }); +} + +#[gpui::test] +fn test_beginning_end_of_line_ignore_soft_wrap(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let move_to_beg = MoveToBeginningOfLine { + stop_at_soft_wraps: false, + }; + + let move_to_end = MoveToEndOfLine { + stop_at_soft_wraps: false, + }; + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("thequickbrownfox\njumpedoverthelazydogs", cx); + build_editor(buffer, cx) + }); + + _ = view.update(cx, |view, cx| { + view.set_wrap_width(Some(140.0.into()), cx); + + // We expect the following lines after wrapping + // ``` + // thequickbrownfox + // jumpedoverthelazydo + // gs + // ``` + // The final `gs` was soft-wrapped onto a new line. + assert_eq!( + "thequickbrownfox\njumpedoverthelaz\nydogs", + view.display_text(cx), + ); + + // First, let's assert behavior on the first line, that was not soft-wrapped. + // Start the cursor at the `k` on the first line + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 7)..DisplayPoint::new(DisplayRow(0), 7) + ]); + }); + + // Moving to the beginning of the line should put us at the beginning of the line. + view.move_to_beginning_of_line(&move_to_beg, cx); + assert_eq!( + vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),], + view.selections.display_ranges(cx) + ); + + // Moving to the end of the line should put us at the end of the line. + view.move_to_end_of_line(&move_to_end, cx); + assert_eq!( + vec![DisplayPoint::new(DisplayRow(0), 16)..DisplayPoint::new(DisplayRow(0), 16),], + view.selections.display_ranges(cx) + ); + + // Now, let's assert behavior on the second line, that ended up being soft-wrapped. + // Start the cursor at the last line (`y` that was wrapped to a new line) + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 0) + ]); + }); + + // Moving to the beginning of the line should put us at the start of the second line of + // display text, i.e., the `j`. + view.move_to_beginning_of_line(&move_to_beg, cx); + assert_eq!( + vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),], + view.selections.display_ranges(cx) + ); + + // Moving to the beginning of the line again should be a no-op. + view.move_to_beginning_of_line(&move_to_beg, cx); + assert_eq!( + vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),], + view.selections.display_ranges(cx) + ); + + // Moving to the end of the line should put us right after the `s` that was soft-wrapped to the + // next display line. + view.move_to_end_of_line(&move_to_end, cx); + assert_eq!( + vec![DisplayPoint::new(DisplayRow(2), 5)..DisplayPoint::new(DisplayRow(2), 5),], + view.selections.display_ranges(cx) + ); + + // Moving to the end of the line again should be a no-op. 
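These soft-wrap assertions depend on the mapping between display rows (after wrapping at a fixed width) and logical lines: with `stop_at_soft_wraps: false`, "beginning/end of line" means the start and end of the logical line, which may sit on a different display row. A small sketch of that mapping using a naive fixed-character-width wrap (illustrative; Zed wraps by pixel width):

// Wrap each logical line into display rows of at most `width` characters and
// remember which logical line each display row came from.
fn wrap(text: &str, width: usize) -> Vec<(usize, String)> {
    let mut rows = Vec::new();
    for (line_index, line) in text.lines().enumerate() {
        let chars: Vec<char> = line.chars().collect();
        if chars.is_empty() {
            rows.push((line_index, String::new()));
            continue;
        }
        for chunk in chars.chunks(width) {
            rows.push((line_index, chunk.iter().collect()));
        }
    }
    rows
}

// With soft wraps ignored, "end of line" for a display row is the end of the
// whole logical line, i.e. the last display row produced from that line.
fn logical_line_end_row(rows: &[(usize, String)], display_row: usize) -> usize {
    let line = rows[display_row].0;
    rows.iter().rposition(|(l, _)| *l == line).unwrap()
}

fn main() {
    let rows = wrap("thequickbrownfox\njumpedoverthelazydogs", 16);
    assert_eq!(
        rows,
        [
            (0, "thequickbrownfox".to_string()),
            (1, "jumpedoverthelaz".to_string()),
            (1, "ydogs".to_string()),
        ]
    );

    // Display row 1 is the first half of logical line 1; its logical end is on
    // display row 2, where the soft-wrapped "ydogs" lives.
    assert_eq!(logical_line_end_row(&rows, 1), 2);
}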
+ view.move_to_end_of_line(&move_to_end, cx); + assert_eq!( + vec![DisplayPoint::new(DisplayRow(2), 5)..DisplayPoint::new(DisplayRow(2), 5),], + view.selections.display_ranges(cx) + ); + }); +} + +#[gpui::test] +fn test_prev_next_word_boundary(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("use std::str::{foo, bar}\n\n {baz.qux()}", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 11)..DisplayPoint::new(DisplayRow(0), 11), + DisplayPoint::new(DisplayRow(2), 4)..DisplayPoint::new(DisplayRow(2), 4), + ]) + }); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("use stdˇ::str::{foo, bar}\n\n ˇ{baz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("use ˇstd::str::{foo, bar}\n\nˇ {baz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("ˇuse std::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_selection_ranges("ˇuse std::str::{foo, barˇ}\n\n {baz.qux()}", view, cx); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_selection_ranges("useˇ std::str::{foo, bar}ˇ\n\n {baz.qux()}", view, cx); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_selection_ranges("use stdˇ::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx); + + view.move_right(&MoveRight, cx); + view.select_to_previous_word_start(&SelectToPreviousWordStart, cx); + assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx); + + view.select_to_previous_word_start(&SelectToPreviousWordStart, cx); + assert_selection_ranges("use std«ˇ::s»tr::{foo, bar}\n\n «ˇ{b»az.qux()}", view, cx); + + view.select_to_next_word_end(&SelectToNextWordEnd, cx); + assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx); + }); +} + +#[gpui::test] +fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx); + build_editor(buffer, cx) + }); + + _ = view.update(cx, |view, cx| { + view.set_wrap_width(Some(140.0.into()), cx); + assert_eq!( + view.display_text(cx), + "use one::{\n two::three::\n four::five\n};" + ); + + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(1), 7)..DisplayPoint::new(DisplayRow(1), 7) + ]); + }); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(1), 9)..DisplayPoint::new(DisplayRow(1), 9)] + ); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(1), 14)..DisplayPoint::new(DisplayRow(1), 14)] + ); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(2), 
4)..DisplayPoint::new(DisplayRow(2), 4)] + ); + + view.move_to_next_word_end(&MoveToNextWordEnd, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(2), 8)..DisplayPoint::new(DisplayRow(2), 8)] + ); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(2), 4)..DisplayPoint::new(DisplayRow(2), 4)] + ); + + view.move_to_previous_word_start(&MoveToPreviousWordStart, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(1), 14)..DisplayPoint::new(DisplayRow(1), 14)] + ); + }); +} + +#[gpui::test] +async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let line_height = cx.editor(|editor, cx| { + editor + .style() + .unwrap() + .text + .line_height_in_pixels(cx.rem_size()) + }); + cx.simulate_window_resize(cx.window, size(px(100.), 4. * line_height)); + + cx.set_state( + &r#"ˇone + two + + three + fourˇ + five + + six"# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx)); + cx.assert_editor_state( + &r#"one + two + ˇ + three + four + five + ˇ + six"# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx)); + cx.assert_editor_state( + &r#"one + two + + three + four + five + ˇ + sixˇ"# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx)); + cx.assert_editor_state( + &r#"one + two + + three + four + five + + sixˇ"# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx)); + cx.assert_editor_state( + &r#"one + two + + three + four + five + ˇ + six"# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx)); + cx.assert_editor_state( + &r#"one + two + ˇ + three + four + five + + six"# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx)); + cx.assert_editor_state( + &r#"ˇone + two + + three + four + five + + six"# + .unindent(), + ); +} + +#[gpui::test] +async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + let line_height = cx.editor(|editor, cx| { + editor + .style() + .unwrap() + .text + .line_height_in_pixels(cx.rem_size()) + }); + let window = cx.window; + cx.simulate_window_resize(window, size(px(1000.), 4. * line_height + px(0.5))); + + cx.set_state( + &r#"ˇone + two + three + four + five + six + seven + eight + nine + ten + "#, + ); + + cx.update_editor(|editor, cx| { + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 0.) + ); + editor.scroll_screen(&ScrollAmount::Page(1.), cx); + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 3.) + ); + editor.scroll_screen(&ScrollAmount::Page(1.), cx); + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 6.) + ); + editor.scroll_screen(&ScrollAmount::Page(-1.), cx); + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 3.) + ); + + editor.scroll_screen(&ScrollAmount::Page(-0.5), cx); + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 1.) 
+ ); + editor.scroll_screen(&ScrollAmount::Page(0.5), cx); + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 3.) + ); + }); +} + +#[gpui::test] +async fn test_autoscroll(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let line_height = cx.update_editor(|editor, cx| { + editor.set_vertical_scroll_margin(2, cx); + editor + .style() + .unwrap() + .text + .line_height_in_pixels(cx.rem_size()) + }); + let window = cx.window; + cx.simulate_window_resize(window, size(px(1000.), 6. * line_height)); + + cx.set_state( + &r#"ˇone + two + three + four + five + six + seven + eight + nine + ten + "#, + ); + cx.update_editor(|editor, cx| { + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 0.0) + ); + }); + + // Add a cursor below the visible area. Since both cursors cannot fit + // on screen, the editor autoscrolls to reveal the newest cursor, and + // allows the vertical scroll margin below that cursor. + cx.update_editor(|editor, cx| { + editor.change_selections(Some(Autoscroll::fit()), cx, |selections| { + selections.select_ranges([ + Point::new(0, 0)..Point::new(0, 0), + Point::new(6, 0)..Point::new(6, 0), + ]); + }) + }); + cx.update_editor(|editor, cx| { + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 3.0) + ); + }); + + // Move down. The editor cursor scrolls down to track the newest cursor. + cx.update_editor(|editor, cx| { + editor.move_down(&Default::default(), cx); + }); + cx.update_editor(|editor, cx| { + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 4.0) + ); + }); + + // Add a cursor above the visible area. Since both cursors fit on screen, + // the editor scrolls to show both. + cx.update_editor(|editor, cx| { + editor.change_selections(Some(Autoscroll::fit()), cx, |selections| { + selections.select_ranges([ + Point::new(1, 0)..Point::new(1, 0), + Point::new(6, 0)..Point::new(6, 0), + ]); + }) + }); + cx.update_editor(|editor, cx| { + assert_eq!( + editor.snapshot(cx).scroll_position(), + gpui::Point::new(0., 1.0) + ); + }); +} + +#[gpui::test] +async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let line_height = cx.editor(|editor, cx| { + editor + .style() + .unwrap() + .text + .line_height_in_pixels(cx.rem_size()) + }); + let window = cx.window; + cx.simulate_window_resize(window, size(px(100.), 4. 
* line_height)); + cx.set_state( + &r#" + ˇone + two + threeˇ + four + five + six + seven + eight + nine + ten + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_page_down(&MovePageDown::default(), cx)); + cx.assert_editor_state( + &r#" + one + two + three + ˇfour + five + sixˇ + seven + eight + nine + ten + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_page_down(&MovePageDown::default(), cx)); + cx.assert_editor_state( + &r#" + one + two + three + four + five + six + ˇseven + eight + nineˇ + ten + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_page_up(&MovePageUp::default(), cx)); + cx.assert_editor_state( + &r#" + one + two + three + ˇfour + five + sixˇ + seven + eight + nine + ten + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| editor.move_page_up(&MovePageUp::default(), cx)); + cx.assert_editor_state( + &r#" + ˇone + two + threeˇ + four + five + six + seven + eight + nine + ten + "# + .unindent(), + ); + + // Test select collapsing + cx.update_editor(|editor, cx| { + editor.move_page_down(&MovePageDown::default(), cx); + editor.move_page_down(&MovePageDown::default(), cx); + editor.move_page_down(&MovePageDown::default(), cx); + }); + cx.assert_editor_state( + &r#" + one + two + three + four + five + six + seven + eight + nine + ˇten + ˇ"# + .unindent(), + ); +} + +#[gpui::test] +async fn test_delete_to_beginning_of_line(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + cx.set_state("one «two threeˇ» four"); + cx.update_editor(|editor, cx| { + editor.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); + assert_eq!(editor.text(cx), " four"); + }); +} + +#[gpui::test] +fn test_delete_to_word_boundary(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("one two three four", cx); + build_editor(buffer.clone(), cx) + }); + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + // an empty selection - the preceding word fragment is deleted + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + // characters selected - they are deleted + DisplayPoint::new(DisplayRow(0), 9)..DisplayPoint::new(DisplayRow(0), 12), + ]) + }); + view.delete_to_previous_word_start(&DeleteToPreviousWordStart, cx); + assert_eq!(view.buffer.read(cx).read(cx).text(), "e two te four"); + }); + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + // an empty selection - the following word fragment is deleted + DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3), + // characters selected - they are deleted + DisplayPoint::new(DisplayRow(0), 9)..DisplayPoint::new(DisplayRow(0), 10), + ]) + }); + view.delete_to_next_word_end(&DeleteToNextWordEnd, cx); + assert_eq!(view.buffer.read(cx).read(cx).text(), "e t te our"); + }); +} + +#[gpui::test] +fn test_newline(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx); + build_editor(buffer.clone(), cx) + }); + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2), + DisplayPoint::new(DisplayRow(1), 
6)..DisplayPoint::new(DisplayRow(1), 6), + ]) + }); + + view.newline(&Newline, cx); + assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n"); + }); +} + +#[gpui::test] +fn test_newline_with_old_selections(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple( + " + a + b( + X + ) + c( + X + ) + " + .unindent() + .as_str(), + cx, + ); + let mut editor = build_editor(buffer.clone(), cx); + editor.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(2, 4)..Point::new(2, 5), + Point::new(5, 4)..Point::new(5, 5), + ]) + }); + editor + }); + + _ = editor.update(cx, |editor, cx| { + // Edit the buffer directly, deleting ranges surrounding the editor's selections + editor.buffer.update(cx, |buffer, cx| { + buffer.edit( + [ + (Point::new(1, 2)..Point::new(3, 0), ""), + (Point::new(4, 2)..Point::new(6, 0), ""), + ], + None, + cx, + ); + assert_eq!( + buffer.read(cx).text(), + " + a + b() + c() + " + .unindent() + ); + }); + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(1, 2)..Point::new(1, 2), + Point::new(2, 2)..Point::new(2, 2), + ], + ); + + editor.newline(&Newline, cx); + assert_eq!( + editor.text(cx), + " + a + b( + ) + c( + ) + " + .unindent() + ); + + // The selections are moved after the inserted newlines + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(2, 0)..Point::new(2, 0), + Point::new(4, 0)..Point::new(4, 0), + ], + ); + }); +} + +#[gpui::test] +async fn test_newline_above(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(4) + }); + + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + ) + .with_indents_query(r#"(_ "(" ")" @end) @indent"#) + .unwrap(), + ); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + cx.set_state(indoc! {" + const a: ˇA = ( + (ˇ + «const_functionˇ»(ˇ), + so«mˇ»et«hˇ»ing_ˇelse,ˇ + )ˇ + ˇ);ˇ + "}); + + cx.update_editor(|e, cx| e.newline_above(&NewlineAbove, cx)); + cx.assert_editor_state(indoc! {" + ˇ + const a: A = ( + ˇ + ( + ˇ + ˇ + const_function(), + ˇ + ˇ + ˇ + ˇ + something_else, + ˇ + ) + ˇ + ˇ + ); + "}); +} + +#[gpui::test] +async fn test_newline_below(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(4) + }); + + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + ) + .with_indents_query(r#"(_ "(" ")" @end) @indent"#) + .unwrap(), + ); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + cx.set_state(indoc! {" + const a: ˇA = ( + (ˇ + «const_functionˇ»(ˇ), + so«mˇ»et«hˇ»ing_ˇelse,ˇ + )ˇ + ˇ);ˇ + "}); + + cx.update_editor(|e, cx| e.newline_below(&NewlineBelow, cx)); + cx.assert_editor_state(indoc! 
{" + const a: A = ( + ˇ + ( + ˇ + const_function(), + ˇ + ˇ + something_else, + ˇ + ˇ + ˇ + ˇ + ) + ˇ + ); + ˇ + ˇ + "}); +} + +#[gpui::test] +async fn test_newline_comments(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(4) + }); + + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["//".into()], + ..LanguageConfig::default() + }, + None, + )); + { + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + cx.set_state(indoc! {" + // Fooˇ + "}); + + cx.update_editor(|e, cx| e.newline(&Newline, cx)); + cx.assert_editor_state(indoc! {" + // Foo + //ˇ + "}); + // Ensure that if cursor is before the comment start, we do not actually insert a comment prefix. + cx.set_state(indoc! {" + ˇ// Foo + "}); + cx.update_editor(|e, cx| e.newline(&Newline, cx)); + cx.assert_editor_state(indoc! {" + + ˇ// Foo + "}); + } + // Ensure that comment continuations can be disabled. + update_test_language_settings(cx, |settings| { + settings.defaults.extend_comment_on_newline = Some(false); + }); + let mut cx = EditorTestContext::new(cx).await; + cx.set_state(indoc! {" + // Fooˇ + "}); + cx.update_editor(|e, cx| e.newline(&Newline, cx)); + cx.assert_editor_state(indoc! {" + // Foo + ˇ + "}); +} + +#[gpui::test] +fn test_insert_with_old_selections(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx); + let mut editor = build_editor(buffer.clone(), cx); + editor.change_selections(None, cx, |s| s.select_ranges([3..4, 11..12, 19..20])); + editor + }); + + _ = editor.update(cx, |editor, cx| { + // Edit the buffer directly, deleting ranges surrounding the editor's selections + editor.buffer.update(cx, |buffer, cx| { + buffer.edit([(2..5, ""), (10..13, ""), (18..21, "")], None, cx); + assert_eq!(buffer.read(cx).text(), "a(), b(), c()".unindent()); + }); + assert_eq!(editor.selections.ranges(cx), &[2..2, 7..7, 12..12],); + + editor.insert("Z", cx); + assert_eq!(editor.text(cx), "a(Z), b(Z), c(Z)"); + + // The selections are moved after the inserted characters + assert_eq!(editor.selections.ranges(cx), &[3..3, 9..9, 15..15],); + }); +} + +#[gpui::test] +async fn test_tab(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(3) + }); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state(indoc! {" + ˇabˇc + ˇ🏀ˇ🏀ˇefg + dˇ + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + ˇab ˇc + ˇ🏀 ˇ🏀 ˇefg + d ˇ + "}); + + cx.set_state(indoc! {" + a + «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + a + «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ» + "}); +} + +#[gpui::test] +async fn test_tab_in_leading_whitespace_auto_indents_lines(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + ) + .with_indents_query(r#"(_ "(" ")" @end) @indent"#) + .unwrap(), + ); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // cursors that are already at the suggested indent level insert + // a soft tab. cursors that are to the left of the suggested indent + // auto-indent their line. + cx.set_state(indoc! 
{" + ˇ + const a: B = ( + c( + d( + ˇ + ) + ˇ + ˇ ) + ); + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + ˇ + const a: B = ( + c( + d( + ˇ + ) + ˇ + ˇ) + ); + "}); + + // handle auto-indent when there are multiple cursors on the same line + cx.set_state(indoc! {" + const a: B = ( + c( + ˇ ˇ + ˇ ) + ); + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c( + ˇ + ˇ) + ); + "}); +} + +#[gpui::test] +async fn test_tab_with_mixed_whitespace(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(4) + }); + + let language = Arc::new( + Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + ) + .with_indents_query(r#"(_ "{" "}" @end) @indent"#) + .unwrap(), + ); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + cx.set_state(indoc! {" + fn a() { + if b { + \t ˇc + } + } + "}); + + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + fn a() { + if b { + ˇc + } + } + "}); +} + +#[gpui::test] +async fn test_indent_outdent(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(4); + }); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + + // select across line ending + cx.set_state(indoc! {" + one two + t«hree + ˇ» four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + t«hree + ˇ» four + "}); + + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + t«hree + ˇ» four + "}); + + // Ensure that indenting/outdenting works when the cursor is at column 0. + cx.set_state(indoc! {" + one two + ˇthree + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); + + cx.set_state(indoc! {" + one two + ˇ three + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); +} + +#[gpui::test] +async fn test_indent_outdent_with_hard_tabs(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.hard_tabs = Some(true); + }); + + let mut cx = EditorTestContext::new(cx).await; + + // select two ranges on one line + cx.set_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + \t«oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + \t\t«oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + \t«oneˇ» «twoˇ» + three + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + «oneˇ» «twoˇ» + three + four + "}); + + // select across a line ending + cx.set_state(indoc! {" + one two + t«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! 
{" + one two + \tt«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + \t\tt«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + \tt«hree + ˇ»four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + t«hree + ˇ»four + "}); + + // Ensure that indenting/outdenting works when the cursor is at column 0. + cx.set_state(indoc! {" + one two + ˇthree + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); + cx.update_editor(|e, cx| e.tab(&Tab, cx)); + cx.assert_editor_state(indoc! {" + one two + \tˇthree + four + "}); + cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx)); + cx.assert_editor_state(indoc! {" + one two + ˇthree + four + "}); +} + +#[gpui::test] +fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.languages.extend([ + ( + "TOML".into(), + LanguageSettingsContent { + tab_size: NonZeroU32::new(2), + ..Default::default() + }, + ), + ( + "Rust".into(), + LanguageSettingsContent { + tab_size: NonZeroU32::new(4), + ..Default::default() + }, + ), + ]); + }); + + let toml_language = Arc::new(Language::new( + LanguageConfig { + name: "TOML".into(), + ..Default::default() + }, + None, + )); + let rust_language = Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + ..Default::default() + }, + None, + )); + + let toml_buffer = + cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx).with_language(toml_language, cx)); + let rust_buffer = cx.new_model(|cx| { + Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx) + }); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts( + toml_buffer.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }], + cx, + ); + multibuffer.push_excerpts( + rust_buffer.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 0), + primary: None, + }], + cx, + ); + multibuffer + }); + + cx.add_window(|cx| { + let mut editor = build_editor(multibuffer, cx); + + assert_eq!( + editor.text(cx), + indoc! {" + a = 1 + b = 2 + + const c: usize = 3; + "} + ); + + select_ranges( + &mut editor, + indoc! {" + «aˇ» = 1 + b = 2 + + «const c:ˇ» usize = 3; + "}, + cx, + ); + + editor.tab(&Tab, cx); + assert_text_with_selections( + &mut editor, + indoc! {" + «aˇ» = 1 + b = 2 + + «const c:ˇ» usize = 3; + "}, + cx, + ); + editor.tab_prev(&TabPrev, cx); + assert_text_with_selections( + &mut editor, + indoc! {" + «aˇ» = 1 + b = 2 + + «const c:ˇ» usize = 3; + "}, + cx, + ); + + editor + }); +} + +#[gpui::test] +async fn test_backspace(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Basic backspace + cx.set_state(indoc! {" + onˇe two three + fou«rˇ» five six + seven «ˇeight nine + »ten + "}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! {" + oˇe two three + fouˇ five six + seven ˇten + "}); + + // Test backspace inside and around indents + cx.set_state(indoc! {" + zero + ˇone + ˇtwo + ˇ ˇ ˇ three + ˇ ˇ four + "}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! 
{" + zero + ˇone + ˇtwo + ˇ threeˇ four + "}); + + // Test backspace with line_mode set to true + cx.update_editor(|e, _| e.selections.line_mode = true); + cx.set_state(indoc! {" + The ˇquick ˇbrown + fox jumps over + the lazy dog + ˇThe qu«ick bˇ»rown"}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state(indoc! {" + ˇfox jumps over + the lazy dogˇ"}); +} + +#[gpui::test] +async fn test_delete(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state(indoc! {" + onˇe two three + fou«rˇ» five six + seven «ˇeight nine + »ten + "}); + cx.update_editor(|e, cx| e.delete(&Delete, cx)); + cx.assert_editor_state(indoc! {" + onˇ two three + fouˇ five six + seven ˇten + "}); + + // Test backspace with line_mode set to true + cx.update_editor(|e, _| e.selections.line_mode = true); + cx.set_state(indoc! {" + The ˇquick ˇbrown + fox «ˇjum»ps over + the lazy dog + ˇThe qu«ick bˇ»rown"}); + cx.update_editor(|e, cx| e.backspace(&Backspace, cx)); + cx.assert_editor_state("ˇthe lazy dogˇ"); +} + +#[gpui::test] +fn test_delete_line(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0), + ]) + }); + view.delete_line(&DeleteLine, cx); + assert_eq!(view.display_text(cx), "ghi"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1) + ] + ); + }); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(0), 1) + ]) + }); + view.delete_line(&DeleteLine, cx); + assert_eq!(view.display_text(cx), "ghi\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1)] + ); + }); +} + +#[gpui::test] +fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("aaa\nbbb\nccc\nddd\n\n", cx); + let mut editor = build_editor(buffer.clone(), cx); + let buffer = buffer.read(cx).as_singleton().unwrap(); + + assert_eq!( + editor.selections.ranges::(cx), + &[Point::new(0, 0)..Point::new(0, 0)] + ); + + // When on single line, replace newline at end by space + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n\n"); + assert_eq!( + editor.selections.ranges::(cx), + &[Point::new(0, 3)..Point::new(0, 3)] + ); + + // When multiple lines are selected, remove newlines that are spanned by the selection + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(0, 5)..Point::new(2, 2)]) + }); + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb ccc ddd\n\n"); + assert_eq!( + editor.selections.ranges::(cx), + &[Point::new(0, 
11)..Point::new(0, 11)] + ); + + // Undo should be transactional + editor.undo(&Undo, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n\n"); + assert_eq!( + editor.selections.ranges::(cx), + &[Point::new(0, 5)..Point::new(2, 2)] + ); + + // When joining an empty line don't insert a space + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(2, 1)..Point::new(2, 2)]) + }); + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n"); + assert_eq!( + editor.selections.ranges::(cx), + [Point::new(2, 3)..Point::new(2, 3)] + ); + + // We can remove trailing newlines + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd"); + assert_eq!( + editor.selections.ranges::(cx), + [Point::new(2, 3)..Point::new(2, 3)] + ); + + // We don't blow up on the last line + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd"); + assert_eq!( + editor.selections.ranges::(cx), + [Point::new(2, 3)..Point::new(2, 3)] + ); + + // reset to test indentation + editor.buffer.update(cx, |buffer, cx| { + buffer.edit( + [ + (Point::new(1, 0)..Point::new(1, 2), " "), + (Point::new(2, 0)..Point::new(2, 3), " \n\td"), + ], + None, + cx, + ) + }); + + // We remove any leading spaces + assert_eq!(buffer.read(cx).text(), "aaa bbb\n c\n \n\td"); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(0, 1)..Point::new(0, 1)]) + }); + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb c\n \n\td"); + + // We don't insert a space for a line containing only spaces + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb c\n\td"); + + // We ignore any leading tabs + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb c d"); + + editor + }); +} + +#[gpui::test] +fn test_join_lines_with_multi_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("aaa\nbbb\nccc\nddd\n\n", cx); + let mut editor = build_editor(buffer.clone(), cx); + let buffer = buffer.read(cx).as_singleton().unwrap(); + + editor.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(0, 2)..Point::new(1, 1), + Point::new(1, 2)..Point::new(1, 2), + Point::new(3, 1)..Point::new(3, 2), + ]) + }); + + editor.join_lines(&JoinLines, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb ccc\nddd\n"); + + assert_eq!( + editor.selections.ranges::(cx), + [ + Point::new(0, 7)..Point::new(0, 7), + Point::new(1, 3)..Point::new(1, 3) + ] + ); + editor + }); +} + +#[gpui::test] +async fn test_join_lines_with_git_diff_base( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + Line 0 + Line 1 + Line 2 + Line 3 + "# + .unindent(); + + cx.set_state( + &r#" + ˇLine 0 + Line 1 + Line 2 + Line 3 + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + + // Join lines + cx.update_editor(|editor, cx| { + editor.join_lines(&JoinLines, cx); + }); + executor.run_until_parked(); + + cx.assert_editor_state( + &r#" + Line 0ˇ Line 1 + Line 2 + Line 3 + "# + .unindent(), + ); + // Join again + cx.update_editor(|editor, cx| { + editor.join_lines(&JoinLines, cx); + }); + executor.run_until_parked(); + + cx.assert_editor_state( + &r#" + Line 0 Line 1ˇ Line 2 + Line 3 + "# + .unindent(), + ); +} + +#[gpui::test] +async fn 
test_custom_newlines_cause_no_false_positive_diffs( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + cx.set_state("Line 0\r\nLine 1\rˇ\nLine 2\r\nLine 3"); + cx.set_diff_base(Some("Line 0\r\nLine 1\r\nLine 2\r\nLine 3")); + executor.run_until_parked(); + + cx.update_editor(|editor, cx| { + assert_eq!( + editor + .buffer() + .read(cx) + .snapshot(cx) + .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + .collect::>(), + Vec::new(), + "Should not have any diffs for files with custom newlines" + ); + }); +} + +#[gpui::test] +async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Test sort_lines_case_insensitive() + cx.set_state(indoc! {" + «z + y + x + Z + Y + Xˇ» + "}); + cx.update_editor(|e, cx| e.sort_lines_case_insensitive(&SortLinesCaseInsensitive, cx)); + cx.assert_editor_state(indoc! {" + «x + X + y + Y + z + Zˇ» + "}); + + // Test reverse_lines() + cx.set_state(indoc! {" + «5 + 4 + 3 + 2 + 1ˇ» + "}); + cx.update_editor(|e, cx| e.reverse_lines(&ReverseLines, cx)); + cx.assert_editor_state(indoc! {" + «1 + 2 + 3 + 4 + 5ˇ» + "}); + + // Skip testing shuffle_line() + + // From here on out, test more complex cases of manipulate_lines() with a single driver method: sort_lines_case_sensitive() + // Since all methods calling manipulate_lines() are doing the exact same general thing (reordering lines) + + // Don't manipulate when cursor is on single line, but expand the selection + cx.set_state(indoc! {" + ddˇdd + ccc + bb + a + "}); + cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «ddddˇ» + ccc + bb + a + "}); + + // Basic manipulate case + // Start selection moves to column 0 + // End of selection shrinks to fit shorter line + cx.set_state(indoc! {" + dd«d + ccc + bb + aaaaaˇ» + "}); + cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «aaaaa + bb + ccc + dddˇ» + "}); + + // Manipulate case with newlines + cx.set_state(indoc! {" + dd«d + ccc + + bb + aaaaa + + ˇ» + "}); + cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + « + + aaaaa + bb + ccc + dddˇ» + + "}); + + // Adding new line + cx.set_state(indoc! {" + aa«a + bbˇ»b + "}); + cx.update_editor(|e, cx| e.manipulate_lines(cx, |lines| lines.push("added_line"))); + cx.assert_editor_state(indoc! {" + «aaa + bbb + added_lineˇ» + "}); + + // Removing line + cx.set_state(indoc! {" + aa«a + bbbˇ» + "}); + cx.update_editor(|e, cx| { + e.manipulate_lines(cx, |lines| { + lines.pop(); + }) + }); + cx.assert_editor_state(indoc! {" + «aaaˇ» + "}); + + // Removing all lines + cx.set_state(indoc! {" + aa«a + bbbˇ» + "}); + cx.update_editor(|e, cx| { + e.manipulate_lines(cx, |lines| { + lines.drain(..); + }) + }); + cx.assert_editor_state(indoc! {" + ˇ + "}); +} + +#[gpui::test] +async fn test_unique_lines_multi_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Consider continuous selection as single selection + cx.set_state(indoc! {" + Aaa«aa + cˇ»c«c + bb + aaaˇ»aa + "}); + cx.update_editor(|e, cx| e.unique_lines_case_sensitive(&UniqueLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «Aaaaa + ccc + bb + aaaaaˇ» + "}); + + cx.set_state(indoc! 
{" + Aaa«aa + cˇ»c«c + bb + aaaˇ»aa + "}); + cx.update_editor(|e, cx| e.unique_lines_case_insensitive(&UniqueLinesCaseInsensitive, cx)); + cx.assert_editor_state(indoc! {" + «Aaaaa + ccc + bbˇ» + "}); + + // Consider non continuous selection as distinct dedup operations + cx.set_state(indoc! {" + «aaaaa + bb + aaaaa + aaaaaˇ» + + aaa«aaˇ» + "}); + cx.update_editor(|e, cx| e.unique_lines_case_sensitive(&UniqueLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «aaaaa + bbˇ» + + «aaaaaˇ» + "}); +} + +#[gpui::test] +async fn test_unique_lines_single_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state(indoc! {" + «Aaa + aAa + Aaaˇ» + "}); + cx.update_editor(|e, cx| e.unique_lines_case_sensitive(&UniqueLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «Aaa + aAaˇ» + "}); + + cx.set_state(indoc! {" + «Aaa + aAa + aaAˇ» + "}); + cx.update_editor(|e, cx| e.unique_lines_case_insensitive(&UniqueLinesCaseInsensitive, cx)); + cx.assert_editor_state(indoc! {" + «Aaaˇ» + "}); +} + +#[gpui::test] +async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Manipulate with multiple selections on a single line + cx.set_state(indoc! {" + dd«dd + cˇ»c«c + bb + aaaˇ»aa + "}); + cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «aaaaa + bb + ccc + ddddˇ» + "}); + + // Manipulate with multiple disjoin selections + cx.set_state(indoc! {" + 5« + 4 + 3 + 2 + 1ˇ» + + dd«dd + ccc + bb + aaaˇ»aa + "}); + cx.update_editor(|e, cx| e.sort_lines_case_sensitive(&SortLinesCaseSensitive, cx)); + cx.assert_editor_state(indoc! {" + «1 + 2 + 3 + 4 + 5ˇ» + + «aaaaa + bb + ccc + ddddˇ» + "}); + + // Adding lines on each selection + cx.set_state(indoc! {" + 2« + 1ˇ» + + bb«bb + aaaˇ»aa + "}); + cx.update_editor(|e, cx| e.manipulate_lines(cx, |lines| lines.push("added line"))); + cx.assert_editor_state(indoc! {" + «2 + 1 + added lineˇ» + + «bbbb + aaaaa + added lineˇ» + "}); + + // Removing lines on each selection + cx.set_state(indoc! {" + 2« + 1ˇ» + + bb«bb + aaaˇ»aa + "}); + cx.update_editor(|e, cx| { + e.manipulate_lines(cx, |lines| { + lines.pop(); + }) + }); + cx.assert_editor_state(indoc! {" + «2ˇ» + + «bbbbˇ» + "}); +} + +#[gpui::test] +async fn test_manipulate_text(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Test convert_to_upper_case() + cx.set_state(indoc! {" + «hello worldˇ» + "}); + cx.update_editor(|e, cx| e.convert_to_upper_case(&ConvertToUpperCase, cx)); + cx.assert_editor_state(indoc! {" + «HELLO WORLDˇ» + "}); + + // Test convert_to_lower_case() + cx.set_state(indoc! {" + «HELLO WORLDˇ» + "}); + cx.update_editor(|e, cx| e.convert_to_lower_case(&ConvertToLowerCase, cx)); + cx.assert_editor_state(indoc! {" + «hello worldˇ» + "}); + + // Test multiple line, single selection case + // Test code hack that covers the fact that to_case crate doesn't support '\n' as a word boundary + cx.set_state(indoc! {" + «The quick brown + fox jumps over + the lazy dogˇ» + "}); + cx.update_editor(|e, cx| e.convert_to_title_case(&ConvertToTitleCase, cx)); + cx.assert_editor_state(indoc! 
{" + «The Quick Brown + Fox Jumps Over + The Lazy Dogˇ» + "}); + + // Test multiple line, single selection case + // Test code hack that covers the fact that to_case crate doesn't support '\n' as a word boundary + cx.set_state(indoc! {" + «The quick brown + fox jumps over + the lazy dogˇ» + "}); + cx.update_editor(|e, cx| e.convert_to_upper_camel_case(&ConvertToUpperCamelCase, cx)); + cx.assert_editor_state(indoc! {" + «TheQuickBrown + FoxJumpsOver + TheLazyDogˇ» + "}); + + // From here on out, test more complex cases of manipulate_text() + + // Test no selection case - should affect words cursors are in + // Cursor at beginning, middle, and end of word + cx.set_state(indoc! {" + ˇhello big beauˇtiful worldˇ + "}); + cx.update_editor(|e, cx| e.convert_to_upper_case(&ConvertToUpperCase, cx)); + cx.assert_editor_state(indoc! {" + «HELLOˇ» big «BEAUTIFULˇ» «WORLDˇ» + "}); + + // Test multiple selections on a single line and across multiple lines + cx.set_state(indoc! {" + «Theˇ» quick «brown + foxˇ» jumps «overˇ» + the «lazyˇ» dog + "}); + cx.update_editor(|e, cx| e.convert_to_upper_case(&ConvertToUpperCase, cx)); + cx.assert_editor_state(indoc! {" + «THEˇ» quick «BROWN + FOXˇ» jumps «OVERˇ» + the «LAZYˇ» dog + "}); + + // Test case where text length grows + cx.set_state(indoc! {" + «tschüߡ» + "}); + cx.update_editor(|e, cx| e.convert_to_upper_case(&ConvertToUpperCase, cx)); + cx.assert_editor_state(indoc! {" + «TSCHÜSSˇ» + "}); + + // Test to make sure we don't crash when text shrinks + cx.set_state(indoc! {" + aaa_bbbˇ + "}); + cx.update_editor(|e, cx| e.convert_to_lower_camel_case(&ConvertToLowerCamelCase, cx)); + cx.assert_editor_state(indoc! {" + «aaaBbbˇ» + "}); + + // Test to make sure we all aware of the fact that each word can grow and shrink + // Final selections should be aware of this fact + cx.set_state(indoc! {" + aaa_bˇbb bbˇb_ccc ˇccc_ddd + "}); + cx.update_editor(|e, cx| e.convert_to_lower_camel_case(&ConvertToLowerCamelCase, cx)); + cx.assert_editor_state(indoc! {" + «aaaBbbˇ» «bbbCccˇ» «cccDddˇ» + "}); + + cx.set_state(indoc! {" + «hElLo, WoRld!ˇ» + "}); + cx.update_editor(|e, cx| e.convert_to_opposite_case(&ConvertToOppositeCase, cx)); + cx.assert_editor_state(indoc! 
{" + «HeLlO, wOrLD!ˇ» + "}); +} + +#[gpui::test] +fn test_duplicate_line(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0), + ]) + }); + view.duplicate_line_down(&DuplicateLineDown, cx); + assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2), + DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0), + DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(6), 0), + ] + ); + }); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(2), 1), + ]) + }); + view.duplicate_line_down(&DuplicateLineDown, cx); + assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(4), 1), + DisplayPoint::new(DisplayRow(4), 2)..DisplayPoint::new(DisplayRow(5), 1), + ] + ); + }); + + // With `move_upwards` the selections stay in place, except for + // the lines inserted above them + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0), + ]) + }); + view.duplicate_line_up(&DuplicateLineUp, cx); + assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 0), + DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(6), 0), + ] + ); + }); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(2), 1), + ]) + }); + view.duplicate_line_up(&DuplicateLineUp, cx); + 
assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n"); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(2), 1), + ] + ); + }); +} + +#[gpui::test] +fn test_move_line_up_down(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 2)..Point::new(1, 2), + Point::new(2, 3)..Point::new(4, 1), + Point::new(7, 0)..Point::new(8, 4), + ], + true, + cx, + ); + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1), + DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(4), 3), + DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 2), + ]) + }); + assert_eq!( + view.display_text(cx), + "aa⋯bbb\nccc⋯eeee\nfffff\nggggg\n⋯i\njjjjj" + ); + + view.move_line_up(&MoveLineUp, cx); + assert_eq!( + view.display_text(cx), + "aa⋯bbb\nccc⋯eeee\nggggg\n⋯i\njjjjj\nfffff" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1), + DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3), + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(4), 2) + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_line_down(&MoveLineDown, cx); + assert_eq!( + view.display_text(cx), + "ccc⋯eeee\naa⋯bbb\nfffff\nggggg\n⋯i\njjjjj" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1), + DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(4), 3), + DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 2) + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_line_down(&MoveLineDown, cx); + assert_eq!( + view.display_text(cx), + "ccc⋯eeee\nfffff\naa⋯bbb\nggggg\n⋯i\njjjjj" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1), + DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1), + DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(4), 3), + DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 2) + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.move_line_up(&MoveLineUp, cx); + assert_eq!( + view.display_text(cx), + "ccc⋯eeee\naa⋯bbb\nggggg\n⋯i\njjjjj\nfffff" + ); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1), + DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3), + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(4), 2) + ] + ); + }); +} + +#[gpui::test] +fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor = cx.add_window(|cx| { + let buffer = 
MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); + build_editor(buffer, cx) + }); + _ = editor.update(cx, |editor, cx| { + let snapshot = editor.buffer.read(cx).snapshot(cx); + editor.insert_blocks( + [BlockProperties { + style: BlockStyle::Fixed, + position: snapshot.anchor_after(Point::new(2, 0)), + disposition: BlockDisposition::Below, + height: 1, + render: Box::new(|_| div().into_any()), + }], + Some(Autoscroll::fit()), + cx, + ); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(2, 0)..Point::new(2, 0)]) + }); + editor.move_line_down(&MoveLineDown, cx); + }); +} + +#[gpui::test] +fn test_transpose(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + _ = cx.add_window(|cx| { + let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), cx); + editor.set_style(EditorStyle::default(), cx); + editor.change_selections(None, cx, |s| s.select_ranges([1..1])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bac"); + assert_eq!(editor.selections.ranges(cx), [2..2]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bca"); + assert_eq!(editor.selections.ranges(cx), [3..3]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bac"); + assert_eq!(editor.selections.ranges(cx), [3..3]); + + editor + }); + + _ = cx.add_window(|cx| { + let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx); + editor.set_style(EditorStyle::default(), cx); + editor.change_selections(None, cx, |s| s.select_ranges([3..3])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acb\nde"); + assert_eq!(editor.selections.ranges(cx), [3..3]); + + editor.change_selections(None, cx, |s| s.select_ranges([4..4])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acbd\ne"); + assert_eq!(editor.selections.ranges(cx), [5..5]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acbde\n"); + assert_eq!(editor.selections.ranges(cx), [6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "acbd\ne"); + assert_eq!(editor.selections.ranges(cx), [6..6]); + + editor + }); + + _ = cx.add_window(|cx| { + let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx); + editor.set_style(EditorStyle::default(), cx); + editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2, 4..4])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bacd\ne"); + assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcade\n"); + assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcda\ne"); + assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcade\n"); + assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "bcaed\n"); + assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]); + + editor + }); + + _ = cx.add_window(|cx| { + let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), cx); + editor.set_style(EditorStyle::default(), cx); + editor.change_selections(None, cx, |s| s.select_ranges([4..4])); + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "🏀🍐✋"); + assert_eq!(editor.selections.ranges(cx), 
[8..8]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "🏀✋🍐"); + assert_eq!(editor.selections.ranges(cx), [11..11]); + + editor.transpose(&Default::default(), cx); + assert_eq!(editor.text(cx), "🏀🍐✋"); + assert_eq!(editor.selections.ranges(cx), [11..11]); + + editor + }); +} + +#[gpui::test] +async fn test_clipboard(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state("«one✅ ˇ»two «three ˇ»four «five ˇ»six "); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state("ˇtwo ˇfour ˇsix "); + + // Paste with three cursors. Each cursor pastes one slice of the clipboard text. + cx.set_state("two ˇfour ˇsix ˇ"); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state("two one✅ ˇfour three ˇsix five ˇ"); + + // Paste again but with only two cursors. Since the number of cursors doesn't + // match the number of slices in the clipboard, the entire clipboard text + // is pasted at each cursor. + cx.set_state("ˇtwo one✅ four three six five ˇ"); + cx.update_editor(|e, cx| { + e.handle_input("( ", cx); + e.paste(&Paste, cx); + e.handle_input(") ", cx); + }); + cx.assert_editor_state( + &([ + "( one✅ ", + "three ", + "five ) ˇtwo one✅ four three six five ( one✅ ", + "three ", + "five ) ˇ", + ] + .join("\n")), + ); + + // Cut with three selections, one of which is full-line. + cx.set_state(indoc! {" + 1«2ˇ»3 + 4ˇ567 + «8ˇ»9"}); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state(indoc! {" + 1ˇ3 + ˇ9"}); + + // Paste with three selections, noticing how the copied selection that was full-line + // gets inserted before the second cursor. + cx.set_state(indoc! {" + 1ˇ3 + 9ˇ + «oˇ»ne"}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + 12ˇ3 + 4567 + 9ˇ + 8ˇne"}); + + // Copy with a single cursor only, which writes the whole line into the clipboard. + cx.set_state(indoc! {" + The quick brown + fox juˇmps over + the lazy dog"}); + cx.update_editor(|e, cx| e.copy(&Copy, cx)); + assert_eq!( + cx.read_from_clipboard().map(|item| item.text().to_owned()), + Some("fox jumps over\n".to_owned()) + ); + + // Paste with three selections, noticing how the copied full-line selection is inserted + // before the empty selections but replaces the selection that is non-empty. + cx.set_state(indoc! {" + Tˇhe quick brown + «foˇ»x jumps over + tˇhe lazy dog"}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + fox jumps over + Tˇhe quick brown + fox jumps over + ˇx jumps over + fox jumps over + tˇhe lazy dog"}); +} + +#[gpui::test] +async fn test_paste_multiline(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = Arc::new(Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // Cut an indented block, without the leading whitespace. + cx.set_state(indoc! {" + const a: B = ( + c(), + «d( + e, + f + )ˇ» + ); + "}); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + ˇ + ); + "}); + + // Paste it at the same position. + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + d( + e, + f + )ˇ + ); + "}); + + // Paste it at a line with a lower indent level. + cx.set_state(indoc! 
{" + ˇ + const a: B = ( + c(), + ); + "}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + d( + e, + f + )ˇ + const a: B = ( + c(), + ); + "}); + + // Cut an indented block, with the leading whitespace. + cx.set_state(indoc! {" + const a: B = ( + c(), + « d( + e, + f + ) + ˇ»); + "}); + cx.update_editor(|e, cx| e.cut(&Cut, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + ˇ); + "}); + + // Paste it at the same position. + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + d( + e, + f + ) + ˇ); + "}); + + // Paste it at a line with a higher indent level. + cx.set_state(indoc! {" + const a: B = ( + c(), + d( + e, + fˇ + ) + ); + "}); + cx.update_editor(|e, cx| e.paste(&Paste, cx)); + cx.assert_editor_state(indoc! {" + const a: B = ( + c(), + d( + e, + f d( + e, + f + ) + ˇ + ) + ); + "}); +} + +#[gpui::test] +fn test_select_all(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("abc\nde\nfgh", cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.select_all(&SelectAll, cx); + assert_eq!( + view.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(2), 3)] + ); + }); +} + +#[gpui::test] +fn test_select_line(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&sample_text(6, 5, 'a'), cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + DisplayPoint::new(DisplayRow(4), 2)..DisplayPoint::new(DisplayRow(4), 2), + ]) + }); + view.select_line(&SelectLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(2), 0), + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(5), 0), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.select_line(&SelectLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(3), 0), + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(5), 5), + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.select_line(&SelectLine, cx); + assert_eq!( + view.selections.display_ranges(cx), + vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(5), 5)] + ); + }); +} + +#[gpui::test] +fn test_split_selection_into_lines(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&sample_text(9, 5, 'a'), cx); + build_editor(buffer, cx) + }); + _ = view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 2)..Point::new(1, 2), + Point::new(2, 3)..Point::new(4, 1), + Point::new(7, 0)..Point::new(8, 4), + ], + true, + cx, + ); + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + 
DisplayPoint::new(DisplayRow(4), 4)..DisplayPoint::new(DisplayRow(4), 4), + ]) + }); + assert_eq!(view.display_text(cx), "aa⋯bbb\nccc⋯eeee\nfffff\nggggg\n⋯i"); + }); + + _ = view.update(cx, |view, cx| { + view.split_selection_into_lines(&SplitSelectionIntoLines, cx); + assert_eq!( + view.display_text(cx), + "aaaaa\nbbbbb\nccc⋯eeee\nfffff\nggggg\n⋯i" + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 0), + DisplayPoint::new(DisplayRow(5), 4)..DisplayPoint::new(DisplayRow(5), 4) + ] + ); + }); + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(0), 1) + ]) + }); + view.split_selection_into_lines(&SplitSelectionIntoLines, cx); + assert_eq!( + view.display_text(cx), + "aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii" + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 5), + DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(1), 5), + DisplayPoint::new(DisplayRow(2), 5)..DisplayPoint::new(DisplayRow(2), 5), + DisplayPoint::new(DisplayRow(3), 5)..DisplayPoint::new(DisplayRow(3), 5), + DisplayPoint::new(DisplayRow(4), 5)..DisplayPoint::new(DisplayRow(4), 5), + DisplayPoint::new(DisplayRow(5), 5)..DisplayPoint::new(DisplayRow(5), 5), + DisplayPoint::new(DisplayRow(6), 5)..DisplayPoint::new(DisplayRow(6), 5), + DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(7), 0) + ] + ); + }); +} + +#[gpui::test] +async fn test_add_selection_above_below(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // let buffer = MultiBuffer::build_simple("abc\ndefghi\n\njk\nlmno\n", cx); + cx.set_state(indoc!( + r#"abc + defˇghi + + jk + nlmo + "# + )); + + cx.update_editor(|editor, cx| { + editor.add_selection_above(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abcˇ + defˇghi + + jk + nlmo + "# + )); + + cx.update_editor(|editor, cx| { + editor.add_selection_above(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abcˇ + defˇghi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + defˇghi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.undo_selection(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abcˇ + defˇghi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.redo_selection(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + defˇghi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + defˇghi + + jk + nlmˇo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + defˇghi + + jk + nlmˇo + "# + )); + + // change selections + cx.set_state(indoc!( + r#"abc + def«ˇg»hi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + def«ˇg»hi + + jk + nlm«ˇo» 
+ "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + def«ˇg»hi + + jk + nlm«ˇo» + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_above(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + def«ˇg»hi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_above(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + def«ˇg»hi + + jk + nlmo + "# + )); + + // Change selections again + cx.set_state(indoc!( + r#"a«bc + defgˇ»hi + + jk + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"a«bcˇ» + d«efgˇ»hi + + j«kˇ» + nlmo + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + cx.assert_editor_state(indoc!( + r#"a«bcˇ» + d«efgˇ»hi + + j«kˇ» + n«lmoˇ» + "# + )); + cx.update_editor(|view, cx| { + view.add_selection_above(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"a«bcˇ» + d«efgˇ»hi + + j«kˇ» + nlmo + "# + )); + + // Change selections again + cx.set_state(indoc!( + r#"abc + d«ˇefghi + + jk + nlm»o + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_above(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"a«ˇbc» + d«ˇef»ghi + + j«ˇk» + n«ˇlm»o + "# + )); + + cx.update_editor(|view, cx| { + view.add_selection_below(&Default::default(), cx); + }); + + cx.assert_editor_state(indoc!( + r#"abc + d«ˇef»ghi + + j«ˇk» + n«ˇlm»o + "# + )); +} + +#[gpui::test] +async fn test_select_next(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state("abc\nˇabc abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)) + .unwrap(); + cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)) + .unwrap(); + cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc"); + + cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx)); + cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx)); + cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)) + .unwrap(); + cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); +} + +#[gpui::test] +async fn test_select_all_matches(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state("abc\nˇabc abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_all_matches(&SelectAllMatches, cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»"); +} + +#[gpui::test] +async fn test_select_next_with_multiple_carets(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state( + r#"let foo = 2; +lˇet foo = 2; +let fooˇ = 2; +let foo = 2; +let foo = ˇ2;"#, + ); + + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)) + .unwrap(); + cx.assert_editor_state( + r#"let foo = 2; +«letˇ» foo = 2; +let «fooˇ» = 2; +let foo = 2; +let foo = «2ˇ»;"#, + ); + + // noop 
for multiple selections with different contents + cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx)) + .unwrap(); + cx.assert_editor_state( + r#"let foo = 2; +«letˇ» foo = 2; +let «fooˇ» = 2; +let foo = 2; +let foo = «2ˇ»;"#, + ); +} + +#[gpui::test] +async fn test_select_previous_multibuffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new_multibuffer( + cx, + [ + indoc! { + "aaa\n«bbb\nccc\n»ddd" + }, + indoc! { + "aaa\n«bbb\nccc\n»ddd" + }, + ], + ); + + cx.assert_editor_state(indoc! {" + ˇbbb + ccc + + bbb + ccc + "}); + cx.dispatch_action(SelectPrevious::default()); + cx.assert_editor_state(indoc! {" + «bbbˇ» + ccc + + bbb + ccc + "}); + cx.dispatch_action(SelectPrevious::default()); + cx.assert_editor_state(indoc! {" + «bbbˇ» + ccc + + «bbbˇ» + ccc + "}); +} + +#[gpui::test] +async fn test_select_previous_with_single_caret(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state("abc\nˇabc abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx)); + cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx)); + cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndefabc\n«abcˇ»"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndef«abcˇ»\n«abcˇ»"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndef«abcˇ»\n«abcˇ»"); +} + +#[gpui::test] +async fn test_select_previous_with_multiple_carets(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state( + r#"let foo = 2; +lˇet foo = 2; +let fooˇ = 2; +let foo = 2; +let foo = ˇ2;"#, + ); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state( + r#"let foo = 2; +«letˇ» foo = 2; +let «fooˇ» = 2; +let foo = 2; +let foo = «2ˇ»;"#, + ); + + // noop for multiple selections with different contents + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state( + r#"let foo = 2; +«letˇ» foo = 2; +let «fooˇ» = 2; +let foo = 2; +let foo = «2ˇ»;"#, + ); +} + +#[gpui::test] +async fn test_select_previous_with_single_selection(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state("abc\n«ˇabc» abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\nabc"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\n«abcˇ»"); + + cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx)); + cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\nabc"); + + 
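+    // Redoing the selection change restores the occurrence of "abc" that undo_selection just removed.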
cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx)); + cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\n«abcˇ»"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndef«abcˇ»\n«abcˇ»"); + + cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx)) + .unwrap(); + cx.assert_editor_state("«abcˇ»\n«ˇabc» «abcˇ»\ndef«abcˇ»\n«abcˇ»"); +} + +#[gpui::test] +async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new(Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + )); + + let text = r#" + use mod1::mod2::{mod3, mod4}; + + fn fn_1(param1: bool, param2: &str) { + let var1 = "text"; + } + "# + .unindent(); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + + view.condition::(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 25)..DisplayPoint::new(DisplayRow(0), 25), + DisplayPoint::new(DisplayRow(2), 24)..DisplayPoint::new(DisplayRow(2), 12), + DisplayPoint::new(DisplayRow(3), 18)..DisplayPoint::new(DisplayRow(3), 18), + ]); + }); + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| { view.selections.display_ranges(cx) }), + &[ + DisplayPoint::new(DisplayRow(0), 23)..DisplayPoint::new(DisplayRow(0), 27), + DisplayPoint::new(DisplayRow(2), 35)..DisplayPoint::new(DisplayRow(2), 7), + DisplayPoint::new(DisplayRow(3), 15)..DisplayPoint::new(DisplayRow(3), 21), + ] + ); + + _ = view.update(cx, |view, cx| { + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(DisplayRow(0), 16)..DisplayPoint::new(DisplayRow(0), 28), + DisplayPoint::new(DisplayRow(4), 1)..DisplayPoint::new(DisplayRow(2), 0), + ] + ); + + _ = view.update(cx, |view, cx| { + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + // Trying to expand the selected syntax node one more time has no effect. 
+ _ = view.update(cx, |view, cx| { + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + _ = view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(DisplayRow(0), 16)..DisplayPoint::new(DisplayRow(0), 28), + DisplayPoint::new(DisplayRow(4), 1)..DisplayPoint::new(DisplayRow(2), 0), + ] + ); + + _ = view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(DisplayRow(0), 23)..DisplayPoint::new(DisplayRow(0), 27), + DisplayPoint::new(DisplayRow(2), 35)..DisplayPoint::new(DisplayRow(2), 7), + DisplayPoint::new(DisplayRow(3), 15)..DisplayPoint::new(DisplayRow(3), 21), + ] + ); + + _ = view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(DisplayRow(0), 25)..DisplayPoint::new(DisplayRow(0), 25), + DisplayPoint::new(DisplayRow(2), 24)..DisplayPoint::new(DisplayRow(2), 12), + DisplayPoint::new(DisplayRow(3), 18)..DisplayPoint::new(DisplayRow(3), 18), + ] + ); + + // Trying to shrink the selected syntax node one more time has no effect. + _ = view.update(cx, |view, cx| { + view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(DisplayRow(0), 25)..DisplayPoint::new(DisplayRow(0), 25), + DisplayPoint::new(DisplayRow(2), 24)..DisplayPoint::new(DisplayRow(2), 12), + DisplayPoint::new(DisplayRow(3), 18)..DisplayPoint::new(DisplayRow(3), 18), + ] + ); + + // Ensure that we keep expanding the selection if the larger selection starts or ends within + // a fold. 
+ _ = view.update(cx, |view, cx| { + view.fold_ranges( + vec![ + Point::new(0, 21)..Point::new(0, 24), + Point::new(3, 20)..Point::new(3, 22), + ], + true, + cx, + ); + view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); + }); + assert_eq!( + view.update(cx, |view, cx| view.selections.display_ranges(cx)), + &[ + DisplayPoint::new(DisplayRow(0), 16)..DisplayPoint::new(DisplayRow(0), 28), + DisplayPoint::new(DisplayRow(2), 35)..DisplayPoint::new(DisplayRow(2), 7), + DisplayPoint::new(DisplayRow(3), 4)..DisplayPoint::new(DisplayRow(3), 23), + ] + ); +} + +#[gpui::test] +async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: false, + newline: true, + }, + ], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_indents_query( + r#" + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap(), + ); + + let text = "fn a() {}"; + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + editor + .condition::(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) + .await; + + _ = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([5..5, 8..8, 9..9])); + editor.newline(&Newline, cx); + assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n"); + assert_eq!( + editor.selections.ranges(cx), + &[ + Point::new(1, 4)..Point::new(1, 4), + Point::new(3, 4)..Point::new(3, 4), + Point::new(5, 0)..Point::new(5, 0) + ] + ); + }); +} + +#[gpui::test] +async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let language = Arc::new(Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "/*".to_string(), + end: " */".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "[".to_string(), + end: "]".to_string(), + close: false, + newline: true, + }, + BracketPair { + start: "\"".to_string(), + end: "\"".to_string(), + close: true, + newline: false, + }, + ], + ..Default::default() + }, + autoclose_before: "})]".to_string(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + cx.language_registry().add(language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(language), cx); + }); + + cx.set_state( + &r#" + 🏀ˇ + εˇ + ❤️ˇ + "# + .unindent(), + ); + + // autoclose multiple nested brackets at multiple cursors + cx.update_editor(|view, cx| { + view.handle_input("{", cx); + view.handle_input("{", cx); + view.handle_input("{", cx); + }); + cx.assert_editor_state( + &" + 🏀{{{ˇ}}} + ε{{{ˇ}}} + ❤️{{{ˇ}}} + " + .unindent(), + ); + + // insert a different closing bracket + cx.update_editor(|view, cx| { + view.handle_input(")", cx); + }); + cx.assert_editor_state( + &" + 🏀{{{)ˇ}}} + ε{{{)ˇ}}} + ❤️{{{)ˇ}}} + " + 
.unindent(), + ); + + // skip over the auto-closed brackets when typing a closing bracket + cx.update_editor(|view, cx| { + view.move_right(&MoveRight, cx); + view.handle_input("}", cx); + view.handle_input("}", cx); + view.handle_input("}", cx); + }); + cx.assert_editor_state( + &" + 🏀{{{)}}}}ˇ + ε{{{)}}}}ˇ + ❤️{{{)}}}}ˇ + " + .unindent(), + ); + + // autoclose multi-character pairs + cx.set_state( + &" + ˇ + ˇ + " + .unindent(), + ); + cx.update_editor(|view, cx| { + view.handle_input("/", cx); + view.handle_input("*", cx); + }); + cx.assert_editor_state( + &" + /*ˇ */ + /*ˇ */ + " + .unindent(), + ); + + // one cursor autocloses a multi-character pair, one cursor + // does not autoclose. + cx.set_state( + &" + /ˇ + ˇ + " + .unindent(), + ); + cx.update_editor(|view, cx| view.handle_input("*", cx)); + cx.assert_editor_state( + &" + /*ˇ */ + *ˇ + " + .unindent(), + ); + + // Don't autoclose if the next character isn't whitespace and isn't + // listed in the language's "autoclose_before" section. + cx.set_state("ˇa b"); + cx.update_editor(|view, cx| view.handle_input("{", cx)); + cx.assert_editor_state("{ˇa b"); + + // Don't autoclose if `close` is false for the bracket pair + cx.set_state("ˇ"); + cx.update_editor(|view, cx| view.handle_input("[", cx)); + cx.assert_editor_state("[ˇ"); + + // Surround with brackets if text is selected + cx.set_state("«aˇ» b"); + cx.update_editor(|view, cx| view.handle_input("{", cx)); + cx.assert_editor_state("{«aˇ»} b"); + + // Autclose pair where the start and end characters are the same + cx.set_state("aˇ"); + cx.update_editor(|view, cx| view.handle_input("\"", cx)); + cx.assert_editor_state("a\"ˇ\""); + cx.update_editor(|view, cx| view.handle_input("\"", cx)); + cx.assert_editor_state("a\"\"ˇ"); +} + +#[gpui::test] +async fn test_always_treat_brackets_as_autoclosed_skip_over(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.always_treat_brackets_as_autoclosed = Some(true); + }); + + let mut cx = EditorTestContext::new(cx).await; + + let language = Arc::new(Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "[".to_string(), + end: "]".to_string(), + close: false, + newline: true, + }, + ], + ..Default::default() + }, + autoclose_before: "})]".to_string(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + cx.language_registry().add(language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(language), cx); + }); + + cx.set_state( + &" + ˇ + ˇ + ˇ + " + .unindent(), + ); + + // ensure only matching closing brackets are skipped over + cx.update_editor(|view, cx| { + view.handle_input("}", cx); + view.move_left(&MoveLeft, cx); + view.handle_input(")", cx); + view.move_left(&MoveLeft, cx); + }); + cx.assert_editor_state( + &" + ˇ)} + ˇ)} + ˇ)} + " + .unindent(), + ); + + // skip-over closing brackets at multiple cursors + cx.update_editor(|view, cx| { + view.handle_input(")", cx); + view.handle_input("}", cx); + }); + cx.assert_editor_state( + &" + )}ˇ + )}ˇ + )}ˇ + " + .unindent(), + ); + + // ignore non-close brackets + cx.update_editor(|view, cx| { + view.handle_input("]", cx); + view.move_left(&MoveLeft, cx); + view.handle_input("]", cx); + }); + cx.assert_editor_state( + &" + )}]ˇ] + )}]ˇ] + )}]ˇ] + " + .unindent(), + ); 
+} + +#[gpui::test] +async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "<".into(), + end: ">".into(), + close: true, + ..Default::default() + }, + BracketPair { + start: "{".into(), + end: "}".into(), + close: true, + ..Default::default() + }, + BracketPair { + start: "(".into(), + end: ")".into(), + close: true, + ..Default::default() + }, + ], + ..Default::default() + }, + autoclose_before: "})]>".into(), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "/*".into(), + end: " */".into(), + close: true, + ..Default::default() + }, + BracketPair { + start: "{".into(), + end: "}".into(), + close: true, + ..Default::default() + }, + BracketPair { + start: "(".into(), + end: ")".into(), + close: true, + ..Default::default() + }, + ], + ..Default::default() + }, + autoclose_before: "})]>".into(), + ..Default::default() + }, + Some(tree_sitter_typescript::language_tsx()), + )); + + cx.language_registry().add(html_language.clone()); + cx.language_registry().add(javascript_language.clone()); + + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(html_language), cx); + }); + + cx.set_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Precondition: different languages are active at different locations. + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let cursors = editor.selections.ranges::(cx); + let languages = cursors + .iter() + .map(|c| snapshot.language_at(c.start).unwrap().name()) + .collect::>(); + assert_eq!( + languages, + &["HTML".into(), "JavaScript".into(), "HTML".into()] + ); + }); + + // Angle brackets autoclose in HTML, but not JavaScript. + cx.update_editor(|editor, cx| { + editor.handle_input("<", cx); + editor.handle_input("a", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + + // Curly braces and parens autoclose in both HTML and JavaScript. + cx.update_editor(|editor, cx| { + editor.handle_input(" b=", cx); + editor.handle_input("{", cx); + editor.handle_input("c", cx); + editor.handle_input("(", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + + // Brackets that were already autoclosed are skipped. + cx.update_editor(|editor, cx| { + editor.handle_input(")", cx); + editor.handle_input("d", cx); + editor.handle_input("}", cx); + }); + cx.assert_editor_state( + &r#" + + + + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + editor.handle_input(">", cx); + }); + cx.assert_editor_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Reset + cx.set_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + editor.handle_input("<", cx); + }); + cx.assert_editor_state( + &r#" + <ˇ> + + <ˇ> + "# + .unindent(), + ); + + // When backspacing, the closing angle brackets are removed. 
+ cx.update_editor(|editor, cx| { + editor.backspace(&Backspace, cx); + }); + cx.assert_editor_state( + &r#" + ˇ + + ˇ + "# + .unindent(), + ); + + // Block comments autoclose in JavaScript, but not HTML. + cx.update_editor(|editor, cx| { + editor.handle_input("/", cx); + editor.handle_input("*", cx); + }); + cx.assert_editor_state( + &r#" + /*ˇ + + /*ˇ + "# + .unindent(), + ); +} + +#[gpui::test] +async fn test_autoclose_with_overrides(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let rust_language = Arc::new( + Language::new( + LanguageConfig { + name: "Rust".into(), + brackets: serde_json::from_value(json!([ + { "start": "{", "end": "}", "close": true, "newline": true }, + { "start": "\"", "end": "\"", "close": true, "newline": false, "not_in": ["string"] }, + ])) + .unwrap(), + autoclose_before: "})]>".into(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_override_query("(string_literal) @string") + .unwrap(), + ); + + cx.language_registry().add(rust_language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(rust_language), cx); + }); + + cx.set_state( + &r#" + let x = ˇ + "# + .unindent(), + ); + + // Inserting a quotation mark. A closing quotation mark is automatically inserted. + cx.update_editor(|editor, cx| { + editor.handle_input("\"", cx); + }); + cx.assert_editor_state( + &r#" + let x = "ˇ" + "# + .unindent(), + ); + + // Inserting another quotation mark. The cursor moves across the existing + // automatically-inserted quotation mark. + cx.update_editor(|editor, cx| { + editor.handle_input("\"", cx); + }); + cx.assert_editor_state( + &r#" + let x = ""ˇ + "# + .unindent(), + ); + + // Reset + cx.set_state( + &r#" + let x = ˇ + "# + .unindent(), + ); + + // Inserting a quotation mark inside of a string. A second quotation mark is not inserted. + cx.update_editor(|editor, cx| { + editor.handle_input("\"", cx); + editor.handle_input(" ", cx); + editor.move_left(&Default::default(), cx); + editor.handle_input("\\", cx); + editor.handle_input("\"", cx); + }); + cx.assert_editor_state( + &r#" + let x = "\"ˇ " + "# + .unindent(), + ); + + // Inserting a closing quotation mark at the position of an automatically-inserted quotation + // mark. Nothing is inserted. 
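+ // The cursor simply skips over the auto-inserted quotation mark; the text is left unchanged.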
+ cx.update_editor(|editor, cx| { + editor.move_right(&Default::default(), cx); + editor.handle_input("\"", cx); + }); + cx.assert_editor_state( + &r#" + let x = "\" "ˇ + "# + .unindent(), + ); +} + +#[gpui::test] +async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new(Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "/* ".to_string(), + end: "*/".to_string(), + close: true, + ..Default::default() + }, + ], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + let text = r#" + a + b + c + "# + .unindent(); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + view.condition::(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 1), + ]) + }); + + view.handle_input("{", cx); + view.handle_input("{", cx); + view.handle_input("{", cx); + assert_eq!( + view.text(cx), + " + {{{a}}} + {{{b}}} + {{{c}}} + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 4), + DisplayPoint::new(DisplayRow(1), 3)..DisplayPoint::new(DisplayRow(1), 4), + DisplayPoint::new(DisplayRow(2), 3)..DisplayPoint::new(DisplayRow(2), 4) + ] + ); + + view.undo(&Undo, cx); + view.undo(&Undo, cx); + view.undo(&Undo, cx); + assert_eq!( + view.text(cx), + " + a + b + c + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 1) + ] + ); + + // Ensure inserting the first character of a multi-byte bracket pair + // doesn't surround the selections with the bracket. + view.handle_input("/", cx); + assert_eq!( + view.text(cx), + " + / + / + / + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1) + ] + ); + + view.undo(&Undo, cx); + assert_eq!( + view.text(cx), + " + a + b + c + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 1) + ] + ); + + // Ensure inserting the last character of a multi-byte bracket pair + // doesn't surround the selections with the bracket. 
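+ // Typing "*" on its own just replaces each selection; no "/* ... */" pair is inserted.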
+ view.handle_input("*", cx); + assert_eq!( + view.text(cx), + " + * + * + * + " + .unindent() + ); + assert_eq!( + view.selections.display_ranges(cx), + [ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), + DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1), + DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1) + ] + ); + }); +} + +#[gpui::test] +async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new(Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }], + ..Default::default() + }, + autoclose_before: "}".to_string(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + let text = r#" + a + b + c + "# + .unindent(); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + editor + .condition::(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + _ = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(0, 1)..Point::new(0, 1), + Point::new(1, 1)..Point::new(1, 1), + Point::new(2, 1)..Point::new(2, 1), + ]) + }); + + editor.handle_input("{", cx); + editor.handle_input("{", cx); + editor.handle_input("_", cx); + assert_eq!( + editor.text(cx), + " + a{{_}} + b{{_}} + c{{_}} + " + .unindent() + ); + assert_eq!( + editor.selections.ranges::(cx), + [ + Point::new(0, 4)..Point::new(0, 4), + Point::new(1, 4)..Point::new(1, 4), + Point::new(2, 4)..Point::new(2, 4) + ] + ); + + editor.backspace(&Default::default(), cx); + editor.backspace(&Default::default(), cx); + assert_eq!( + editor.text(cx), + " + a{} + b{} + c{} + " + .unindent() + ); + assert_eq!( + editor.selections.ranges::(cx), + [ + Point::new(0, 2)..Point::new(0, 2), + Point::new(1, 2)..Point::new(1, 2), + Point::new(2, 2)..Point::new(2, 2) + ] + ); + + editor.delete_to_previous_word_start(&Default::default(), cx); + assert_eq!( + editor.text(cx), + " + a + b + c + " + .unindent() + ); + assert_eq!( + editor.selections.ranges::(cx), + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(1, 1)..Point::new(1, 1), + Point::new(2, 1)..Point::new(2, 1) + ] + ); + }); +} + +#[gpui::test] +async fn test_always_treat_brackets_as_autoclosed_delete(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.always_treat_brackets_as_autoclosed = Some(true); + }); + + let mut cx = EditorTestContext::new(cx).await; + + let language = Arc::new(Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "[".to_string(), + end: "]".to_string(), + close: false, + newline: true, + }, + ], + ..Default::default() + }, + autoclose_before: "})]".to_string(), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + cx.language_registry().add(language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(language), cx); + }); + + cx.set_state( + &" + {(ˇ)} + [[ˇ]] + {(ˇ)} + " + .unindent(), + ); + + cx.update_editor(|view, cx| { + 
view.backspace(&Default::default(), cx); + view.backspace(&Default::default(), cx); + }); + + cx.assert_editor_state( + &" + ˇ + ˇ]] + ˇ + " + .unindent(), + ); + + cx.update_editor(|view, cx| { + view.handle_input("{", cx); + view.handle_input("{", cx); + view.move_right(&MoveRight, cx); + view.move_right(&MoveRight, cx); + view.move_left(&MoveLeft, cx); + view.move_left(&MoveLeft, cx); + view.backspace(&Default::default(), cx); + }); + + cx.assert_editor_state( + &" + {ˇ} + {ˇ}]] + {ˇ} + " + .unindent(), + ); + + cx.update_editor(|view, cx| { + view.backspace(&Default::default(), cx); + }); + + cx.assert_editor_state( + &" + ˇ + ˇ]] + ˇ + " + .unindent(), + ); +} + +#[gpui::test] +async fn test_auto_replace_emoji_shortcode(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new(Language::new( + LanguageConfig::default(), + Some(tree_sitter_rust::language()), + )); + + let buffer = cx.new_model(|cx| Buffer::local("", cx).with_language(language, cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + editor + .condition::(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + _ = editor.update(cx, |editor, cx| { + editor.set_auto_replace_emoji_shortcode(true); + + editor.handle_input("Hello ", cx); + editor.handle_input(":wave", cx); + assert_eq!(editor.text(cx), "Hello :wave".unindent()); + + editor.handle_input(":", cx); + assert_eq!(editor.text(cx), "Hello 👋".unindent()); + + editor.handle_input(" :smile", cx); + assert_eq!(editor.text(cx), "Hello 👋 :smile".unindent()); + + editor.handle_input(":", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄".unindent()); + + // Ensure shortcode gets replaced when it is part of a word that only consists of emojis + editor.handle_input(":wave", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄:wave".unindent()); + + editor.handle_input(":", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄👋".unindent()); + + editor.handle_input(":1", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄👋:1".unindent()); + + editor.handle_input(":", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄👋:1:".unindent()); + + // Ensure shortcode does not get replaced when it is part of a word + editor.handle_input(" Test:wave", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄👋:1: Test:wave".unindent()); + + editor.handle_input(":", cx); + assert_eq!(editor.text(cx), "Hello 👋 😄👋:1: Test:wave:".unindent()); + + editor.set_auto_replace_emoji_shortcode(false); + + // Ensure shortcode does not get replaced when auto replace is off + editor.handle_input(" :wave", cx); + assert_eq!( + editor.text(cx), + "Hello 👋 😄👋:1: Test:wave: :wave".unindent() + ); + + editor.handle_input(":", cx); + assert_eq!( + editor.text(cx), + "Hello 👋 😄👋:1: Test:wave: :wave:".unindent() + ); + }); +} + +#[gpui::test] +async fn test_snippets(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let (text, insertion_ranges) = marked_text_ranges( + indoc! 
{" + a.ˇ b + a.ˇ b + a.ˇ b + "}, + false, + ); + + let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + + _ = editor.update(cx, |editor, cx| { + let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap(); + + editor + .insert_snippet(&insertion_ranges, snippet, cx) + .unwrap(); + + fn assert(editor: &mut Editor, cx: &mut ViewContext, marked_text: &str) { + let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false); + assert_eq!(editor.text(cx), expected_text); + assert_eq!(editor.selections.ranges::(cx), selection_ranges); + } + + assert( + editor, + cx, + indoc! {" + a.f(«one», two, «three») b + a.f(«one», two, «three») b + a.f(«one», two, «three») b + "}, + ); + + // Can't move earlier than the first tab stop + assert!(!editor.move_to_prev_snippet_tabstop(cx)); + assert( + editor, + cx, + indoc! {" + a.f(«one», two, «three») b + a.f(«one», two, «three») b + a.f(«one», two, «three») b + "}, + ); + + assert!(editor.move_to_next_snippet_tabstop(cx)); + assert( + editor, + cx, + indoc! {" + a.f(one, «two», three) b + a.f(one, «two», three) b + a.f(one, «two», three) b + "}, + ); + + editor.move_to_prev_snippet_tabstop(cx); + assert( + editor, + cx, + indoc! {" + a.f(«one», two, «three») b + a.f(«one», two, «three») b + a.f(«one», two, «three») b + "}, + ); + + assert!(editor.move_to_next_snippet_tabstop(cx)); + assert( + editor, + cx, + indoc! {" + a.f(one, «two», three) b + a.f(one, «two», three) b + a.f(one, «two», three) b + "}, + ); + assert!(editor.move_to_next_snippet_tabstop(cx)); + assert( + editor, + cx, + indoc! {" + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + "}, + ); + + // As soon as the last tab stop is reached, snippet state is gone + editor.move_to_prev_snippet_tabstop(cx); + assert( + editor, + cx, + indoc! 
{" + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + a.f(one, two, three)ˇ b + "}, + ); + }); +} + +#[gpui::test] +async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_file("/file.rs", Default::default()).await; + + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .await + .unwrap(); + + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 4); + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }) + .next() + .await; + cx.executor().start_waiting(); + save.await; + + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + // Ensure we can still save even if formatting hangs. 
+ fake_server.handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + futures::future::pending::<()>().await; + unreachable!() + }); + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + cx.executor().advance_clock(super::FORMAT_TIMEOUT); + cx.executor().start_waiting(); + save.await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one\ntwo\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + // For non-dirty buffer, no formatting request should be sent + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + let _pending_format_request = fake_server + .handle_request::(move |_, _| async move { + panic!("Should not be invoked on non-dirty buffer"); + }) + .next(); + cx.executor().start_waiting(); + save.await; + + // Set rust language override and assert overridden tabsize is sent to language server + update_test_language_settings(cx, |settings| { + settings.languages.insert( + "Rust".into(), + LanguageSettingsContent { + tab_size: NonZeroU32::new(8), + ..Default::default() + }, + ); + }); + + editor.update(cx, |editor, cx| editor.set_text("somehting_new\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 8); + Ok(Some(vec![])) + }) + .next() + .await; + cx.executor().start_waiting(); + save.await; +} + +#[gpui::test] +async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let cols = 4; + let rows = 10; + let sample_text_1 = sample_text(rows, cols, 'a'); + assert_eq!( + sample_text_1, + "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj" + ); + let sample_text_2 = sample_text(rows, cols, 'l'); + assert_eq!( + sample_text_2, + "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" + ); + let sample_text_3 = sample_text(rows, cols, 'v'); + assert_eq!( + sample_text_3, + "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" + ); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/a", + json!({ + "main.rs": sample_text_1, + "other.rs": sample_text_2, + "lib.rs": sample_text_3, + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let worktree = project.update(cx, |project, _| { + let mut worktrees = project.worktrees().collect::>(); + assert_eq!(worktrees.len(), 1); + worktrees.pop().unwrap() + }); + let worktree_id = worktree.update(cx, |worktree, _| worktree.id()); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "main.rs"), cx) + }) + .await + .unwrap(); + let buffer_2 
= project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "other.rs"), cx) + }) + .await + .unwrap(); + let buffer_3 = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "lib.rs"), cx) + }) + .await + .unwrap(); + + let multi_buffer = cx.new_model(|cx| { + let mut multi_buffer = MultiBuffer::new(0, ReadWrite); + multi_buffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multi_buffer.push_excerpts( + buffer_2.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multi_buffer.push_excerpts( + buffer_3.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multi_buffer + }); + let multi_buffer_editor = + cx.new_view(|cx| Editor::new(EditorMode::Full, multi_buffer, Some(project.clone()), cx)); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| s.select_ranges(Some(1..2))); + editor.insert("|one|two|three|", cx); + }); + assert!(cx.read(|cx| multi_buffer_editor.is_dirty(cx))); + multi_buffer_editor.update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges(Some(60..70)) + }); + editor.insert("|four|five|six|", cx); + }); + assert!(cx.read(|cx| multi_buffer_editor.is_dirty(cx))); + + // First two buffers should be edited, but not the third one. + assert_eq!( + multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)), + "a|one|two|three|aa\nbbbb\ncccc\n\nffff\ngggg\n\njjjj\nllll\nmmmm\nnnnn|four|five|six|\nr\n\nuuuu\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}", + ); + buffer_1.update(cx, |buffer, _| { + assert!(buffer.is_dirty()); + assert_eq!( + buffer.text(), + "a|one|two|three|aa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj", + ) + }); + buffer_2.update(cx, |buffer, _| { + assert!(buffer.is_dirty()); + assert_eq!( + buffer.text(), + "llll\nmmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu", + ) + }); + buffer_3.update(cx, |buffer, _| { + assert!(!buffer.is_dirty()); + assert_eq!(buffer.text(), sample_text_3,) + }); + + cx.executor().start_waiting(); + let save = multi_buffer_editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + + let fake_server = fake_servers.next().await.unwrap(); + fake_server + .server + .on_request::(move |params, _| async move { + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + format!("[{} formatted]", params.text_document.uri), + )])) + }) + .detach(); + save.await; + + // After multibuffer saving, only first two buffers should be reformatted, but not the third one (as it was not dirty). 
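+ // The fake server splices "[<uri> formatted]" into the start of each buffer it formats, so the
+ // marker shows up in buffer_1 and buffer_2 below while buffer_3 still matches sample_text_3.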
+ assert!(cx.read(|cx| !multi_buffer_editor.is_dirty(cx))); + assert_eq!( + multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)), + "a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}", + ); + buffer_1.update(cx, |buffer, _| { + assert!(!buffer.is_dirty()); + assert_eq!( + buffer.text(), + "a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n", + ) + }); + buffer_2.update(cx, |buffer, _| { + assert!(!buffer.is_dirty()); + assert_eq!( + buffer.text(), + "lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n", + ) + }); + buffer_3.update(cx, |buffer, _| { + assert!(!buffer.is_dirty()); + assert_eq!(buffer.text(), sample_text_3,) + }); +} + +#[gpui::test] +async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_file("/file.rs", Default::default()).await; + + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_range_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .await + .unwrap(); + + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 4); + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }) + .next() + .await; + cx.executor().start_waiting(); + save.await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + + // Ensure we can still save even if formatting hangs. 
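+ // Same timeout behavior as the document-formatting test above, exercised here through the
+ // RangeFormatting request: the save still succeeds once FORMAT_TIMEOUT elapses.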
+ fake_server.handle_request::( + move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + futures::future::pending::<()>().await; + unreachable!() + }, + ); + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + cx.executor().advance_clock(super::FORMAT_TIMEOUT); + cx.executor().start_waiting(); + save.await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one\ntwo\nthree\n" + ); + assert!(!cx.read(|cx| editor.is_dirty(cx))); + + // For non-dirty buffer, no formatting request should be sent + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + let _pending_format_request = fake_server + .handle_request::(move |_, _| async move { + panic!("Should not be invoked on non-dirty buffer"); + }) + .next(); + cx.executor().start_waiting(); + save.await; + + // Set Rust language override and assert overridden tabsize is sent to language server + update_test_language_settings(cx, |settings| { + settings.languages.insert( + "Rust".into(), + LanguageSettingsContent { + tab_size: NonZeroU32::new(8), + ..Default::default() + }, + ); + }); + + editor.update(cx, |editor, cx| editor.set_text("somehting_new\n", cx)); + assert!(cx.read(|cx| editor.is_dirty(cx))); + let save = editor + .update(cx, |editor, cx| editor.save(true, project.clone(), cx)) + .unwrap(); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 8); + Ok(Some(vec![])) + }) + .next() + .await; + cx.executor().start_waiting(); + save.await; +} + +#[gpui::test] +async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.formatter = Some(language_settings::Formatter::LanguageServer) + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_file("/file.rs", Default::default()).await; + + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..LanguageConfig::default() + }, + Some(tree_sitter_rust::language()), + ))); + update_test_language_settings(cx, |settings| { + // Enable Prettier formatting for the same buffer, and ensure + // LSP is called instead of Prettier. 
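+ // The formatter was set to LanguageServer in init_test above, so allowing Prettier here must
+ // not change which formatter runs; the assertions below only accept the fake LSP server's edits.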
+ settings.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .await + .unwrap(); + + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + _ = editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + + let format = editor + .update(cx, |editor, cx| { + editor.perform_format(project.clone(), FormatTrigger::Manual, cx) + }) + .unwrap(); + fake_server + .handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + assert_eq!(params.options.tab_size, 4); + Ok(Some(vec![lsp::TextEdit::new( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }) + .next() + .await; + cx.executor().start_waiting(); + format.await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); + + _ = editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx)); + // Ensure we don't lock if formatting hangs. + fake_server.handle_request::(move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/file.rs").unwrap() + ); + futures::future::pending::<()>().await; + unreachable!() + }); + let format = editor + .update(cx, |editor, cx| { + editor.perform_format(project, FormatTrigger::Manual, cx) + }) + .unwrap(); + cx.executor().advance_clock(super::FORMAT_TIMEOUT); + cx.executor().start_waiting(); + format.await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one\ntwo\nthree\n" + ); +} + +#[gpui::test] +async fn test_concurrent_format_requests(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + one.twoˇ + "}); + + // The format request takes a long time. When it completes, it inserts + // a newline and an indent before the `.` + cx.lsp + .handle_request::(move |_, cx| { + let executor = cx.background_executor().clone(); + async move { + executor.timer(Duration::from_millis(100)).await; + Ok(Some(vec![lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 3)), + new_text: "\n ".into(), + }])) + } + }); + + // Submit a format request. + let format_1 = cx + .update_editor(|editor, cx| editor.format(&Format, cx)) + .unwrap(); + cx.executor().run_until_parked(); + + // Submit a second format request. + let format_2 = cx + .update_editor(|editor, cx| editor.format(&Format, cx)) + .unwrap(); + cx.executor().run_until_parked(); + + // Wait for both format requests to complete + cx.executor().advance_clock(Duration::from_millis(200)); + cx.executor().start_waiting(); + format_1.await.unwrap(); + cx.executor().start_waiting(); + format_2.await.unwrap(); + + // The formatting edits only happens once. + cx.assert_editor_state(indoc! 
{" + one + .twoˇ + "}); +} + +#[gpui::test] +async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.formatter = Some(language_settings::Formatter::Auto) + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + cx, + ) + .await; + + // Set up a buffer white some trailing whitespace and no trailing newline. + cx.set_state( + &[ + "one ", // + "twoˇ", // + "three ", // + "four", // + ] + .join("\n"), + ); + + // Submit a format request. + let format = cx + .update_editor(|editor, cx| editor.format(&Format, cx)) + .unwrap(); + + // Record which buffer changes have been sent to the language server + let buffer_changes = Arc::new(Mutex::new(Vec::new())); + cx.lsp + .handle_notification::({ + let buffer_changes = buffer_changes.clone(); + move |params, _| { + buffer_changes.lock().extend( + params + .content_changes + .into_iter() + .map(|e| (e.range.unwrap(), e.text)), + ); + } + }); + + // Handle formatting requests to the language server. + cx.lsp.handle_request::({ + let buffer_changes = buffer_changes.clone(); + move |_, _| { + // When formatting is requested, trailing whitespace has already been stripped, + // and the trailing newline has already been added. + assert_eq!( + &buffer_changes.lock()[1..], + &[ + ( + lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)), + "".into() + ), + ( + lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)), + "".into() + ), + ( + lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)), + "\n".into() + ), + ] + ); + + // Insert blank lines between each line of the buffer. + async move { + Ok(Some(vec![ + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 0)), + new_text: "\n".into(), + }, + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(2, 0), lsp::Position::new(2, 0)), + new_text: "\n".into(), + }, + ])) + } + } + }); + + // After formatting the buffer, the trailing whitespace is stripped, + // a newline is appended, and the edits provided by the language server + // have been applied. + format.await.unwrap(); + cx.assert_editor_state( + &[ + "one", // + "", // + "twoˇ", // + "", // + "three", // + "four", // + "", // + ] + .join("\n"), + ); + + // Undoing the formatting undoes the trailing whitespace removal, the + // trailing newline, and the LSP edits. + cx.update_buffer(|buffer, cx| buffer.undo(cx)); + cx.assert_editor_state( + &[ + "one ", // + "twoˇ", // + "three ", // + "four", // + ] + .join("\n"), + ); +} + +#[gpui::test] +async fn test_completion(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string(), ":".to_string()]), + resolve_provider: Some(true), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + let counter = Arc::new(AtomicUsize::new(0)); + + cx.set_state(indoc! {" + oneˇ + two + three + "}); + cx.simulate_keystroke("."); + handle_completion_request( + &mut cx, + indoc! 
{" + one.|<> + two + three + "}, + vec!["first_completion", "second_completion"], + counter.clone(), + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + assert_eq!(counter.load(atomic::Ordering::Acquire), 1); + + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor.context_menu_next(&Default::default(), cx); + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state(indoc! {" + one.second_completionˇ + two + three + "}); + + handle_resolve_completion_request( + &mut cx, + Some(vec![ + ( + //This overlaps with the primary completion edit which is + //misbehavior from the LSP spec, test that we filter it out + indoc! {" + one.second_ˇcompletion + two + threeˇ + "}, + "overlapping additional edit", + ), + ( + indoc! {" + one.second_completion + two + threeˇ + "}, + "\nadditional edit", + ), + ]), + ) + .await; + apply_additional_edits.await.unwrap(); + cx.assert_editor_state(indoc! {" + one.second_completionˇ + two + three + additional edit + "}); + + cx.set_state(indoc! {" + one.second_completion + twoˇ + threeˇ + additional edit + "}); + cx.simulate_keystroke(" "); + assert!(cx.editor(|e, _| e.context_menu.read().is_none())); + cx.simulate_keystroke("s"); + assert!(cx.editor(|e, _| e.context_menu.read().is_none())); + + cx.assert_editor_state(indoc! {" + one.second_completion + two sˇ + three sˇ + additional edit + "}); + handle_completion_request( + &mut cx, + indoc! {" + one.second_completion + two s + three + additional edit + "}, + vec!["fourth_completion", "fifth_completion", "sixth_completion"], + counter.clone(), + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + assert_eq!(counter.load(atomic::Ordering::Acquire), 2); + + cx.simulate_keystroke("i"); + + handle_completion_request( + &mut cx, + indoc! {" + one.second_completion + two si + three + additional edit + "}, + vec!["fourth_completion", "fifth_completion", "sixth_completion"], + counter.clone(), + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + assert_eq!(counter.load(atomic::Ordering::Acquire), 3); + + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state(indoc! 
{" + one.second_completion + two sixth_completionˇ + three sixth_completionˇ + additional edit + "}); + + handle_resolve_completion_request(&mut cx, None).await; + apply_additional_edits.await.unwrap(); + + _ = cx.update(|cx| { + cx.update_global::(|settings, cx| { + settings.update_user_settings::(cx, |settings| { + settings.show_completions_on_input = Some(false); + }); + }) + }); + cx.set_state("editorˇ"); + cx.simulate_keystroke("."); + assert!(cx.editor(|e, _| e.context_menu.read().is_none())); + cx.simulate_keystroke("c"); + cx.simulate_keystroke("l"); + cx.simulate_keystroke("o"); + cx.assert_editor_state("editor.cloˇ"); + assert!(cx.editor(|e, _| e.context_menu.read().is_none())); + cx.update_editor(|editor, cx| { + editor.show_completions(&ShowCompletions, cx); + }); + handle_completion_request( + &mut cx, + "editor.", + vec!["close", "clobber"], + counter.clone(), + ) + .await; + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + assert_eq!(counter.load(atomic::Ordering::Acquire), 4); + + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state("editor.closeˇ"); + handle_resolve_completion_request(&mut cx, None).await; + apply_additional_edits.await.unwrap(); +} + +#[gpui::test] +async fn test_no_duplicated_completion_requests(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string()]), + resolve_provider: Some(true), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {"fn main() { let a = 2ˇ; }"}); + cx.simulate_keystroke("."); + let completion_item = lsp::CompletionItem { + label: "Some".into(), + kind: Some(lsp::CompletionItemKind::SNIPPET), + detail: Some("Wrap the expression in an `Option::Some`".to_string()), + documentation: Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "```rust\nSome(2)\n```".to_string(), + })), + deprecated: Some(false), + sort_text: Some("Some".to_string()), + filter_text: Some("Some".to_string()), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 22, + }, + end: lsp::Position { + line: 0, + character: 22, + }, + }, + new_text: "Some(2)".to_string(), + })), + additional_text_edits: Some(vec![lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 20, + }, + end: lsp::Position { + line: 0, + character: 22, + }, + }, + new_text: "".to_string(), + }]), + ..Default::default() + }; + + let closure_completion_item = completion_item.clone(); + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + let mut request = cx.handle_request::(move |_, _, _| { + let task_completion_item = closure_completion_item.clone(); + counter_clone.fetch_add(1, atomic::Ordering::Release); + async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + task_completion_item, + ]))) + } + }); + + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + cx.assert_editor_state(indoc! 
{"fn main() { let a = 2.ˇ; }"}); + assert!(request.next().await.is_some()); + assert_eq!(counter.load(atomic::Ordering::Acquire), 1); + + cx.simulate_keystroke("S"); + cx.simulate_keystroke("o"); + cx.simulate_keystroke("m"); + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + cx.assert_editor_state(indoc! {"fn main() { let a = 2.Somˇ; }"}); + assert!(request.next().await.is_some()); + assert!(request.next().await.is_some()); + assert!(request.next().await.is_some()); + request.close(); + assert!(request.next().await.is_none()); + assert_eq!( + counter.load(atomic::Ordering::Acquire), + 4, + "With the completions menu open, only one LSP request should happen per input" + ); +} + +#[gpui::test] +async fn test_toggle_comment(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into(), "//! ".into(), "/// ".into()], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // If multiple selections intersect a line, the line is only toggled once. + cx.set_state(indoc! {" + fn a() { + «//b(); + ˇ»// «c(); + //ˇ» d(); + } + "}); + + cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx)); + + cx.assert_editor_state(indoc! {" + fn a() { + «b(); + c(); + ˇ» d(); + } + "}); + + // The comment prefix is inserted at the same column for every line in a + // selection. + cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx)); + + cx.assert_editor_state(indoc! {" + fn a() { + // «b(); + // c(); + ˇ»// d(); + } + "}); + + // If a selection ends at the beginning of a line, that line is not toggled. + cx.set_selections_state(indoc! {" + fn a() { + // b(); + «// c(); + ˇ» // d(); + } + "}); + + cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx)); + + cx.assert_editor_state(indoc! {" + fn a() { + // b(); + «c(); + ˇ» // d(); + } + "}); + + // If a selection span a single line and is empty, the line is toggled. + cx.set_state(indoc! {" + fn a() { + a(); + b(); + ˇ + } + "}); + + cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx)); + + cx.assert_editor_state(indoc! {" + fn a() { + a(); + b(); + //•ˇ + } + "}); + + // If a selection span multiple lines, empty lines are not toggled. + cx.set_state(indoc! {" + fn a() { + «a(); + + c();ˇ» + } + "}); + + cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx)); + + cx.assert_editor_state(indoc! {" + fn a() { + // «a(); + + // c();ˇ» + } + "}); + + // If a selection includes multiple comment prefixes, all lines are uncommented. + cx.set_state(indoc! {" + fn a() { + «// a(); + /// b(); + //! c();ˇ» + } + "}); + + cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx)); + + cx.assert_editor_state(indoc! 
{" + fn a() { + «a(); + b(); + c();ˇ» + } + "}); +} + +#[gpui::test] +async fn test_advance_downward_on_toggle_comment(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into()], + ..Default::default() + }, + Some(tree_sitter_rust::language()), + )); + + let mut cx = EditorTestContext::new(cx).await; + + cx.language_registry().add(language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(language), cx); + }); + + let toggle_comments = &ToggleComments { + advance_downwards: true, + }; + + // Single cursor on one line -> advance + // Cursor moves horizontally 3 characters as well on non-blank line + cx.set_state(indoc!( + "fn a() { + ˇdog(); + cat(); + }" + )); + cx.update_editor(|editor, cx| { + editor.toggle_comments(toggle_comments, cx); + }); + cx.assert_editor_state(indoc!( + "fn a() { + // dog(); + catˇ(); + }" + )); + + // Single selection on one line -> don't advance + cx.set_state(indoc!( + "fn a() { + «dog()ˇ»; + cat(); + }" + )); + cx.update_editor(|editor, cx| { + editor.toggle_comments(toggle_comments, cx); + }); + cx.assert_editor_state(indoc!( + "fn a() { + // «dog()ˇ»; + cat(); + }" + )); + + // Multiple cursors on one line -> advance + cx.set_state(indoc!( + "fn a() { + ˇdˇog(); + cat(); + }" + )); + cx.update_editor(|editor, cx| { + editor.toggle_comments(toggle_comments, cx); + }); + cx.assert_editor_state(indoc!( + "fn a() { + // dog(); + catˇ(ˇ); + }" + )); + + // Multiple cursors on one line, with selection -> don't advance + cx.set_state(indoc!( + "fn a() { + ˇdˇog«()ˇ»; + cat(); + }" + )); + cx.update_editor(|editor, cx| { + editor.toggle_comments(toggle_comments, cx); + }); + cx.assert_editor_state(indoc!( + "fn a() { + // ˇdˇog«()ˇ»; + cat(); + }" + )); + + // Single cursor on one line -> advance + // Cursor moves to column 0 on blank line + cx.set_state(indoc!( + "fn a() { + ˇdog(); + + cat(); + }" + )); + cx.update_editor(|editor, cx| { + editor.toggle_comments(toggle_comments, cx); + }); + cx.assert_editor_state(indoc!( + "fn a() { + // dog(); + ˇ + cat(); + }" + )); + + // Single cursor on one line -> advance + // Cursor starts and ends at column 0 + cx.set_state(indoc!( + "fn a() { + ˇ dog(); + cat(); + }" + )); + cx.update_editor(|editor, cx| { + editor.toggle_comments(toggle_comments, cx); + }); + cx.assert_editor_state(indoc!( + "fn a() { + // dog(); + ˇ cat(); + }" + )); +} + +#[gpui::test] +async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let html_language = Arc::new( + Language::new( + LanguageConfig { + name: "HTML".into(), + block_comment: Some(("".into())), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ) + .with_injection_query( + r#" + (script_element + (raw_text) @content + (#set! "language" "javascript")) + "#, + ) + .unwrap(), + ); + + let javascript_language = Arc::new(Language::new( + LanguageConfig { + name: "JavaScript".into(), + line_comments: vec!["// ".into()], + ..Default::default() + }, + Some(tree_sitter_typescript::language_tsx()), + )); + + cx.language_registry().add(html_language.clone()); + cx.language_registry().add(javascript_language.clone()); + cx.update_buffer(|buffer, cx| { + buffer.set_language(Some(html_language), cx); + }); + + // Toggle comments for empty selections + cx.set_state( + &r#" +
<p>A</p>ˇ
+ <p>B</p>ˇ
+ <p>C</p>ˇ
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ <!-- <p>A</p>ˇ -->
+ <!-- <p>B</p>ˇ -->
+ <!-- <p>C</p>ˇ -->
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ <p>A</p>ˇ
+ <p>B</p>ˇ
+ <p>C</p>ˇ
+ "#
+ .unindent(),
+ );
+
+ // Toggle comments for mixture of empty and non-empty selections, where
+ // multiple selections occupy a given line.
+ cx.set_state(
+ &r#"
+ <p>A«</p>
+ <p>ˇ»B</p>ˇ
+ <p>C«</p>
+ <p>ˇ»D</p>ˇ
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ <!-- <p>A«</p>
+ <p>ˇ»B</p>ˇ -->
+ <!-- <p>C«</p>
+ <p>ˇ»D</p>ˇ -->
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ <p>A«</p>
+ <p>ˇ»B</p>ˇ
+ <p>C«</p>
+ <p>ˇ»D</p>
ˇ + "# + .unindent(), + ); + + // Toggle comments when different languages are active for different + // selections. + cx.set_state( + &r#" + ˇ + "# + .unindent(), + ); + cx.executor().run_until_parked(); + cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments::default(), cx)); + cx.assert_editor_state( + &r#" + + // ˇvar x = new Y(); + + "# + .unindent(), + ); +} + +#[gpui::test] +fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(0, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(1, 0)..Point::new(1, 4), + primary: None, + }, + ], + cx, + ); + assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb"); + multibuffer + }); + + let (view, cx) = cx.add_window_view(|cx| build_editor(multibuffer, cx)); + _ = view.update(cx, |view, cx| { + assert_eq!(view.text(cx), "aaaa\nbbbb"); + view.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(0, 0)..Point::new(0, 0), + Point::new(1, 0)..Point::new(1, 0), + ]) + }); + + view.handle_input("X", cx); + assert_eq!(view.text(cx), "Xaaaa\nXbbbb"); + assert_eq!( + view.selections.ranges(cx), + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(1, 1)..Point::new(1, 1), + ] + ); + + // Ensure the cursor's head is respected when deleting across an excerpt boundary. + view.change_selections(None, cx, |s| { + s.select_ranges([Point::new(0, 2)..Point::new(1, 2)]) + }); + view.backspace(&Default::default(), cx); + assert_eq!(view.text(cx), "Xa\nbbb"); + assert_eq!( + view.selections.ranges(cx), + [Point::new(1, 0)..Point::new(1, 0)] + ); + + view.change_selections(None, cx, |s| { + s.select_ranges([Point::new(1, 1)..Point::new(0, 1)]) + }); + view.backspace(&Default::default(), cx); + assert_eq!(view.text(cx), "X\nbb"); + assert_eq!( + view.selections.ranges(cx), + [Point::new(0, 1)..Point::new(0, 1)] + ); + }); +} + +#[gpui::test] +fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let markers = vec![('[', ']').into(), ('(', ')').into()]; + let (initial_text, mut excerpt_ranges) = marked_text_ranges_by( + indoc! {" + [aaaa + (bbbb] + cccc)", + }, + markers.clone(), + ); + let excerpt_ranges = markers.into_iter().map(|marker| { + let context = excerpt_ranges.remove(&marker).unwrap()[0].clone(); + ExcerptRange { + context, + primary: None, + } + }); + let buffer = cx.new_model(|cx| Buffer::local(initial_text, cx)); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts(buffer, excerpt_ranges, cx); + multibuffer + }); + + let (view, cx) = cx.add_window_view(|cx| build_editor(multibuffer, cx)); + _ = view.update(cx, |view, cx| { + let (expected_text, selection_ranges) = marked_text_ranges( + indoc! {" + aaaa + bˇbbb + bˇbbˇb + cccc" + }, + true, + ); + assert_eq!(view.text(cx), expected_text); + view.change_selections(None, cx, |s| s.select_ranges(selection_ranges)); + + view.handle_input("X", cx); + + let (expected_text, expected_selections) = marked_text_ranges( + indoc! 
{" + aaaa + bXˇbbXb + bXˇbbXˇb + cccc" + }, + false, + ); + assert_eq!(view.text(cx), expected_text); + assert_eq!(view.selections.ranges(cx), expected_selections); + + view.newline(&Newline, cx); + let (expected_text, expected_selections) = marked_text_ranges( + indoc! {" + aaaa + bX + ˇbbX + b + bX + ˇbbX + ˇb + cccc" + }, + false, + ); + assert_eq!(view.text(cx), expected_text); + assert_eq!(view.selections.ranges(cx), expected_selections); + }); +} + +#[gpui::test] +fn test_refresh_selections(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); + let mut excerpt1_id = None; + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + excerpt1_id = multibuffer + .push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 4), + primary: None, + }, + ], + cx, + ) + .into_iter() + .next(); + assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\nbbbb\ncccc"); + multibuffer + }); + + let editor = cx.add_window(|cx| { + let mut editor = build_editor(multibuffer.clone(), cx); + let snapshot = editor.snapshot(cx); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(1, 3)..Point::new(1, 3)]) + }); + editor.begin_selection(Point::new(2, 1).to_display_point(&snapshot), true, 1, cx); + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(1, 3)..Point::new(1, 3), + Point::new(2, 1)..Point::new(2, 1), + ] + ); + editor + }); + + // Refreshing selections is a no-op when excerpts haven't changed. + _ = editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.refresh()); + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(1, 3)..Point::new(1, 3), + Point::new(2, 1)..Point::new(2, 1), + ] + ); + }); + + _ = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx); + }); + _ = editor.update(cx, |editor, cx| { + // Removing an excerpt causes the first selection to become degenerate. + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(0, 0)..Point::new(0, 0), + Point::new(0, 1)..Point::new(0, 1) + ] + ); + + // Refreshing selections will relocate the first selection to the original buffer + // location. 
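+ // refresh() re-resolves the stale selection anchors against the remaining excerpt and keeps a
+ // pending selection, as the assertions below verify.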
+ editor.change_selections(None, cx, |s| s.refresh()); + assert_eq!( + editor.selections.ranges(cx), + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(0, 3)..Point::new(0, 3) + ] + ); + assert!(editor.selections.pending_anchor().is_some()); + }); +} + +#[gpui::test] +fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); + let mut excerpt1_id = None; + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + excerpt1_id = multibuffer + .push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 4), + primary: None, + }, + ], + cx, + ) + .into_iter() + .next(); + assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\nbbbb\ncccc"); + multibuffer + }); + + let editor = cx.add_window(|cx| { + let mut editor = build_editor(multibuffer.clone(), cx); + let snapshot = editor.snapshot(cx); + editor.begin_selection(Point::new(1, 3).to_display_point(&snapshot), false, 1, cx); + assert_eq!( + editor.selections.ranges(cx), + [Point::new(1, 3)..Point::new(1, 3)] + ); + editor + }); + + _ = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx); + }); + _ = editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.ranges(cx), + [Point::new(0, 0)..Point::new(0, 0)] + ); + + // Ensure we don't panic when selections are refreshed and that the pending selection is finalized. + editor.change_selections(None, cx, |s| s.refresh()); + assert_eq!( + editor.selections.ranges(cx), + [Point::new(0, 3)..Point::new(0, 3)] + ); + assert!(editor.selections.pending_anchor().is_some()); + }); +} + +#[gpui::test] +async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "/* ".to_string(), + end: " */".to_string(), + close: true, + newline: true, + }, + ], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_indents_query("") + .unwrap(), + ); + + let text = concat!( + "{ }\n", // + " x\n", // + " /* */\n", // + "x\n", // + "{{} }\n", // + ); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + view.condition::(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) + .await; + + _ = view.update(cx, |view, cx| { + view.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 3), + DisplayPoint::new(DisplayRow(2), 5)..DisplayPoint::new(DisplayRow(2), 5), + DisplayPoint::new(DisplayRow(4), 4)..DisplayPoint::new(DisplayRow(4), 4), + ]) + }); + view.newline(&Newline, cx); + + assert_eq!( + view.buffer().read(cx).read(cx).text(), + concat!( + "{ \n", // Suppress rustfmt + "\n", // + "}\n", // + " x\n", // + " /* \n", // + " \n", // + " */\n", // + "x\n", // + "{{} \n", // + "}\n", // + ) + ); + }); +} + +#[gpui::test] +fn test_highlighted_ranges(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor 
= cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx); + build_editor(buffer.clone(), cx) + }); + + _ = editor.update(cx, |editor, cx| { + struct Type1; + struct Type2; + + let buffer = editor.buffer.read(cx).snapshot(cx); + + let anchor_range = + |range: Range<Point>| buffer.anchor_after(range.start)..buffer.anchor_after(range.end); + + editor.highlight_background::<Type1>( + &[ + anchor_range(Point::new(2, 1)..Point::new(2, 3)), + anchor_range(Point::new(4, 2)..Point::new(4, 4)), + anchor_range(Point::new(6, 3)..Point::new(6, 5)), + anchor_range(Point::new(8, 4)..Point::new(8, 6)), + ], + |_| Hsla::red(), + cx, + ); + editor.highlight_background::<Type2>( + &[ + anchor_range(Point::new(3, 2)..Point::new(3, 5)), + anchor_range(Point::new(5, 3)..Point::new(5, 6)), + anchor_range(Point::new(7, 4)..Point::new(7, 7)), + anchor_range(Point::new(9, 5)..Point::new(9, 8)), + ], + |_| Hsla::green(), + cx, + ); + + let snapshot = editor.snapshot(cx); + let mut highlighted_ranges = editor.background_highlights_in_range( + anchor_range(Point::new(3, 4)..Point::new(7, 4)), + &snapshot, + cx.theme().colors(), + ); + // Enforce a consistent ordering based on color without relying on the ordering of the + // highlight's `TypeId` which is non-deterministic. + highlighted_ranges.sort_unstable_by_key(|(_, color)| *color); + assert_eq!( + highlighted_ranges, + &[ + ( + DisplayPoint::new(DisplayRow(4), 2)..DisplayPoint::new(DisplayRow(4), 4), + Hsla::red(), + ), + ( + DisplayPoint::new(DisplayRow(6), 3)..DisplayPoint::new(DisplayRow(6), 5), + Hsla::red(), + ), + ( + DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(3), 5), + Hsla::green(), + ), + ( + DisplayPoint::new(DisplayRow(5), 3)..DisplayPoint::new(DisplayRow(5), 6), + Hsla::green(), + ), + ] + ); + assert_eq!( + editor.background_highlights_in_range( + anchor_range(Point::new(5, 6)..Point::new(6, 4)), + &snapshot, + cx.theme().colors(), + ), + &[( + DisplayPoint::new(DisplayRow(6), 3)..DisplayPoint::new(DisplayRow(6), 5), + Hsla::red(), + )] + ); + }); +} + +#[gpui::test] +async fn test_following(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + + let buffer = project.update(cx, |project, cx| { + let buffer = project.create_local_buffer(&sample_text(16, 8, 'a'), None, cx); + cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)) + }); + let leader = cx.add_window(|cx| build_editor(buffer.clone(), cx)); + let follower = cx.update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(Bounds::from_corners( + gpui::Point::new(0.into(), 0.into()), + gpui::Point::new(10.into(), 80.into()), + ))), + ..Default::default() + }, + |cx| cx.new_view(|cx| build_editor(buffer.clone(), cx)), + ) + }); + + let is_still_following = Rc::new(RefCell::new(true)); + let follower_edit_event_count = Rc::new(RefCell::new(0)); + let pending_update = Rc::new(RefCell::new(None)); + _ = follower.update(cx, { + let update = pending_update.clone(); + let is_still_following = is_still_following.clone(); + let follower_edit_event_count = follower_edit_event_count.clone(); + |_, cx| { + cx.subscribe( + &leader.root_view(cx).unwrap(), + move |_, leader, event, cx| { + leader + .read(cx) + .add_event_to_update_proto(event, &mut update.borrow_mut(), cx); + }, + ) + .detach(); + + cx.subscribe( + &follower.root_view(cx).unwrap(), + move |_, _, event: &EditorEvent, _cx| { + if
matches!(Editor::to_follow_event(event), Some(FollowEvent::Unfollow)) { + *is_still_following.borrow_mut() = false; + } + + if let EditorEvent::BufferEdited = event { + *follower_edit_event_count.borrow_mut() += 1; + } + }, + ) + .detach(); + } + }); + + // Update the selections only + _ = leader.update(cx, |leader, cx| { + leader.change_selections(None, cx, |s| s.select_ranges([1..1])); + }); + follower + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx) + }) + .unwrap() + .await + .unwrap(); + _ = follower.update(cx, |follower, cx| { + assert_eq!(follower.selections.ranges(cx), vec![1..1]); + }); + assert_eq!(*is_still_following.borrow(), true); + assert_eq!(*follower_edit_event_count.borrow(), 0); + + // Update the scroll position only + _ = leader.update(cx, |leader, cx| { + leader.set_scroll_position(gpui::Point::new(1.5, 3.5), cx); + }); + follower + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx) + }) + .unwrap() + .await + .unwrap(); + assert_eq!( + follower + .update(cx, |follower, cx| follower.scroll_position(cx)) + .unwrap(), + gpui::Point::new(1.5, 3.5) + ); + assert_eq!(*is_still_following.borrow(), true); + assert_eq!(*follower_edit_event_count.borrow(), 0); + + // Update the selections and scroll position. The follower's scroll position is updated + // via autoscroll, not via the leader's exact scroll position. + _ = leader.update(cx, |leader, cx| { + leader.change_selections(None, cx, |s| s.select_ranges([0..0])); + leader.request_autoscroll(Autoscroll::newest(), cx); + leader.set_scroll_position(gpui::Point::new(1.5, 3.5), cx); + }); + follower + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx) + }) + .unwrap() + .await + .unwrap(); + _ = follower.update(cx, |follower, cx| { + assert_eq!(follower.scroll_position(cx), gpui::Point::new(1.5, 0.0)); + assert_eq!(follower.selections.ranges(cx), vec![0..0]); + }); + assert_eq!(*is_still_following.borrow(), true); + + // Creating a pending selection that precedes another selection + _ = leader.update(cx, |leader, cx| { + leader.change_selections(None, cx, |s| s.select_ranges([1..1])); + leader.begin_selection(DisplayPoint::new(DisplayRow(0), 0), true, 1, cx); + }); + follower + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx) + }) + .unwrap() + .await + .unwrap(); + _ = follower.update(cx, |follower, cx| { + assert_eq!(follower.selections.ranges(cx), vec![0..0, 1..1]); + }); + assert_eq!(*is_still_following.borrow(), true); + + // Extend the pending selection so that it surrounds another selection + _ = leader.update(cx, |leader, cx| { + leader.extend_selection(DisplayPoint::new(DisplayRow(0), 2), 1, cx); + }); + follower + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, pending_update.borrow_mut().take().unwrap(), cx) + }) + .unwrap() + .await + .unwrap(); + _ = follower.update(cx, |follower, cx| { + assert_eq!(follower.selections.ranges(cx), vec![0..2]); + }); + + // Scrolling locally breaks the follow + _ = follower.update(cx, |follower, cx| { + let top_anchor = follower.buffer().read(cx).read(cx).anchor_after(0); + follower.set_scroll_anchor( + ScrollAnchor { + anchor: top_anchor, + offset: gpui::Point::new(0.0, 0.5), + }, + cx, + ); + }); + assert_eq!(*is_still_following.borrow(), false); +} + +#[gpui::test] +async fn 
test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let pane = workspace + .update(cx, |workspace, _| workspace.active_pane().clone()) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + + let leader = pane.update(cx, |_, cx| { + let multibuffer = cx.new_model(|_| MultiBuffer::new(0, ReadWrite)); + cx.new_view(|cx| build_editor(multibuffer.clone(), cx)) + }); + + // Start following the editor when it has no excerpts. + let mut state_message = leader.update(cx, |leader, cx| leader.to_state_proto(cx)); + let follower_1 = cx + .update_window(*workspace.deref(), |_, cx| { + Editor::from_state_proto( + pane.clone(), + workspace.root_view(cx).unwrap(), + ViewId { + creator: Default::default(), + id: 0, + }, + &mut state_message, + cx, + ) + }) + .unwrap() + .unwrap() + .await + .unwrap(); + + let update_message = Rc::new(RefCell::new(None)); + follower_1.update(cx, { + let update = update_message.clone(); + |_, cx| { + cx.subscribe(&leader, move |_, leader, event, cx| { + leader + .read(cx) + .add_event_to_update_proto(event, &mut update.borrow_mut(), cx); + }) + .detach(); + } + }); + + let (buffer_1, buffer_2) = project.update(cx, |project, cx| { + ( + project.create_local_buffer("abc\ndef\nghi\njkl\n", None, cx), + project.create_local_buffer("mno\npqr\nstu\nvwx\n", None, cx), + ) + }); + + // Insert some excerpts. + _ = leader.update(cx, |leader, cx| { + leader.buffer.update(cx, |multibuffer, cx| { + let excerpt_ids = multibuffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: 1..6, + primary: None, + }, + ExcerptRange { + context: 12..15, + primary: None, + }, + ExcerptRange { + context: 0..3, + primary: None, + }, + ], + cx, + ); + multibuffer.insert_excerpts_after( + excerpt_ids[0], + buffer_2.clone(), + [ + ExcerptRange { + context: 8..12, + primary: None, + }, + ExcerptRange { + context: 0..6, + primary: None, + }, + ], + cx, + ); + }); + }); + + // Apply the update of adding the excerpts. + follower_1 + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, update_message.borrow().clone().unwrap(), cx) + }) + .await + .unwrap(); + assert_eq!( + follower_1.update(cx, |editor, cx| editor.text(cx)), + leader.update(cx, |editor, cx| editor.text(cx)) + ); + update_message.borrow_mut().take(); + + // Start following separately after it already has excerpts. + let mut state_message = leader.update(cx, |leader, cx| leader.to_state_proto(cx)); + let follower_2 = cx + .update_window(*workspace.deref(), |_, cx| { + Editor::from_state_proto( + pane.clone(), + workspace.root_view(cx).unwrap().clone(), + ViewId { + creator: Default::default(), + id: 0, + }, + &mut state_message, + cx, + ) + }) + .unwrap() + .unwrap() + .await + .unwrap(); + assert_eq!( + follower_2.update(cx, |editor, cx| editor.text(cx)), + leader.update(cx, |editor, cx| editor.text(cx)) + ); + + // Remove some excerpts. + _ = leader.update(cx, |leader, cx| { + leader.buffer.update(cx, |multibuffer, cx| { + let excerpt_ids = multibuffer.excerpt_ids(); + multibuffer.remove_excerpts([excerpt_ids[1], excerpt_ids[2]], cx); + multibuffer.remove_excerpts([excerpt_ids[0]], cx); + }); + }); + + // Apply the update of removing the excerpts. 
+ follower_1 + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, update_message.borrow().clone().unwrap(), cx) + }) + .await + .unwrap(); + follower_2 + .update(cx, |follower, cx| { + follower.apply_update_proto(&project, update_message.borrow().clone().unwrap(), cx) + }) + .await + .unwrap(); + update_message.borrow_mut().take(); + assert_eq!( + follower_1.update(cx, |editor, cx| editor.text(cx)), + leader.update(cx, |editor, cx| editor.text(cx)) + ); +} + +#[gpui::test] +async fn go_to_prev_overlapping_diagnostic( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let project = cx.update_editor(|editor, _| editor.project.clone().unwrap()); + + cx.set_state(indoc! {" + ˇfn func(abc def: i32) -> u32 { + } + "}); + + _ = cx.update(|cx| { + _ = project.update(cx, |project, cx| { + project + .update_diagnostics( + LanguageServerId(0), + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path("/root/file").unwrap(), + version: None, + diagnostics: vec![ + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 11), + lsp::Position::new(0, 12), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 12), + lsp::Position::new(0, 15), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 25), + lsp::Position::new(0, 28), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + ], + }, + &[], + cx, + ) + .unwrap() + }); + }); + + executor.run_until_parked(); + + cx.update_editor(|editor, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx); + }); + + cx.assert_editor_state(indoc! {" + fn func(abc def: i32) -> ˇu32 { + } + "}); + + cx.update_editor(|editor, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx); + }); + + cx.assert_editor_state(indoc! {" + fn func(abc ˇdef: i32) -> u32 { + } + "}); + + cx.update_editor(|editor, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx); + }); + + cx.assert_editor_state(indoc! {" + fn func(abcˇ def: i32) -> u32 { + } + "}); + + cx.update_editor(|editor, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx); + }); + + cx.assert_editor_state(indoc! 
{" + fn func(abc def: i32) -> ˇu32 { + } + "}); +} + +#[gpui::test] +async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod; + + const A: u32 = 42; + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(); + + // Edits are modified, removed, modified, added + cx.set_state( + &r#" + use some::modified; + + ˇ + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + + cx.update_editor(|editor, cx| { + //Wrap around the bottom of the buffer + for _ in 0..3 { + editor.go_to_hunk(&GoToHunk, cx); + } + }); + + cx.assert_editor_state( + &r#" + ˇuse some::modified; + + + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + //Wrap around the top of the buffer + for _ in 0..2 { + editor.go_to_prev_hunk(&GoToPrevHunk, cx); + } + }); + + cx.assert_editor_state( + &r#" + use some::modified; + + + fn main() { + ˇ println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + editor.go_to_prev_hunk(&GoToPrevHunk, cx); + }); + + cx.assert_editor_state( + &r#" + use some::modified; + + ˇ + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + for _ in 0..3 { + editor.go_to_prev_hunk(&GoToPrevHunk, cx); + } + }); + + cx.assert_editor_state( + &r#" + use some::modified; + + + fn main() { + ˇ println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + + cx.update_editor(|editor, cx| { + editor.fold(&Fold, cx); + + //Make sure that the fold only gets one hunk + for _ in 0..4 { + editor.go_to_hunk(&GoToHunk, cx); + } + }); + + cx.assert_editor_state( + &r#" + ˇuse some::modified; + + + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); +} + +#[test] +fn test_split_words() { + fn split(text: &str) -> Vec<&str> { + split_words(text).collect() + } + + assert_eq!(split("HelloWorld"), &["Hello", "World"]); + assert_eq!(split("hello_world"), &["hello_", "world"]); + assert_eq!(split("_hello_world_"), &["_", "hello_", "world_"]); + assert_eq!(split("Hello_World"), &["Hello_", "World"]); + assert_eq!(split("helloWOrld"), &["hello", "WOrld"]); + assert_eq!(split("helloworld"), &["helloworld"]); + + assert_eq!(split(":do_the_thing"), &[":", "do_", "the_", "thing"]); +} + +#[gpui::test] +async fn test_move_to_enclosing_bracket(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_typescript(Default::default(), cx).await; + let mut assert = |before, after| { + let _state_context = cx.set_state(before); + cx.update_editor(|editor, cx| { + editor.move_to_enclosing_bracket(&MoveToEnclosingBracket, cx) + }); + cx.assert_editor_state(after); + }; + + // Outside bracket jumps to outside of matching bracket + assert("console.logˇ(var);", "console.log(var)ˇ;"); + assert("console.log(var)ˇ;", "console.logˇ(var);"); + + // Inside bracket jumps to inside of matching bracket + assert("console.log(ˇvar);", "console.log(varˇ);"); + assert("console.log(varˇ);", "console.log(ˇvar);"); + + // When outside a 
bracket and inside, favor jumping to the inside bracket + assert( + "console.log('foo', [1, 2, 3]ˇ);", + "console.log(ˇ'foo', [1, 2, 3]);", + ); + assert( + "console.log(ˇ'foo', [1, 2, 3]);", + "console.log('foo', [1, 2, 3]ˇ);", + ); + + // Bias forward if two options are equally likely + assert( + "let result = curried_fun()ˇ();", + "let result = curried_fun()()ˇ;", + ); + + // If directly adjacent to a smaller pair but inside a larger (not adjacent), pick the smaller + assert( + indoc! {" + function test() { + console.log('test')ˇ + }"}, + indoc! {" + function test() { + console.logˇ('test') + }"}, + ); +} + +#[gpui::test] +async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/a", + json!({ + "main.rs": "fn main() { let a = 5; }", + "other.rs": "// Test file", + }), + ) + .await; + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + brackets: BracketPairConfig { + pairs: vec![BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }], + disabled_scopes_by_bracket_ix: Vec::new(), + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_on_type_formatting_provider: Some(lsp::DocumentOnTypeFormattingOptions { + first_trigger_character: "{".to_string(), + more_trigger_character: None, + }), + ..Default::default() + }, + ..Default::default() + }, + ); + + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let worktree_id = workspace + .update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.worktrees().next().unwrap().read(cx).id() + }) + }) + .unwrap(); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + let editor_handle = workspace + .update(cx, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs"), None, true, cx) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); + + fake_server.handle_request::(|params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 21), + ); + + Ok(Some(vec![lsp::TextEdit { + new_text: "]".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 22), lsp::Position::new(0, 22)), + }])) + }); + + editor_handle.update(cx, |editor, cx| { + editor.focus(cx); + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(0, 21)..Point::new(0, 20)]) + }); + editor.handle_input("{", cx); + }); + + cx.executor().run_until_parked(); + + _ = buffer.update(cx, |buffer, _| { + assert_eq!( + buffer.text(), + "fn main() { let a = {5}; }", + "No extra braces from on type formatting should appear in the buffer" + ) + }); +} + +#[gpui::test] 
+async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/a", + json!({ + "main.rs": "fn main() { let a = 5; }", + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let server_restarts = Arc::new(AtomicUsize::new(0)); + let closure_restarts = Arc::clone(&server_restarts); + let language_server_name = "test language server"; + let language_name: Arc = "Rust".into(); + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: Arc::clone(&language_name), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: language_server_name, + initialization_options: Some(json!({ + "testOptionValue": true + })), + initializer: Some(Box::new(move |fake_server| { + let task_restarts = Arc::clone(&closure_restarts); + fake_server.handle_request::(move |_, _| { + task_restarts.fetch_add(1, atomic::Ordering::Release); + futures::future::ready(Ok(())) + }); + })), + ..Default::default() + }, + ); + + let _window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + let _fake_server = fake_servers.next().await.unwrap(); + update_test_language_settings(cx, |language_settings| { + language_settings.languages.insert( + Arc::clone(&language_name), + LanguageSettingsContent { + tab_size: NonZeroU32::new(8), + ..Default::default() + }, + ); + }); + cx.executor().run_until_parked(); + assert_eq!( + server_restarts.load(atomic::Ordering::Acquire), + 0, + "Should not restart LSP server on an unrelated change" + ); + + update_test_project_settings(cx, |project_settings| { + project_settings.lsp.insert( + "Some other server name".into(), + LspSettings { + binary: None, + settings: None, + initialization_options: Some(json!({ + "some other init value": false + })), + }, + ); + }); + cx.executor().run_until_parked(); + assert_eq!( + server_restarts.load(atomic::Ordering::Acquire), + 0, + "Should not restart LSP server on an unrelated LSP settings change" + ); + + update_test_project_settings(cx, |project_settings| { + project_settings.lsp.insert( + language_server_name.into(), + LspSettings { + binary: None, + settings: None, + initialization_options: Some(json!({ + "anotherInitValue": false + })), + }, + ); + }); + cx.executor().run_until_parked(); + assert_eq!( + server_restarts.load(atomic::Ordering::Acquire), + 1, + "Should restart LSP server on a related LSP settings change" + ); + + update_test_project_settings(cx, |project_settings| { + project_settings.lsp.insert( + language_server_name.into(), + LspSettings { + binary: None, + settings: None, + initialization_options: Some(json!({ + "anotherInitValue": false + })), + }, + ); + }); + cx.executor().run_until_parked(); + assert_eq!( + server_restarts.load(atomic::Ordering::Acquire), + 1, + "Should not restart LSP server on a related LSP settings change that is the same" + ); + + update_test_project_settings(cx, |project_settings| { + project_settings.lsp.insert( + language_server_name.into(), + LspSettings { + binary: 
None, + settings: None, + initialization_options: None, + }, + ); + }); + cx.executor().run_until_parked(); + assert_eq!( + server_restarts.load(atomic::Ordering::Acquire), + 2, + "Should restart LSP server on another related LSP settings change" + ); +} + +#[gpui::test] +async fn test_completions_with_additional_edits(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string()]), + resolve_provider: Some(true), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {"fn main() { let a = 2ˇ; }"}); + cx.simulate_keystroke("."); + let completion_item = lsp::CompletionItem { + label: "some".into(), + kind: Some(lsp::CompletionItemKind::SNIPPET), + detail: Some("Wrap the expression in an `Option::Some`".to_string()), + documentation: Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "```rust\nSome(2)\n```".to_string(), + })), + deprecated: Some(false), + sort_text: Some("fffffff2".to_string()), + filter_text: Some("some".to_string()), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 22, + }, + end: lsp::Position { + line: 0, + character: 22, + }, + }, + new_text: "Some(2)".to_string(), + })), + additional_text_edits: Some(vec![lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 20, + }, + end: lsp::Position { + line: 0, + character: 22, + }, + }, + new_text: "".to_string(), + }]), + ..Default::default() + }; + + let closure_completion_item = completion_item.clone(); + let mut request = cx.handle_request::(move |_, _, _| { + let task_completion_item = closure_completion_item.clone(); + async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + task_completion_item, + ]))) + } + }); + + request.next().await; + + cx.condition(|editor, _| editor.context_menu_visible()) + .await; + let apply_additional_edits = cx.update_editor(|editor, cx| { + editor + .confirm_completion(&ConfirmCompletion::default(), cx) + .unwrap() + }); + cx.assert_editor_state(indoc! {"fn main() { let a = 2.Some(2)ˇ; }"}); + + cx.handle_request::(move |_, _, _| { + let task_completion_item = completion_item.clone(); + async move { Ok(task_completion_item) } + }) + .next() + .await + .unwrap(); + apply_additional_edits.await.unwrap(); + cx.assert_editor_state(indoc! 
{"fn main() { let a = Some(2)ˇ; }"}); +} + +#[gpui::test] +async fn test_completions_in_languages_with_extra_word_characters(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new( + Language::new( + LanguageConfig { + matcher: LanguageMatcher { + path_suffixes: vec!["jsx".into()], + ..Default::default() + }, + overrides: [( + "element".into(), + LanguageConfigOverride { + word_characters: Override::Set(['-'].into_iter().collect()), + ..Default::default() + }, + )] + .into_iter() + .collect(), + ..Default::default() + }, + Some(tree_sitter_typescript::language_tsx()), + ) + .with_override_query("(jsx_self_closing_element) @element") + .unwrap(), + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![":".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + + cx.lsp + .handle_request::(move |_, _| async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "bg-blue".into(), + ..Default::default() + }, + lsp::CompletionItem { + label: "bg-red".into(), + ..Default::default() + }, + lsp::CompletionItem { + label: "bg-yellow".into(), + ..Default::default() + }, + ]))) + }); + + cx.set_state(r#"
<p class="bgˇ" />
"#); + + // Trigger completion when typing a dash, because the dash is an extra + // word character in the 'element' scope, which contains the cursor. + cx.simulate_keystroke("-"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _| { + if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() { + assert_eq!( + menu.matches.iter().map(|m| &m.string).collect::<Vec<_>>(), + &["bg-red", "bg-blue", "bg-yellow"] + ); + } else { + panic!("expected completion menu to be open"); + } + }); + + cx.simulate_keystroke("l"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _| { + if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() { + assert_eq!( + menu.matches.iter().map(|m| &m.string).collect::<Vec<_>>(), + &["bg-blue", "bg-yellow"] + ); + } else { + panic!("expected completion menu to be open"); + } + }); + + // When filtering completions, consider the character after the '-' to + // be the start of a subword. + cx.set_state(r#"
<p class="yelˇ" />
"#); + cx.simulate_keystroke("l"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _| { + if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() { + assert_eq!( + menu.matches.iter().map(|m| &m.string).collect::>(), + &["bg-yellow"] + ); + } else { + panic!("expected completion menu to be open"); + } + }); +} + +#[gpui::test] +async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.formatter = Some(language_settings::Formatter::Prettier) + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_file("/file.ts", Default::default()).await; + + let project = Project::test(fs, ["/file.ts".as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + update_test_language_settings(cx, |settings| { + settings.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + + let test_plugin = "test_plugin"; + let _ = language_registry.register_fake_lsp_adapter( + "TypeScript", + FakeLspAdapter { + prettier_plugins: vec![test_plugin], + ..Default::default() + }, + ); + + let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; + let buffer = project + .update(cx, |project, cx| project.open_local_buffer("/file.ts", cx)) + .await + .unwrap(); + + let buffer_text = "one\ntwo\nthree\n"; + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx)); + _ = editor.update(cx, |editor, cx| editor.set_text(buffer_text, cx)); + + editor + .update(cx, |editor, cx| { + editor.perform_format(project.clone(), FormatTrigger::Manual, cx) + }) + .unwrap() + .await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + buffer_text.to_string() + prettier_format_suffix, + "Test prettier formatting was not applied to the original buffer text", + ); + + update_test_language_settings(cx, |settings| { + settings.defaults.formatter = Some(language_settings::Formatter::Auto) + }); + let format = editor.update(cx, |editor, cx| { + editor.perform_format(project.clone(), FormatTrigger::Manual, cx) + }); + format.await.unwrap(); + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + buffer_text.to_string() + prettier_format_suffix + "\n" + prettier_format_suffix, + "Autoformatting (via test prettier) was not applied to the original buffer text", + ); +} + +#[gpui::test] +async fn test_addition_reverts(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + let base_text = indoc! {r#"struct Row; +struct Row1; +struct Row2; + +struct Row4; +struct Row5; +struct Row6; + +struct Row8; +struct Row9; +struct Row10;"#}; + + // When addition hunks are not adjacent to carets, no hunk revert is performed + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row1; + struct Row1.1; + struct Row1.2; + struct Row2;ˇ + + struct Row4; + struct Row5; + struct Row6; + + struct Row8; + ˇstruct Row9; + struct Row9.1; + struct Row9.2; + struct Row9.3; + struct Row10;"#}, + vec![DiffHunkStatus::Added, DiffHunkStatus::Added], + indoc! 
{r#"struct Row; + struct Row1; + struct Row1.1; + struct Row1.2; + struct Row2;ˇ + + struct Row4; + struct Row5; + struct Row6; + + struct Row8; + ˇstruct Row9; + struct Row9.1; + struct Row9.2; + struct Row9.3; + struct Row10;"#}, + base_text, + &mut cx, + ); + // Same for selections + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row1; + struct Row2; + struct Row2.1; + struct Row2.2; + «ˇ + struct Row4; + struct» Row5; + «struct Row6; + ˇ» + struct Row9.1; + struct Row9.2; + struct Row9.3; + struct Row8; + struct Row9; + struct Row10;"#}, + vec![DiffHunkStatus::Added, DiffHunkStatus::Added], + indoc! {r#"struct Row; + struct Row1; + struct Row2; + struct Row2.1; + struct Row2.2; + «ˇ + struct Row4; + struct» Row5; + «struct Row6; + ˇ» + struct Row9.1; + struct Row9.2; + struct Row9.3; + struct Row8; + struct Row9; + struct Row10;"#}, + base_text, + &mut cx, + ); + + // When carets and selections intersect the addition hunks, those are reverted. + // Adjacent carets got merged. + assert_hunk_revert( + indoc! {r#"struct Row; + ˇ// something on the top + struct Row1; + struct Row2; + struct Roˇw3.1; + struct Row2.2; + struct Row2.3;ˇ + + struct Row4; + struct ˇRow5.1; + struct Row5.2; + struct «Rowˇ»5.3; + struct Row5; + struct Row6; + ˇ + struct Row9.1; + struct «Rowˇ»9.2; + struct «ˇRow»9.3; + struct Row8; + struct Row9; + «ˇ// something on bottom» + struct Row10;"#}, + vec![ + DiffHunkStatus::Added, + DiffHunkStatus::Added, + DiffHunkStatus::Added, + DiffHunkStatus::Added, + DiffHunkStatus::Added, + ], + indoc! {r#"struct Row; + ˇstruct Row1; + struct Row2; + ˇ + struct Row4; + ˇstruct Row5; + struct Row6; + ˇ + ˇstruct Row8; + struct Row9; + ˇstruct Row10;"#}, + base_text, + &mut cx, + ); +} + +#[gpui::test] +async fn test_modification_reverts(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + let base_text = indoc! {r#"struct Row; +struct Row1; +struct Row2; + +struct Row4; +struct Row5; +struct Row6; + +struct Row8; +struct Row9; +struct Row10;"#}; + + // Modification hunks behave the same as the addition ones. + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row1; + struct Row33; + ˇ + struct Row4; + struct Row5; + struct Row6; + ˇ + struct Row99; + struct Row9; + struct Row10;"#}, + vec![DiffHunkStatus::Modified, DiffHunkStatus::Modified], + indoc! {r#"struct Row; + struct Row1; + struct Row33; + ˇ + struct Row4; + struct Row5; + struct Row6; + ˇ + struct Row99; + struct Row9; + struct Row10;"#}, + base_text, + &mut cx, + ); + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row1; + struct Row33; + «ˇ + struct Row4; + struct» Row5; + «struct Row6; + ˇ» + struct Row99; + struct Row9; + struct Row10;"#}, + vec![DiffHunkStatus::Modified, DiffHunkStatus::Modified], + indoc! {r#"struct Row; + struct Row1; + struct Row33; + «ˇ + struct Row4; + struct» Row5; + «struct Row6; + ˇ» + struct Row99; + struct Row9; + struct Row10;"#}, + base_text, + &mut cx, + ); + + assert_hunk_revert( + indoc! {r#"ˇstruct Row1.1; + struct Row1; + «ˇstr»uct Row22; + + struct ˇRow44; + struct Row5; + struct «Rˇ»ow66;ˇ + + «struˇ»ct Row88; + struct Row9; + struct Row1011;ˇ"#}, + vec![ + DiffHunkStatus::Modified, + DiffHunkStatus::Modified, + DiffHunkStatus::Modified, + DiffHunkStatus::Modified, + DiffHunkStatus::Modified, + DiffHunkStatus::Modified, + ], + indoc! 
{r#"struct Row; + ˇstruct Row1; + struct Row2; + ˇ + struct Row4; + ˇstruct Row5; + struct Row6; + ˇ + struct Row8; + ˇstruct Row9; + struct Row10;ˇ"#}, + base_text, + &mut cx, + ); +} + +#[gpui::test] +async fn test_deletion_reverts(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + let base_text = indoc! {r#"struct Row; +struct Row1; +struct Row2; + +struct Row4; +struct Row5; +struct Row6; + +struct Row8; +struct Row9; +struct Row10;"#}; + + // Deletion hunks trigger with carets on ajacent rows, so carets and selections have to stay farther to avoid the revert + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row2; + + ˇstruct Row4; + struct Row5; + struct Row6; + ˇ + struct Row8; + struct Row10;"#}, + vec![DiffHunkStatus::Removed, DiffHunkStatus::Removed], + indoc! {r#"struct Row; + struct Row2; + + ˇstruct Row4; + struct Row5; + struct Row6; + ˇ + struct Row8; + struct Row10;"#}, + base_text, + &mut cx, + ); + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row2; + + «ˇstruct Row4; + struct» Row5; + «struct Row6; + ˇ» + struct Row8; + struct Row10;"#}, + vec![DiffHunkStatus::Removed, DiffHunkStatus::Removed], + indoc! {r#"struct Row; + struct Row2; + + «ˇstruct Row4; + struct» Row5; + «struct Row6; + ˇ» + struct Row8; + struct Row10;"#}, + base_text, + &mut cx, + ); + + // Deletion hunks are ephemeral, so it's impossible to place the caret into them — Zed triggers reverts for lines, adjacent to carets and selections. + assert_hunk_revert( + indoc! {r#"struct Row; + ˇstruct Row2; + + struct Row4; + struct Row5; + struct Row6; + + struct Row8;ˇ + struct Row10;"#}, + vec![DiffHunkStatus::Removed, DiffHunkStatus::Removed], + indoc! {r#"struct Row; + struct Row1; + ˇstruct Row2; + + struct Row4; + struct Row5; + struct Row6; + + struct Row8;ˇ + struct Row9; + struct Row10;"#}, + base_text, + &mut cx, + ); + assert_hunk_revert( + indoc! {r#"struct Row; + struct Row2«ˇ; + struct Row4; + struct» Row5; + «struct Row6; + + struct Row8;ˇ» + struct Row10;"#}, + vec![ + DiffHunkStatus::Removed, + DiffHunkStatus::Removed, + DiffHunkStatus::Removed, + ], + indoc! 
{r#"struct Row; + struct Row1; + struct Row2«ˇ; + + struct Row4; + struct» Row5; + «struct Row6; + + struct Row8;ˇ» + struct Row9; + struct Row10;"#}, + base_text, + &mut cx, + ); +} + +#[gpui::test] +async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let cols = 4; + let rows = 10; + let sample_text_1 = sample_text(rows, cols, 'a'); + assert_eq!( + sample_text_1, + "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj" + ); + let sample_text_2 = sample_text(rows, cols, 'l'); + assert_eq!( + sample_text_2, + "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" + ); + let sample_text_3 = sample_text(rows, cols, 'v'); + assert_eq!( + sample_text_3, + "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" + ); + + fn diff_every_buffer_row( + buffer: &Model, + sample_text: String, + cols: usize, + cx: &mut gpui::TestAppContext, + ) { + // revert first character in each row, creating one large diff hunk per buffer + let is_first_char = |offset: usize| offset % cols == 0; + buffer.update(cx, |buffer, cx| { + buffer.set_text( + sample_text + .chars() + .enumerate() + .map(|(offset, c)| if is_first_char(offset) { 'X' } else { c }) + .collect::(), + cx, + ); + buffer.set_diff_base(Some(sample_text), cx); + }); + cx.executor().run_until_parked(); + } + + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text_1.clone(), cx)); + diff_every_buffer_row(&buffer_1, sample_text_1.clone(), cols, cx); + + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text_2.clone(), cx)); + diff_every_buffer_row(&buffer_2, sample_text_2.clone(), cols, cx); + + let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx)); + diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx); + + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_3.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + + let (editor, cx) = cx.add_window_view(|cx| build_editor(multibuffer, cx)); + editor.update(cx, |editor, cx| { + assert_eq!(editor.text(cx), "XaaaXbbbX\nccXc\ndXdd\n\nhXhh\nXiiiXjjjX\n\nXlllXmmmX\nnnXn\noXoo\n\nsXss\nXtttXuuuX\n\nXvvvXwwwX\nxxXx\nyXyy\n\n}X}}\nX~~~X\u{7f}\u{7f}\u{7f}X\n"); + editor.select_all(&SelectAll, cx); + editor.revert_selected_hunks(&RevertSelectedHunks, cx); + }); + cx.executor().run_until_parked(); + // When all ranges are selected, all buffer hunks are reverted. 
+ editor.update(cx, |editor, cx| { + assert_eq!(editor.text(cx), "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nllll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu\n\n\nvvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}\n\n"); + }); + buffer_1.update(cx, |buffer, _| { + assert_eq!(buffer.text(), sample_text_1); + }); + buffer_2.update(cx, |buffer, _| { + assert_eq!(buffer.text(), sample_text_2); + }); + buffer_3.update(cx, |buffer, _| { + assert_eq!(buffer.text(), sample_text_3); + }); + + diff_every_buffer_row(&buffer_1, sample_text_1.clone(), cols, cx); + diff_every_buffer_row(&buffer_2, sample_text_2.clone(), cols, cx); + diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx); + editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.select_ranges(Some(Point::new(0, 0)..Point::new(6, 0))); + }); + editor.revert_selected_hunks(&RevertSelectedHunks, cx); + }); + // Now, when all ranges selected belong to buffer_1, the revert should succeed, + // but not affect buffer_2 and its related excerpts. + editor.update(cx, |editor, cx| { + assert_eq!( + editor.text(cx), + "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nXlllXmmmX\nnnXn\noXoo\nXpppXqqqX\nrrXr\nsXss\nXtttXuuuX\n\n\nXvvvXwwwX\nxxXx\nyXyy\nXzzzX{{{X\n||X|\n}X}}\nX~~~X\u{7f}\u{7f}\u{7f}X\n\n" + ); + }); + buffer_1.update(cx, |buffer, _| { + assert_eq!(buffer.text(), sample_text_1); + }); + buffer_2.update(cx, |buffer, _| { + assert_eq!( + buffer.text(), + "XlllXmmmX\nnnXn\noXoo\nXpppXqqqX\nrrXr\nsXss\nXtttXuuuX" + ); + }); + buffer_3.update(cx, |buffer, _| { + assert_eq!( + buffer.text(), + "XvvvXwwwX\nxxXx\nyXyy\nXzzzX{{{X\n||X|\n}X}}\nX~~~X\u{7f}\u{7f}\u{7f}X" + ); + }); +} + +#[gpui::test] +async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let cols = 4; + let rows = 10; + let sample_text_1 = sample_text(rows, cols, 'a'); + assert_eq!( + sample_text_1, + "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj" + ); + let sample_text_2 = sample_text(rows, cols, 'l'); + assert_eq!( + sample_text_2, + "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" + ); + let sample_text_3 = sample_text(rows, cols, 'v'); + assert_eq!( + sample_text_3, + "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" + ); + + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text_1.clone(), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text_2.clone(), cx)); + let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx)); + + let multi_buffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_3.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: 
Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/a", + json!({ + "main.rs": sample_text_1, + "other.rs": sample_text_2, + "lib.rs": sample_text_3, + }), + ) + .await; + let project = Project::test(fs, ["/a".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + let multi_buffer_editor = + cx.new_view(|cx| Editor::new(EditorMode::Full, multi_buffer, Some(project.clone()), cx)); + let multibuffer_item_id = workspace + .update(cx, |workspace, cx| { + assert!( + workspace.active_item(cx).is_none(), + "active item should be None before the first item is added" + ); + workspace.add_item_to_active_pane(Box::new(multi_buffer_editor.clone()), None, cx); + let active_item = workspace + .active_item(cx) + .expect("should have an active item after adding the multi buffer"); + assert!( + !active_item.is_singleton(cx), + "A multi buffer was expected to active after adding" + ); + active_item.item_id() + }) + .unwrap(); + cx.executor().run_until_parked(); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| s.select_ranges(Some(1..2))); + editor.open_excerpts(&OpenExcerpts, cx); + }); + cx.executor().run_until_parked(); + let first_item_id = workspace + .update(cx, |workspace, cx| { + let active_item = workspace + .active_item(cx) + .expect("should have an active item after navigating into the 1st buffer"); + let first_item_id = active_item.item_id(); + assert_ne!( + first_item_id, multibuffer_item_id, + "Should navigate into the 1st buffer and activate it" + ); + assert!( + active_item.is_singleton(cx), + "New active item should be a singleton buffer" + ); + assert_eq!( + active_item + .act_as::(cx) + .expect("should have navigated into an editor for the 1st buffer") + .read(cx) + .text(cx), + sample_text_1 + ); + + workspace + .go_back(workspace.active_pane().downgrade(), cx) + .detach_and_log_err(cx); + + first_item_id + }) + .unwrap(); + cx.executor().run_until_parked(); + workspace + .update(cx, |workspace, cx| { + let active_item = workspace + .active_item(cx) + .expect("should have an active item after navigating back"); + assert_eq!( + active_item.item_id(), + multibuffer_item_id, + "Should navigate back to the multi buffer" + ); + assert!(!active_item.is_singleton(cx)); + }) + .unwrap(); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges(Some(39..40)) + }); + editor.open_excerpts(&OpenExcerpts, cx); + }); + cx.executor().run_until_parked(); + let second_item_id = workspace + .update(cx, |workspace, cx| { + let active_item = workspace + .active_item(cx) + .expect("should have an active item after navigating into the 2nd buffer"); + let second_item_id = active_item.item_id(); + assert_ne!( + second_item_id, multibuffer_item_id, + "Should navigate away from the multibuffer" + ); + assert_ne!( + second_item_id, first_item_id, + "Should navigate into the 2nd buffer and activate it" + ); + assert!( + active_item.is_singleton(cx), + "New active item should be a singleton buffer" + ); + assert_eq!( + active_item + .act_as::(cx) + .expect("should have navigated into an editor") + .read(cx) + .text(cx), + sample_text_2 + ); + + workspace + 
.go_back(workspace.active_pane().downgrade(), cx) + .detach_and_log_err(cx); + + second_item_id + }) + .unwrap(); + cx.executor().run_until_parked(); + workspace + .update(cx, |workspace, cx| { + let active_item = workspace + .active_item(cx) + .expect("should have an active item after navigating back from the 2nd buffer"); + assert_eq!( + active_item.item_id(), + multibuffer_item_id, + "Should navigate back from the 2nd buffer to the multi buffer" + ); + assert!(!active_item.is_singleton(cx)); + }) + .unwrap(); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges(Some(60..70)) + }); + editor.open_excerpts(&OpenExcerpts, cx); + }); + cx.executor().run_until_parked(); + workspace + .update(cx, |workspace, cx| { + let active_item = workspace + .active_item(cx) + .expect("should have an active item after navigating into the 3rd buffer"); + let third_item_id = active_item.item_id(); + assert_ne!( + third_item_id, multibuffer_item_id, + "Should navigate into the 3rd buffer and activate it" + ); + assert_ne!(third_item_id, first_item_id); + assert_ne!(third_item_id, second_item_id); + assert!( + active_item.is_singleton(cx), + "New active item should be a singleton buffer" + ); + assert_eq!( + active_item + .act_as::(cx) + .expect("should have navigated into an editor") + .read(cx) + .text(cx), + sample_text_3 + ); + + workspace + .go_back(workspace.active_pane().downgrade(), cx) + .detach_and_log_err(cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + workspace + .update(cx, |workspace, cx| { + let active_item = workspace + .active_item(cx) + .expect("should have an active item after navigating back from the 3rd buffer"); + assert_eq!( + active_item.item_id(), + multibuffer_item_id, + "Should navigate back from the 3rd buffer to the multi buffer" + ); + assert!(!active_item.is_singleton(cx)); + }) + .unwrap(); +} + +#[gpui::test] +async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod; + + const A: u32 = 42; + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(); + + cx.set_state( + &r#" + use some::modified; + + ˇ + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + let unexpanded_hunks = vec![ + ( + "use some::mod;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(0)..DisplayRow(1), + ), + ( + "const A: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(2)..DisplayRow(2), + ), + ( + " println!(\"hello\");\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(4)..DisplayRow(5), + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(6)..DisplayRow(7), + ), + ]; + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!(all_hunks, unexpanded_hunks); + }); + + cx.update_editor(|editor, cx| { + for _ in 0..4 { + editor.go_to_hunk(&GoToHunk, cx); + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); + } + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::modified; + + ˇ + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + 
let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(1)..=DisplayRow(1), DisplayRow(7)..=DisplayRow(7), DisplayRow(9)..=DisplayRow(9)], + "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" + ); + assert_eq!( + all_hunks, + vec![ + ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(1)..DisplayRow(2)), + ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(4)..DisplayRow(4)), + (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(7)..DisplayRow(8)), + ("".to_string(), DiffHunkStatus::Added, DisplayRow(9)..DisplayRow(10)), + ], + "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ + (from modified and removed hunks)" + ); + assert_eq!( + all_hunks, all_expanded_hunks, + "Editor hunks should not change and all be expanded" + ); + }); + + cx.update_editor(|editor, cx| { + editor.cancel(&Cancel, cx); + + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + Vec::new(), + "After cancelling in editor, no git highlights should be left" + ); + assert_eq!( + all_expanded_hunks, + Vec::new(), + "After cancelling in editor, no hunks should be expanded" + ); + assert_eq!( + all_hunks, unexpanded_hunks, + "After cancelling in editor, regular hunks' coordinates should get back to normal" + ); + }); +} + +#[gpui::test] +async fn test_toggled_diff_base_change( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + + fn main(ˇ) { + println!("hello"); + + println!("world"); + } + "# + .unindent(); + + cx.set_state( + &r#" + use some::mod2; + + const A: u32 = 42; + const C: u32 = 42; + + fn main(ˇ) { + //println!("hello"); + + println!("world"); + // + // + } + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![ + ( + "use some::mod1;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(0)..DisplayRow(0) + ), + ( + "const B: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(3)..DisplayRow(3) + ), + ( + "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(5)..DisplayRow(7) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(9)..DisplayRow(11) + ), + ] + ); + }); + + cx.update_editor(|editor, cx| { + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod2; + + const A: u32 = 42; + const C: u32 = 42; + + fn main(ˇ) { + //println!("hello"); + + println!("world"); + // + // + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, 
cx), + vec![DisplayRow(9)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(14)], + "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" + ); + assert_eq!( + all_hunks, + vec![ + ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(1)..DisplayRow(1)), + ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(5)..DisplayRow(5)), + ("fn main(ˇ) {\n println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(9)..DisplayRow(11)), + ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(15)), + ], + "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ + (from modified and removed hunks)" + ); + assert_eq!( + all_hunks, all_expanded_hunks, + "Editor hunks should not change and all be expanded" + ); + }); + + cx.set_diff_base(Some("new diff base!")); + executor.run_until_parked(); + + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + Vec::new(), + "After diff base is changed, old git highlights should be removed" + ); + assert_eq!( + all_expanded_hunks, + Vec::new(), + "After diff base is changed, old git hunk expansions should be removed" + ); + assert_eq!( + all_hunks, + vec![( + "new diff base!".to_string(), + DiffHunkStatus::Modified, + DisplayRow(0)..snapshot.display_snapshot.max_point().row() + )], + "After diff base is changed, hunks should update" + ); + }); +} + +#[gpui::test] +async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + + fn main(ˇ) { + println!("hello"); + + println!("world"); + } + + fn another() { + println!("another"); + } + + fn another2() { + println!("another2"); + } + "# + .unindent(); + + cx.set_state( + &r#" + «use some::mod2; + + const A: u32 = 42; + const C: u32 = 42; + + fn main() { + //println!("hello"); + + println!("world"); + // + //ˇ» + } + + fn another() { + println!("another"); + println!("another"); + } + + println!("another2"); + } + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![ + ( + "use some::mod1;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(0)..DisplayRow(0) + ), + ( + "const B: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(3)..DisplayRow(3) + ), + ( + "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(5)..DisplayRow(7) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(9)..DisplayRow(11) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(15)..DisplayRow(16) + ), + ( + "fn another2() {\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(18)..DisplayRow(18) + ), + ] + ); + }); + + cx.update_editor(|editor, cx| { + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + «use some::mod2; + + const A: u32 = 42; + const C: u32 = 42; + + fn main() { + 
//println!("hello"); + + println!("world"); + // + //ˇ» + } + + fn another() { + println!("another"); + println!("another"); + } + + println!("another2"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(9)..=DisplayRow(10), + DisplayRow(13)..=DisplayRow(14), + DisplayRow(19)..=DisplayRow(19) + ] + ); + assert_eq!( + all_hunks, + vec![ + ( + "use some::mod1;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(1)..DisplayRow(1) + ), + ( + "const B: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(5)..DisplayRow(5) + ), + ( + "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(9)..DisplayRow(11) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(13)..DisplayRow(15) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(19)..DisplayRow(20) + ), + ( + "fn another2() {\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(23)..DisplayRow(23) + ), + ], + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx)); + cx.executor().run_until_parked(); + cx.assert_editor_state( + &r#" + «use some::mod2; + + const A: u32 = 42; + const C: u32 = 42; + + fn main() { + //println!("hello"); + + println!("world"); + // + //ˇ» + } + + fn another() { + println!("another"); + println!("another"); + } + + println!("another2"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(5)..=DisplayRow(5)], + "Only one hunk is left not folded, its highlight should be visible" + ); + assert_eq!( + all_hunks, + vec![ + ( + "use some::mod1;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(0)..DisplayRow(0) + ), + ( + "const B: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(0)..DisplayRow(0) + ), + ( + "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(0)..DisplayRow(0) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(0)..DisplayRow(1) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(5)..DisplayRow(6) + ), + ( + "fn another2() {\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(9)..DisplayRow(9) + ), + ], + "Hunk list should still return shifted folded hunks" + ); + assert_eq!( + all_expanded_hunks, + vec![ + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(5)..DisplayRow(6) + ), + ( + "fn another2() {\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(9)..DisplayRow(9) + ), + ], + "Only non-folded hunks should be left expanded" + ); + }); + + cx.update_editor(|editor, cx| { + editor.select_all(&SelectAll, cx); + editor.unfold_lines(&UnfoldLines, cx); + }); + cx.executor().run_until_parked(); + cx.assert_editor_state( + &r#" + «use some::mod2; + + const A: u32 = 42; + const C: u32 = 42; + + fn main() { + //println!("hello"); + + println!("world"); + // + // + } + + fn another() { + println!("another"); + println!("another"); + } + + println!("another2"); + } + ˇ»"# + .unindent(), + ); + 
cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(9)..=DisplayRow(10), + DisplayRow(13)..=DisplayRow(14), + DisplayRow(19)..=DisplayRow(19) + ], + "After unfolding, all hunk diffs should be visible again" + ); + assert_eq!( + all_hunks, + vec![ + ( + "use some::mod1;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(1)..DisplayRow(1) + ), + ( + "const B: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(5)..DisplayRow(5) + ), + ( + "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(9)..DisplayRow(11) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(13)..DisplayRow(15) + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(19)..DisplayRow(20) + ), + ( + "fn another2() {\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(23)..DisplayRow(23) + ), + ], + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); +} + +#[gpui::test] +async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let cols = 4; + let rows = 10; + let sample_text_1 = sample_text(rows, cols, 'a'); + assert_eq!( + sample_text_1, + "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj" + ); + let modified_sample_text_1 = "aaaa\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj"; + let sample_text_2 = sample_text(rows, cols, 'l'); + assert_eq!( + sample_text_2, + "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" + ); + let modified_sample_text_2 = "llll\nmmmm\n1n1n1n1n1\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu"; + let sample_text_3 = sample_text(rows, cols, 'v'); + assert_eq!( + sample_text_3, + "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" + ); + let modified_sample_text_3 = + "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n@@@@\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}"; + let buffer_1 = cx.new_model(|cx| { + let mut buffer = Buffer::local(modified_sample_text_1.to_string(), cx); + buffer.set_diff_base(Some(sample_text_1.clone()), cx); + buffer + }); + let buffer_2 = cx.new_model(|cx| { + let mut buffer = Buffer::local(modified_sample_text_2.to_string(), cx); + buffer.set_diff_base(Some(sample_text_2.clone()), cx); + buffer + }); + let buffer_3 = cx.new_model(|cx| { + let mut buffer = Buffer::local(modified_sample_text_3.to_string(), cx); + buffer.set_diff_base(Some(sample_text_3.clone()), cx); + buffer + }); + + let multi_buffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_3.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(3, 0), + primary: None, + }, + ExcerptRange { + context: 
Point::new(5, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 4), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/a", + json!({ + "main.rs": modified_sample_text_1, + "other.rs": modified_sample_text_2, + "lib.rs": modified_sample_text_3, + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + let multi_buffer_editor = + cx.new_view(|cx| Editor::new(EditorMode::Full, multi_buffer, Some(project.clone()), cx)); + cx.executor().run_until_parked(); + + let expected_all_hunks = vec![ + ( + "bbbb\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(3)..DisplayRow(3), + ), + ( + "nnnn\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(16)..DisplayRow(17), + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(31)..DisplayRow(32), + ), + ]; + let expected_all_hunks_shifted = vec![ + ( + "bbbb\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(4)..DisplayRow(4), + ), + ( + "nnnn\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(18)..DisplayRow(19), + ), + ( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(33)..DisplayRow(34), + ), + ]; + + multi_buffer_editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!(all_hunks, expected_all_hunks); + assert_eq!(all_expanded_hunks, Vec::new()); + }); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.select_all(&SelectAll, cx); + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); + }); + cx.executor().run_until_parked(); + multi_buffer_editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(18)..=DisplayRow(18), + DisplayRow(33)..=DisplayRow(33) + ], + ); + assert_eq!(all_hunks, expected_all_hunks_shifted); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); + }); + cx.executor().run_until_parked(); + multi_buffer_editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!(all_hunks, expected_all_hunks); + assert_eq!(all_expanded_hunks, Vec::new()); + }); + + multi_buffer_editor.update(cx, |editor, cx| { + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); + }); + cx.executor().run_until_parked(); + multi_buffer_editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(18)..=DisplayRow(18), + DisplayRow(33)..=DisplayRow(33) + ], + ); + assert_eq!(all_hunks, expected_all_hunks_shifted); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + 
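+    // Toggling the hunk diffs once more collapses every expanded hunk again,
+    // returning the displayed hunk rows to their unshifted positions.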
multi_buffer_editor.update(cx, |editor, cx| { + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); + }); + cx.executor().run_until_parked(); + multi_buffer_editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!(all_hunks, expected_all_hunks); + assert_eq!(all_expanded_hunks, Vec::new()); + }); +} + +#[gpui::test] +async fn test_edits_around_toggled_additions( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(); + executor.run_until_parked(); + cx.set_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(4)..DisplayRow(7) + )] + ); + }); + cx.update_editor(|editor, cx| { + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(4)..DisplayRow(7) + )] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(4)..=DisplayRow(6)] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| editor.handle_input("const D: u32 = 42;\n", cx)); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(4)..DisplayRow(8) + )] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(4)..=DisplayRow(6)], + "Edited hunk should have one more line added" + ); + assert_eq!( + all_hunks, all_expanded_hunks, + "Expanded hunk should also grow with the addition" + ); + }); + + cx.update_editor(|editor, cx| editor.handle_input("const E: u32 = 42;\n", cx)); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; + const E: u32 = 42; + ˇ + + fn main() { 
+ println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(4)..DisplayRow(9) + )] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(4)..=DisplayRow(6)], + "Edited hunk should have one more line added" + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(4)..DisplayRow(8) + )] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(4)..=DisplayRow(6)], + "Deleting a line should shrint the hunk" + ); + assert_eq!( + all_hunks, all_expanded_hunks, + "Expanded hunk should also shrink with the addition" + ); + }); + + cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "".to_string(), + DiffHunkStatus::Added, + DisplayRow(5)..DisplayRow(6) + )] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(5)..=DisplayRow(5)] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.select_up_by_lines(&SelectUpByLines { lines: 5 }, cx); + editor.delete_line(&DeleteLine, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![ + ( + "use some::mod1;\nuse some::mod2;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(0)..DisplayRow(0) + ), + ( + "const A: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(2)..DisplayRow(2) + ) + ] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + Vec::new(), + "Should close all stale expanded addition hunks" + ); + assert_eq!( + all_expanded_hunks, + vec![( + "const A: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(2)..DisplayRow(2) + )], 
+ "Should open hunks that were adjacent to the stale addition one" + ); + }); +} + +#[gpui::test] +async fn test_edits_around_toggled_deletions( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(); + executor.run_until_parked(); + cx.set_state( + &r#" + use some::mod1; + use some::mod2; + + ˇconst B: u32 = 42; + const C: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(3)..DisplayRow(3) + )] + ); + }); + cx.update_editor(|editor, cx| { + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + ˇconst B: u32 = 42; + const C: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(4)..DisplayRow(4) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.delete_line(&DeleteLine, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + ˇconst C: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + Vec::new(), + "Deleted hunks do not highlight current editor's background" + ); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\nconst B: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(5)..DisplayRow(5) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.delete_line(&DeleteLine, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + ˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), + DiffHunkStatus::Removed, + DisplayRow(6)..DisplayRow(6) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + 
editor.handle_input("replacement", cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + replacementˇ + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(7)..DisplayRow(8) + )] + ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(7)..=DisplayRow(7)], + "Modified expanded hunks should display additions and highlight their background" + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); +} + +#[gpui::test] +async fn test_edits_around_toggled_modifications( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(); + executor.run_until_parked(); + cx.set_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 43ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "const C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(5)..DisplayRow(6) + )] + ); + }); + cx.update_editor(|editor, cx| { + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 43ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(6)..=DisplayRow(6)], + ); + assert_eq!( + all_hunks, + vec![( + "const C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(7) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.handle_input("\nnew_line\n", cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 43 + new_line + ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(6)..=DisplayRow(6)], + "Modified hunk should grow highlighted lines on more text additions" + ); + 
assert_eq!( + all_hunks, + vec![( + "const C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(9) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.move_up(&MoveUp, cx); + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + ˇconst C: u32 = 43 + new_line + + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(6)..=DisplayRow(8)], + ); + assert_eq!( + all_hunks, + vec![( + "const B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(9) + )], + "Modified hunk should grow deleted lines on text deletions above" + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.handle_input("v", cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + vˇconst A: u32 = 42; + const C: u32 = 43 + new_line + + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(6)..=DisplayRow(9)], + "Modified hunk should grow deleted lines on text modifications above" + ); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(10) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.move_down(&MoveDown, cx); + editor.move_down(&MoveDown, cx); + editor.delete_line(&DeleteLine, cx) + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + vconst A: u32 = 42; + const C: u32 = 43 + ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(6)..=DisplayRow(8)], + "Modified hunk should grow shrink lines on modification lines removal" + ); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(9) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.move_up(&MoveUp, cx); + editor.select_down_by_lines(&SelectDownByLines { lines: 4 }, cx); + editor.delete_line(&DeleteLine, cx) + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + ˇ + + fn main() { + 
println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + Vec::new(), + "Modified hunk should turn into a removed one on all modified lines removal" + ); + assert_eq!( + all_hunks, + vec![( + "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\nconst D: u32 = 42;\n" + .to_string(), + DiffHunkStatus::Removed, + DisplayRow(7)..DisplayRow(7) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); +} + +#[gpui::test] +async fn test_multiple_expanded_hunks_merge( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(); + executor.run_until_parked(); + cx.set_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 43ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + + cx.set_diff_base(Some(&diff_base)); + executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + assert_eq!( + all_hunks, + vec![( + "const C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(5)..DisplayRow(6) + )] + ); + }); + cx.update_editor(|editor, cx| { + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 43ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let all_hunks = editor_hunks(editor, &snapshot, cx); + let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(6)..=DisplayRow(6)], + ); + assert_eq!( + all_hunks, + vec![( + "const C: u32 = 42;\n".to_string(), + DiffHunkStatus::Modified, + DisplayRow(6)..DisplayRow(7) + )] + ); + assert_eq!(all_hunks, all_expanded_hunks); + }); + + cx.update_editor(|editor, cx| { + editor.handle_input("\nnew_line\n", cx); + }); + executor.run_until_parked(); + cx.assert_editor_state( + &r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + const C: u32 = 43 + new_line + ˇ + const D: u32 = 42; + + + fn main() { + println!("hello"); + + println!("world"); + }"# + .unindent(), + ); +} + +fn empty_range(row: usize, column: usize) -> Range { + let point = DisplayPoint::new(DisplayRow(row as u32), column as u32); + point..point +} + +fn assert_selection_ranges(marked_text: &str, view: &mut Editor, cx: &mut ViewContext) { + let (text, ranges) = marked_text_ranges(marked_text, true); + assert_eq!(view.text(cx), text); + assert_eq!( + view.selections.ranges(cx), + ranges, + "Assert selections are {}", + marked_text + ); +} + +/// Handle completion request passing a marked string specifying where the completion +/// should be triggered from using '|' 
character, what range should be replaced, and what completions
+/// should be returned, using '<' and '>' to delimit the range
+pub fn handle_completion_request(
+    cx: &mut EditorLspTestContext,
+    marked_string: &str,
+    completions: Vec<&'static str>,
+    counter: Arc<AtomicUsize>,
+) -> impl Future<Output = ()> {
+    let complete_from_marker: TextRangeMarker = '|'.into();
+    let replace_range_marker: TextRangeMarker = ('<', '>').into();
+    let (_, mut marked_ranges) = marked_text_ranges_by(
+        marked_string,
+        vec![complete_from_marker.clone(), replace_range_marker.clone()],
+    );
+
+    let complete_from_position =
+        cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start);
+    let replace_range =
+        cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone());
+
+    let mut request = cx.handle_request::<lsp::request::Completion, _, _>(move |url, params, _| {
+        let completions = completions.clone();
+        counter.fetch_add(1, atomic::Ordering::Release);
+        async move {
+            assert_eq!(params.text_document_position.text_document.uri, url.clone());
+            assert_eq!(
+                params.text_document_position.position,
+                complete_from_position
+            );
+            Ok(Some(lsp::CompletionResponse::Array(
+                completions
+                    .iter()
+                    .map(|completion_text| lsp::CompletionItem {
+                        label: completion_text.to_string(),
+                        text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
+                            range: replace_range,
+                            new_text: completion_text.to_string(),
+                        })),
+                        ..Default::default()
+                    })
+                    .collect(),
+            )))
+        }
+    });
+
+    async move {
+        request.next().await;
+    }
+}
+
+fn handle_resolve_completion_request(
+    cx: &mut EditorLspTestContext,
+    edits: Option<Vec<(&'static str, &'static str)>>,
+) -> impl Future<Output = ()> {
+    let edits = edits.map(|edits| {
+        edits
+            .iter()
+            .map(|(marked_string, new_text)| {
+                let (_, marked_ranges) = marked_text_ranges(marked_string, false);
+                let replace_range = cx.to_lsp_range(marked_ranges[0].clone());
+                lsp::TextEdit::new(replace_range, new_text.to_string())
+            })
+            .collect::<Vec<_>>()
+    });
+
+    let mut request =
+        cx.handle_request::<lsp::request::ResolveCompletionItem, _, _>(move |_, _, _| {
+            let edits = edits.clone();
+            async move {
+                Ok(lsp::CompletionItem {
+                    additional_text_edits: edits,
+                    ..Default::default()
+                })
+            }
+        });
+
+    async move {
+        request.next().await;
+    }
+}
+
+pub(crate) fn update_test_language_settings(
+    cx: &mut TestAppContext,
+    f: impl Fn(&mut AllLanguageSettingsContent),
+) {
+    _ = cx.update(|cx| {
+        SettingsStore::update_global(cx, |store, cx| {
+            store.update_user_settings::<AllLanguageSettings>(cx, f);
+        });
+    });
+}
+
+pub(crate) fn update_test_project_settings(
+    cx: &mut TestAppContext,
+    f: impl Fn(&mut ProjectSettings),
+) {
+    _ = cx.update(|cx| {
+        SettingsStore::update_global(cx, |store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, f);
+        });
+    });
+}
+
+pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
+    _ = cx.update(|cx| {
+        let store = SettingsStore::test(cx);
+        cx.set_global(store);
+        theme::init(theme::LoadThemes::JustBase, cx);
+        release_channel::init("0.0.0", cx);
+        client::init_settings(cx);
+        language::init(cx);
+        Project::init_settings(cx);
+        workspace::init_settings(cx);
+        crate::init(cx);
+    });
+
+    update_test_language_settings(cx, f);
+}
+
+pub(crate) fn rust_lang() -> Arc<Language> {
+    Arc::new(Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            matcher: LanguageMatcher {
+                path_suffixes: vec!["rs".to_string()],
+                ..Default::default()
+            },
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    ))
+}
+
+#[track_caller]
+fn assert_hunk_revert(
+    not_reverted_text_with_selections: &str,
+    expected_not_reverted_hunk_statuses: Vec<DiffHunkStatus>,
+    
expected_reverted_text_with_selections: &str, + base_text: &str, + cx: &mut EditorLspTestContext, +) { + cx.set_state(not_reverted_text_with_selections); + cx.update_editor(|editor, cx| { + editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_diff_base(Some(base_text.into()), cx); + }); + }); + cx.executor().run_until_parked(); + + let reverted_hunk_statuses = cx.update_editor(|editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let reverted_hunk_statuses = snapshot + .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + .map(|hunk| hunk_status(&hunk)) + .collect::>(); + + editor.revert_selected_hunks(&RevertSelectedHunks, cx); + reverted_hunk_statuses + }); + cx.executor().run_until_parked(); + cx.assert_editor_state(expected_reverted_text_with_selections); + assert_eq!(reverted_hunk_statuses, expected_not_reverted_hunk_statuses); +} diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs new file mode 100644 index 0000000..37e8bf4 --- /dev/null +++ b/crates/editor/src/element.rs @@ -0,0 +1,5478 @@ +use crate::{ + blame_entry_tooltip::{blame_entry_relative_timestamp, BlameEntryTooltip}, + display_map::{ + BlockContext, BlockStyle, DisplaySnapshot, FoldStatus, HighlightedChunk, ToDisplayPoint, + TransformBlock, + }, + editor_settings::{ + CurrentLineHighlight, DoubleClickInMultibuffer, MultiCursorModifier, ShowScrollbar, + }, + git::{ + blame::{CommitDetails, GitBlame}, + diff_hunk_to_display, DisplayDiffHunk, + }, + hover_popover::{ + self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT, + }, + hunk_status, + items::BufferSearchHighlights, + mouse_context_menu::{self, MouseContextMenu}, + scroll::scroll_amount::ScrollAmount, + CodeActionsMenu, CursorShape, DisplayPoint, DisplayRow, DocumentHighlightRead, + DocumentHighlightWrite, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, + ExpandExcerpts, GutterDimensions, HalfPageDown, HalfPageUp, HoveredCursor, HunkToExpand, + LineDown, LineUp, OpenExcerpts, PageDown, PageUp, Point, RowExt, RowRangeExt, SelectPhase, + Selection, SoftWrap, ToPoint, CURSORS_VISIBLE_FOR, MAX_LINE_LEN, +}; +use anyhow::Result; +use client::ParticipantIndex; +use collections::{BTreeMap, HashMap}; +use git::{blame::BlameEntry, diff::DiffHunkStatus, Oid}; +use gpui::{ + anchored, deferred, div, fill, outline, point, px, quad, relative, size, svg, + transparent_black, Action, AnchorCorner, AnyElement, AvailableSpace, Bounds, ClipboardItem, + ContentMask, Corners, CursorStyle, DispatchPhase, Edges, Element, ElementInputHandler, Entity, + GlobalElementId, Hitbox, Hsla, InteractiveElement, IntoElement, ModifiersChangedEvent, + MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, + ScrollDelta, ScrollWheelEvent, ShapedLine, SharedString, Size, Stateful, + StatefulInteractiveElement, Style, Styled, TextRun, TextStyle, TextStyleRefinement, View, + ViewContext, WeakView, WindowContext, +}; +use itertools::Itertools; +use language::language_settings::ShowWhitespaceSetting; +use lsp::DiagnosticSeverity; +use multi_buffer::{Anchor, MultiBufferPoint, MultiBufferRow}; +use project::{ + project_settings::{GitGutterSetting, ProjectSettings}, + ProjectPath, +}; +use settings::Settings; +use smallvec::SmallVec; +use std::{ + any::TypeId, + borrow::Cow, + cmp::{self, max, Ordering}, + fmt::Write, + iter, mem, + ops::{Deref, Range}, + sync::Arc, +}; +use sum_tree::Bias; +use theme::{ActiveTheme, 
PlayerColor};
+use ui::prelude::*;
+use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip};
+use util::ResultExt;
+use workspace::{item::Item, Workspace};
+
+struct SelectionLayout {
+    head: DisplayPoint,
+    cursor_shape: CursorShape,
+    is_newest: bool,
+    is_local: bool,
+    range: Range<DisplayPoint>,
+    active_rows: Range<DisplayRow>,
+    user_name: Option<SharedString>,
+}
+
+impl SelectionLayout {
+    fn new(
+        selection: Selection<Point>,
+        line_mode: bool,
+        cursor_shape: CursorShape,
+        map: &DisplaySnapshot,
+        is_newest: bool,
+        is_local: bool,
+        user_name: Option<SharedString>,
+    ) -> Self {
+        let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
+        let display_selection = point_selection.map(|p| p.to_display_point(map));
+        let mut range = display_selection.range();
+        let mut head = display_selection.head();
+        let mut active_rows = map.prev_line_boundary(point_selection.start).1.row()
+            ..map.next_line_boundary(point_selection.end).1.row();
+
+        // vim visual line mode
+        if line_mode {
+            let point_range = map.expand_to_line(point_selection.range());
+            range = point_range.start.to_display_point(map)..point_range.end.to_display_point(map);
+        }
+
+        // any vim visual mode (including line mode)
+        if (cursor_shape == CursorShape::Block || cursor_shape == CursorShape::Hollow)
+            && !range.is_empty()
+            && !selection.reversed
+        {
+            if head.column() > 0 {
+                head = map.clip_point(DisplayPoint::new(head.row(), head.column() - 1), Bias::Left)
+            } else if head.row().0 > 0 && head != map.max_point() {
+                head = map.clip_point(
+                    DisplayPoint::new(
+                        head.row().previous_row(),
+                        map.line_len(head.row().previous_row()),
+                    ),
+                    Bias::Left,
+                );
+                // updating range.end is a no-op unless your cursor is
+                // on the newline containing a multi-buffer divider,
+                // in which case the clip_point may have moved the head up
+                // an additional row.
+ range.end = DisplayPoint::new(head.row().next_row(), 0); + active_rows.end = head.row(); + } + } + + Self { + head, + cursor_shape, + is_newest, + is_local, + range, + active_rows, + user_name, + } + } +} + +pub struct EditorElement { + editor: View, + style: EditorStyle, +} + +type DisplayRowDelta = u32; + +impl EditorElement { + pub(crate) const SCROLLBAR_WIDTH: Pixels = px(13.); + + pub fn new(editor: &View, style: EditorStyle) -> Self { + Self { + editor: editor.clone(), + style, + } + } + + fn register_actions(&self, cx: &mut WindowContext) { + let view = &self.editor; + view.update(cx, |editor, cx| { + for action in editor.editor_actions.iter() { + (action)(cx) + } + }); + + crate::rust_analyzer_ext::apply_related_actions(view, cx); + register_action(view, cx, Editor::move_left); + register_action(view, cx, Editor::move_right); + register_action(view, cx, Editor::move_down); + register_action(view, cx, Editor::move_down_by_lines); + register_action(view, cx, Editor::select_down_by_lines); + register_action(view, cx, Editor::move_up); + register_action(view, cx, Editor::move_up_by_lines); + register_action(view, cx, Editor::select_up_by_lines); + register_action(view, cx, Editor::cancel); + register_action(view, cx, Editor::newline); + register_action(view, cx, Editor::newline_above); + register_action(view, cx, Editor::newline_below); + register_action(view, cx, Editor::backspace); + register_action(view, cx, Editor::delete); + register_action(view, cx, Editor::tab); + register_action(view, cx, Editor::tab_prev); + register_action(view, cx, Editor::indent); + register_action(view, cx, Editor::outdent); + register_action(view, cx, Editor::delete_line); + register_action(view, cx, Editor::join_lines); + register_action(view, cx, Editor::sort_lines_case_sensitive); + register_action(view, cx, Editor::sort_lines_case_insensitive); + register_action(view, cx, Editor::reverse_lines); + register_action(view, cx, Editor::shuffle_lines); + register_action(view, cx, Editor::convert_to_upper_case); + register_action(view, cx, Editor::convert_to_lower_case); + register_action(view, cx, Editor::convert_to_title_case); + register_action(view, cx, Editor::convert_to_snake_case); + register_action(view, cx, Editor::convert_to_kebab_case); + register_action(view, cx, Editor::convert_to_upper_camel_case); + register_action(view, cx, Editor::convert_to_lower_camel_case); + register_action(view, cx, Editor::convert_to_opposite_case); + register_action(view, cx, Editor::delete_to_previous_word_start); + register_action(view, cx, Editor::delete_to_previous_subword_start); + register_action(view, cx, Editor::delete_to_next_word_end); + register_action(view, cx, Editor::delete_to_next_subword_end); + register_action(view, cx, Editor::delete_to_beginning_of_line); + register_action(view, cx, Editor::delete_to_end_of_line); + register_action(view, cx, Editor::cut_to_end_of_line); + register_action(view, cx, Editor::duplicate_line_up); + register_action(view, cx, Editor::duplicate_line_down); + register_action(view, cx, Editor::move_line_up); + register_action(view, cx, Editor::move_line_down); + register_action(view, cx, Editor::transpose); + register_action(view, cx, Editor::cut); + register_action(view, cx, Editor::copy); + register_action(view, cx, Editor::paste); + register_action(view, cx, Editor::undo); + register_action(view, cx, Editor::redo); + register_action(view, cx, Editor::move_page_up); + register_action(view, cx, Editor::move_page_down); + register_action(view, cx, Editor::next_screen); + 
register_action(view, cx, Editor::scroll_cursor_top); + register_action(view, cx, Editor::scroll_cursor_center); + register_action(view, cx, Editor::scroll_cursor_bottom); + register_action(view, cx, |editor, _: &LineDown, cx| { + editor.scroll_screen(&ScrollAmount::Line(1.), cx) + }); + register_action(view, cx, |editor, _: &LineUp, cx| { + editor.scroll_screen(&ScrollAmount::Line(-1.), cx) + }); + register_action(view, cx, |editor, _: &HalfPageDown, cx| { + editor.scroll_screen(&ScrollAmount::Page(0.5), cx) + }); + register_action(view, cx, |editor, _: &HalfPageUp, cx| { + editor.scroll_screen(&ScrollAmount::Page(-0.5), cx) + }); + register_action(view, cx, |editor, _: &PageDown, cx| { + editor.scroll_screen(&ScrollAmount::Page(1.), cx) + }); + register_action(view, cx, |editor, _: &PageUp, cx| { + editor.scroll_screen(&ScrollAmount::Page(-1.), cx) + }); + register_action(view, cx, Editor::move_to_previous_word_start); + register_action(view, cx, Editor::move_to_previous_subword_start); + register_action(view, cx, Editor::move_to_next_word_end); + register_action(view, cx, Editor::move_to_next_subword_end); + register_action(view, cx, Editor::move_to_beginning_of_line); + register_action(view, cx, Editor::move_to_end_of_line); + register_action(view, cx, Editor::move_to_start_of_paragraph); + register_action(view, cx, Editor::move_to_end_of_paragraph); + register_action(view, cx, Editor::move_to_beginning); + register_action(view, cx, Editor::move_to_end); + register_action(view, cx, Editor::select_up); + register_action(view, cx, Editor::select_down); + register_action(view, cx, Editor::select_left); + register_action(view, cx, Editor::select_right); + register_action(view, cx, Editor::select_to_previous_word_start); + register_action(view, cx, Editor::select_to_previous_subword_start); + register_action(view, cx, Editor::select_to_next_word_end); + register_action(view, cx, Editor::select_to_next_subword_end); + register_action(view, cx, Editor::select_to_beginning_of_line); + register_action(view, cx, Editor::select_to_end_of_line); + register_action(view, cx, Editor::select_to_start_of_paragraph); + register_action(view, cx, Editor::select_to_end_of_paragraph); + register_action(view, cx, Editor::select_to_beginning); + register_action(view, cx, Editor::select_to_end); + register_action(view, cx, Editor::select_all); + register_action(view, cx, |editor, action, cx| { + editor.select_all_matches(action, cx).log_err(); + }); + register_action(view, cx, Editor::select_line); + register_action(view, cx, Editor::split_selection_into_lines); + register_action(view, cx, Editor::add_selection_above); + register_action(view, cx, Editor::add_selection_below); + register_action(view, cx, |editor, action, cx| { + editor.select_next(action, cx).log_err(); + }); + register_action(view, cx, |editor, action, cx| { + editor.select_previous(action, cx).log_err(); + }); + register_action(view, cx, Editor::toggle_comments); + register_action(view, cx, Editor::select_larger_syntax_node); + register_action(view, cx, Editor::select_smaller_syntax_node); + register_action(view, cx, Editor::move_to_enclosing_bracket); + register_action(view, cx, Editor::undo_selection); + register_action(view, cx, Editor::redo_selection); + if !view.read(cx).is_singleton(cx) { + register_action(view, cx, Editor::expand_excerpts); + } + register_action(view, cx, Editor::go_to_diagnostic); + register_action(view, cx, Editor::go_to_prev_diagnostic); + register_action(view, cx, Editor::go_to_hunk); + register_action(view, cx, 
Editor::go_to_prev_hunk); + register_action(view, cx, |editor, a, cx| { + editor.go_to_definition(a, cx).detach_and_log_err(cx); + }); + register_action(view, cx, |editor, a, cx| { + editor.go_to_definition_split(a, cx).detach_and_log_err(cx); + }); + register_action(view, cx, |editor, a, cx| { + editor.go_to_implementation(a, cx).detach_and_log_err(cx); + }); + register_action(view, cx, |editor, a, cx| { + editor + .go_to_implementation_split(a, cx) + .detach_and_log_err(cx); + }); + register_action(view, cx, |editor, a, cx| { + editor.go_to_type_definition(a, cx).detach_and_log_err(cx); + }); + register_action(view, cx, |editor, a, cx| { + editor + .go_to_type_definition_split(a, cx) + .detach_and_log_err(cx); + }); + register_action(view, cx, Editor::open_url); + register_action(view, cx, Editor::fold); + register_action(view, cx, Editor::fold_at); + register_action(view, cx, Editor::unfold_lines); + register_action(view, cx, Editor::unfold_at); + register_action(view, cx, Editor::fold_selected_ranges); + register_action(view, cx, Editor::show_completions); + register_action(view, cx, Editor::toggle_code_actions); + register_action(view, cx, Editor::open_excerpts); + register_action(view, cx, Editor::open_excerpts_in_split); + register_action(view, cx, Editor::toggle_soft_wrap); + register_action(view, cx, Editor::toggle_line_numbers); + register_action(view, cx, Editor::toggle_inlay_hints); + register_action(view, cx, hover_popover::hover); + register_action(view, cx, Editor::reveal_in_finder); + register_action(view, cx, Editor::copy_path); + register_action(view, cx, Editor::copy_relative_path); + register_action(view, cx, Editor::copy_highlight_json); + register_action(view, cx, Editor::copy_permalink_to_line); + register_action(view, cx, Editor::open_permalink_to_line); + register_action(view, cx, Editor::toggle_git_blame); + register_action(view, cx, Editor::toggle_git_blame_inline); + register_action(view, cx, Editor::toggle_hunk_diff); + register_action(view, cx, Editor::expand_all_hunk_diffs); + register_action(view, cx, |editor, action, cx| { + if let Some(task) = editor.format(action, cx) { + task.detach_and_log_err(cx); + } else { + cx.propagate(); + } + }); + register_action(view, cx, Editor::restart_language_server); + register_action(view, cx, Editor::show_character_palette); + register_action(view, cx, |editor, action, cx| { + if let Some(task) = editor.confirm_completion(action, cx) { + task.detach_and_log_err(cx); + } else { + cx.propagate(); + } + }); + register_action(view, cx, |editor, action, cx| { + if let Some(task) = editor.confirm_code_action(action, cx) { + task.detach_and_log_err(cx); + } else { + cx.propagate(); + } + }); + register_action(view, cx, |editor, action, cx| { + if let Some(task) = editor.rename(action, cx) { + task.detach_and_log_err(cx); + } else { + cx.propagate(); + } + }); + register_action(view, cx, |editor, action, cx| { + if let Some(task) = editor.confirm_rename(action, cx) { + task.detach_and_log_err(cx); + } else { + cx.propagate(); + } + }); + register_action(view, cx, |editor, action, cx| { + if let Some(task) = editor.find_all_references(action, cx) { + task.detach_and_log_err(cx); + } else { + cx.propagate(); + } + }); + register_action(view, cx, Editor::next_inline_completion); + register_action(view, cx, Editor::previous_inline_completion); + register_action(view, cx, Editor::show_inline_completion); + register_action(view, cx, Editor::context_menu_first); + register_action(view, cx, Editor::context_menu_prev); + 
register_action(view, cx, Editor::context_menu_next); + register_action(view, cx, Editor::context_menu_last); + register_action(view, cx, Editor::display_cursor_names); + register_action(view, cx, Editor::unique_lines_case_insensitive); + register_action(view, cx, Editor::unique_lines_case_sensitive); + register_action(view, cx, Editor::accept_partial_inline_completion); + register_action(view, cx, Editor::revert_selected_hunks); + register_action(view, cx, Editor::open_active_item_in_terminal) + } + + fn register_key_listeners(&self, cx: &mut WindowContext, layout: &EditorLayout) { + let position_map = layout.position_map.clone(); + cx.on_key_event({ + let editor = self.editor.clone(); + let text_hitbox = layout.text_hitbox.clone(); + move |event: &ModifiersChangedEvent, phase, cx| { + if phase != DispatchPhase::Bubble { + return; + } + + editor.update(cx, |editor, cx| { + Self::modifiers_changed(editor, event, &position_map, &text_hitbox, cx) + }) + } + }); + } + + fn modifiers_changed( + editor: &mut Editor, + event: &ModifiersChangedEvent, + position_map: &PositionMap, + text_hitbox: &Hitbox, + cx: &mut ViewContext, + ) { + let mouse_position = cx.mouse_position(); + if !text_hitbox.is_hovered(cx) { + return; + } + + editor.update_hovered_link( + position_map.point_for_position(text_hitbox.bounds, mouse_position), + &position_map.snapshot, + event.modifiers, + cx, + ) + } + + fn mouse_left_down( + editor: &mut Editor, + event: &MouseDownEvent, + hovered_hunk: Option<&HunkToExpand>, + position_map: &PositionMap, + text_hitbox: &Hitbox, + gutter_hitbox: &Hitbox, + cx: &mut ViewContext, + ) { + if cx.default_prevented() { + return; + } + + let mut click_count = event.click_count; + let mut modifiers = event.modifiers; + + if let Some(hovered_hunk) = hovered_hunk { + editor.expand_diff_hunk(None, hovered_hunk, cx); + cx.notify(); + return; + } else if gutter_hitbox.is_hovered(cx) { + click_count = 3; // Simulate triple-click when clicking the gutter to select lines + } else if !text_hitbox.is_hovered(cx) { + return; + } + + if click_count == 2 && !editor.buffer().read(cx).is_singleton() { + match EditorSettings::get_global(cx).double_click_in_multibuffer { + DoubleClickInMultibuffer::Select => { + // do nothing special on double click, all selection logic is below + } + DoubleClickInMultibuffer::Open => { + if modifiers.alt { + // if double click is made with alt, pretend it's a regular double click without opening and alt, + // and run the selection logic. 
+ modifiers.alt = false; + } else { + // if double click is made without alt, open the corresponding excerp + editor.open_excerpts(&OpenExcerpts, cx); + return; + } + } + } + } + + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + let position = point_for_position.previous_valid; + if modifiers.shift && modifiers.alt { + editor.select( + SelectPhase::BeginColumnar { + position, + reset: false, + goal_column: point_for_position.exact_unclipped.column(), + }, + cx, + ); + } else if modifiers.shift && !modifiers.control && !modifiers.alt && !modifiers.secondary() + { + editor.select( + SelectPhase::Extend { + position, + click_count, + }, + cx, + ); + } else { + let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier; + let multi_cursor_modifier = match multi_cursor_setting { + MultiCursorModifier::Alt => modifiers.alt, + MultiCursorModifier::CmdOrCtrl => modifiers.secondary(), + }; + editor.select( + SelectPhase::Begin { + position, + add: multi_cursor_modifier, + click_count, + }, + cx, + ); + } + + cx.stop_propagation(); + } + + fn mouse_right_down( + editor: &mut Editor, + event: &MouseDownEvent, + position_map: &PositionMap, + text_hitbox: &Hitbox, + cx: &mut ViewContext, + ) { + if !text_hitbox.is_hovered(cx) { + return; + } + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + mouse_context_menu::deploy_context_menu( + editor, + event.position, + point_for_position.previous_valid, + cx, + ); + cx.stop_propagation(); + } + + fn mouse_middle_down( + editor: &mut Editor, + event: &MouseDownEvent, + position_map: &PositionMap, + text_hitbox: &Hitbox, + cx: &mut ViewContext, + ) { + if cx.default_prevented() { + return; + } + + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + let position = point_for_position.previous_valid; + + editor.select( + SelectPhase::BeginColumnar { + position, + reset: true, + goal_column: point_for_position.exact_unclipped.column(), + }, + cx, + ); + } + + fn mouse_up( + editor: &mut Editor, + event: &MouseUpEvent, + position_map: &PositionMap, + text_hitbox: &Hitbox, + cx: &mut ViewContext, + ) { + let end_selection = editor.has_pending_selection(); + let pending_nonempty_selections = editor.has_pending_nonempty_selection(); + + if end_selection { + editor.select(SelectPhase::End, cx); + } + + let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier; + let multi_cursor_modifier = match multi_cursor_setting { + MultiCursorModifier::Alt => event.modifiers.secondary(), + MultiCursorModifier::CmdOrCtrl => event.modifiers.alt, + }; + + if !pending_nonempty_selections && multi_cursor_modifier && text_hitbox.is_hovered(cx) { + let point = position_map.point_for_position(text_hitbox.bounds, event.position); + editor.handle_click_hovered_link(point, event.modifiers, cx); + + cx.stop_propagation(); + } else if end_selection { + cx.stop_propagation(); + } else if cfg!(target_os = "linux") && event.button == MouseButton::Middle { + if !text_hitbox.is_hovered(cx) || editor.read_only(cx) { + return; + } + + #[cfg(target_os = "linux")] + if let Some(item) = cx.read_from_clipboard() { + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + let position = point_for_position.previous_valid; + + editor.select( + SelectPhase::Begin { + position, + add: false, + click_count: 1, + }, + cx, + ); + editor.insert(item.text(), cx); + } + cx.stop_propagation() 
+ } + } + + fn mouse_dragged( + editor: &mut Editor, + event: &MouseMoveEvent, + position_map: &PositionMap, + text_bounds: Bounds, + cx: &mut ViewContext, + ) { + if !editor.has_pending_selection() { + return; + } + + let point_for_position = position_map.point_for_position(text_bounds, event.position); + let mut scroll_delta = gpui::Point::::default(); + let vertical_margin = position_map.line_height.min(text_bounds.size.height / 3.0); + let top = text_bounds.origin.y + vertical_margin; + let bottom = text_bounds.lower_left().y - vertical_margin; + if event.position.y < top { + scroll_delta.y = -scale_vertical_mouse_autoscroll_delta(top - event.position.y); + } + if event.position.y > bottom { + scroll_delta.y = scale_vertical_mouse_autoscroll_delta(event.position.y - bottom); + } + + let horizontal_margin = position_map.line_height.min(text_bounds.size.width / 3.0); + let left = text_bounds.origin.x + horizontal_margin; + let right = text_bounds.upper_right().x - horizontal_margin; + if event.position.x < left { + scroll_delta.x = -scale_horizontal_mouse_autoscroll_delta(left - event.position.x); + } + if event.position.x > right { + scroll_delta.x = scale_horizontal_mouse_autoscroll_delta(event.position.x - right); + } + + editor.select( + SelectPhase::Update { + position: point_for_position.previous_valid, + goal_column: point_for_position.exact_unclipped.column(), + scroll_delta, + }, + cx, + ); + } + + fn mouse_moved( + editor: &mut Editor, + event: &MouseMoveEvent, + position_map: &PositionMap, + text_hitbox: &Hitbox, + gutter_hitbox: &Hitbox, + cx: &mut ViewContext, + ) { + let modifiers = event.modifiers; + let gutter_hovered = gutter_hitbox.is_hovered(cx); + editor.set_gutter_hovered(gutter_hovered, cx); + + // Don't trigger hover popover if mouse is hovering over context menu + if text_hitbox.is_hovered(cx) { + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + + editor.update_hovered_link(point_for_position, &position_map.snapshot, modifiers, cx); + + if let Some(point) = point_for_position.as_valid() { + let anchor = position_map + .snapshot + .buffer_snapshot + .anchor_before(point.to_offset(&position_map.snapshot, Bias::Left)); + hover_at(editor, Some(anchor), cx); + Self::update_visible_cursor(editor, point, position_map, cx); + } else { + hover_at(editor, None, cx); + } + } else { + editor.hide_hovered_link(cx); + hover_at(editor, None, cx); + if gutter_hovered { + cx.stop_propagation(); + } + } + } + + fn update_visible_cursor( + editor: &mut Editor, + point: DisplayPoint, + position_map: &PositionMap, + cx: &mut ViewContext, + ) { + let snapshot = &position_map.snapshot; + let Some(hub) = editor.collaboration_hub() else { + return; + }; + let range = DisplayPoint::new(point.row(), point.column().saturating_sub(1)) + ..DisplayPoint::new( + point.row(), + (point.column() + 1).min(snapshot.line_len(point.row())), + ); + + let range = snapshot + .buffer_snapshot + .anchor_at(range.start.to_point(&snapshot.display_snapshot), Bias::Left) + ..snapshot + .buffer_snapshot + .anchor_at(range.end.to_point(&snapshot.display_snapshot), Bias::Right); + + let Some(selection) = snapshot.remote_selections_in_range(&range, hub, cx).next() else { + return; + }; + let key = crate::HoveredCursor { + replica_id: selection.replica_id, + selection_id: selection.selection.id, + }; + editor.hovered_cursors.insert( + key.clone(), + cx.spawn(|editor, mut cx| async move { + cx.background_executor().timer(CURSORS_VISIBLE_FOR).await; + editor + 
.update(&mut cx, |editor, cx| { + editor.hovered_cursors.remove(&key); + cx.notify(); + }) + .ok(); + }), + ); + cx.notify() + } + + fn layout_selections( + &self, + start_anchor: Anchor, + end_anchor: Anchor, + snapshot: &EditorSnapshot, + start_row: DisplayRow, + end_row: DisplayRow, + cx: &mut WindowContext, + ) -> ( + Vec<(PlayerColor, Vec)>, + BTreeMap, + Option, + ) { + let mut selections: Vec<(PlayerColor, Vec)> = Vec::new(); + let mut active_rows = BTreeMap::new(); + let mut newest_selection_head = None; + let editor = self.editor.read(cx); + + if editor.show_local_selections { + let mut local_selections: Vec> = editor + .selections + .disjoint_in_range(start_anchor..end_anchor, cx); + local_selections.extend(editor.selections.pending(cx)); + let mut layouts = Vec::new(); + let newest = editor.selections.newest(cx); + for selection in local_selections.drain(..) { + let is_empty = selection.start == selection.end; + let is_newest = selection == newest; + + let layout = SelectionLayout::new( + selection, + editor.selections.line_mode, + editor.cursor_shape, + &snapshot.display_snapshot, + is_newest, + editor.leader_peer_id.is_none(), + None, + ); + if is_newest { + newest_selection_head = Some(layout.head); + } + + for row in cmp::max(layout.active_rows.start.0, start_row.0) + ..=cmp::min(layout.active_rows.end.0, end_row.0) + { + let contains_non_empty_selection = + active_rows.entry(DisplayRow(row)).or_insert(!is_empty); + *contains_non_empty_selection |= !is_empty; + } + layouts.push(layout); + } + + let player = if editor.read_only(cx) { + cx.theme().players().read_only() + } else { + self.style.local_player + }; + + selections.push((player, layouts)); + } + + if let Some(collaboration_hub) = &editor.collaboration_hub { + // When following someone, render the local selections in their color. + if let Some(leader_id) = editor.leader_peer_id { + if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id) { + if let Some(participant_index) = collaboration_hub + .user_participant_indices(cx) + .get(&collaborator.user_id) + { + if let Some((local_selection_style, _)) = selections.first_mut() { + *local_selection_style = cx + .theme() + .players() + .color_for_participant(participant_index.0); + } + } + } + } + + let mut remote_selections = HashMap::default(); + for selection in snapshot.remote_selections_in_range( + &(start_anchor..end_anchor), + collaboration_hub.as_ref(), + cx, + ) { + let selection_style = Self::get_participant_color(selection.participant_index, cx); + + // Don't re-render the leader's selections, since the local selections + // match theirs. 
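// --- Illustrative aside, not part of the diff ---
// Sketch of the `active_rows` bookkeeping in `layout_selections` above: every
// display row touched by a local selection is recorded together with a flag
// saying whether any selection covering that row is non-empty.
// `paint_background` later walks this map and only draws the "active line"
// highlight for rows whose flag stayed false (rows that merely hold a caret).
// Simplified types; `u32` stands in for `DisplayRow`.
use std::collections::BTreeMap;
use std::ops::RangeInclusive;

pub fn accumulate_active_rows(
    selections: &[(RangeInclusive<u32>, bool)], // (visible row range, is_empty)
    active_rows: &mut BTreeMap<u32, bool>,      // row -> contains a non-empty selection
) {
    for (rows, is_empty) in selections {
        let has_content = !*is_empty;
        for row in rows.clone() {
            let entry = active_rows.entry(row).or_insert(has_content);
            *entry |= has_content;
        }
    }
}
// --- end aside ---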
+ if Some(selection.peer_id) == editor.leader_peer_id { + continue; + } + let key = HoveredCursor { + replica_id: selection.replica_id, + selection_id: selection.selection.id, + }; + + let is_shown = + editor.show_cursor_names || editor.hovered_cursors.contains_key(&key); + + remote_selections + .entry(selection.replica_id) + .or_insert((selection_style, Vec::new())) + .1 + .push(SelectionLayout::new( + selection.selection, + selection.line_mode, + selection.cursor_shape, + &snapshot.display_snapshot, + false, + false, + if is_shown { selection.user_name } else { None }, + )); + } + + selections.extend(remote_selections.into_values()); + } + (selections, active_rows, newest_selection_head) + } + + #[allow(clippy::too_many_arguments)] + fn layout_folds( + &self, + snapshot: &EditorSnapshot, + content_origin: gpui::Point, + visible_anchor_range: Range, + visible_display_row_range: Range, + scroll_pixel_position: gpui::Point, + line_height: Pixels, + line_layouts: &[LineWithInvisibles], + cx: &mut WindowContext, + ) -> Vec { + snapshot + .folds_in_range(visible_anchor_range.clone()) + .filter_map(|fold| { + let fold_range = fold.range.clone(); + let display_range = fold.range.start.to_display_point(&snapshot) + ..fold.range.end.to_display_point(&snapshot); + debug_assert_eq!(display_range.start.row(), display_range.end.row()); + let row = display_range.start.row(); + debug_assert!(row < visible_display_row_range.end); + let line_layout = line_layouts + .get(row.minus(visible_display_row_range.start) as usize) + .map(|l| &l.line)?; + + let start_x = content_origin.x + + line_layout.x_for_index(display_range.start.column() as usize) + - scroll_pixel_position.x; + let start_y = + content_origin.y + row.as_f32() * line_height - scroll_pixel_position.y; + let end_x = content_origin.x + + line_layout.x_for_index(display_range.end.column() as usize) + - scroll_pixel_position.x; + + let fold_bounds = Bounds { + origin: point(start_x, start_y), + size: size(end_x - start_x, line_height), + }; + + let mut hover_element = div() + .id(fold.id) + .size_full() + .cursor_pointer() + .on_mouse_down(MouseButton::Left, |_, cx| cx.stop_propagation()) + .on_click( + cx.listener_for(&self.editor, move |editor: &mut Editor, _, cx| { + editor.unfold_ranges( + [fold_range.start..fold_range.end], + true, + false, + cx, + ); + cx.stop_propagation(); + }), + ) + .into_any(); + hover_element.prepaint_as_root(fold_bounds.origin, fold_bounds.size.into(), cx); + Some(FoldLayout { + display_range, + hover_element, + }) + }) + .collect() + } + + fn collect_cursors( + &self, + snapshot: &EditorSnapshot, + cx: &mut WindowContext, + ) -> Vec<(DisplayPoint, Hsla)> { + let editor = self.editor.read(cx); + let mut cursors = Vec::new(); + let mut skip_local = false; + let mut add_cursor = |anchor: Anchor, color| { + cursors.push((anchor.to_display_point(&snapshot.display_snapshot), color)); + }; + // Remote cursors + if let Some(collaboration_hub) = &editor.collaboration_hub { + for remote_selection in snapshot.remote_selections_in_range( + &(Anchor::min()..Anchor::max()), + collaboration_hub.deref(), + cx, + ) { + let color = Self::get_participant_color(remote_selection.participant_index, cx); + add_cursor(remote_selection.selection.head(), color.cursor); + if Some(remote_selection.peer_id) == editor.leader_peer_id { + skip_local = true; + } + } + } + // Local cursors + if !skip_local { + let color = cx.theme().players().local().cursor; + editor.selections.disjoint.iter().for_each(|selection| { + add_cursor(selection.head(), 
color); + }); + if let Some(ref selection) = editor.selections.pending_anchor() { + add_cursor(selection.head(), color); + } + } + cursors + } + + #[allow(clippy::too_many_arguments)] + fn layout_visible_cursors( + &self, + snapshot: &EditorSnapshot, + selections: &[(PlayerColor, Vec)], + visible_display_row_range: Range, + line_layouts: &[LineWithInvisibles], + text_hitbox: &Hitbox, + content_origin: gpui::Point, + scroll_position: gpui::Point, + scroll_pixel_position: gpui::Point, + line_height: Pixels, + em_width: Pixels, + autoscroll_containing_element: bool, + cx: &mut WindowContext, + ) -> Vec { + let mut autoscroll_bounds = None; + let cursor_layouts = self.editor.update(cx, |editor, cx| { + let mut cursors = Vec::new(); + for (player_color, selections) in selections { + for selection in selections { + let cursor_position = selection.head; + + let in_range = visible_display_row_range.contains(&cursor_position.row()); + if (selection.is_local && !editor.show_local_cursors(cx)) || !in_range { + continue; + } + + let cursor_row_layout = &line_layouts + [cursor_position.row().minus(visible_display_row_range.start) as usize] + .line; + let cursor_column = cursor_position.column() as usize; + + let cursor_character_x = cursor_row_layout.x_for_index(cursor_column); + let mut block_width = + cursor_row_layout.x_for_index(cursor_column + 1) - cursor_character_x; + if block_width == Pixels::ZERO { + block_width = em_width; + } + let block_text = if let CursorShape::Block = selection.cursor_shape { + snapshot.display_chars_at(cursor_position).next().and_then( + |(character, _)| { + let text = if character == '\n' { + SharedString::from(" ") + } else { + SharedString::from(character.to_string()) + }; + let len = text.len(); + + let font = cursor_row_layout + .font_id_for_index(cursor_column) + .and_then(|cursor_font_id| { + cx.text_system().get_font_for_id(cursor_font_id) + }) + .unwrap_or(self.style.text.font()); + + cx.text_system() + .shape_line( + text, + cursor_row_layout.font_size, + &[TextRun { + len, + font, + color: self.style.background, + background_color: None, + strikethrough: None, + underline: None, + }], + ) + .log_err() + }, + ) + } else { + None + }; + + let x = cursor_character_x - scroll_pixel_position.x; + let y = (cursor_position.row().as_f32() + - scroll_pixel_position.y / line_height) + * line_height; + if selection.is_newest { + editor.pixel_position_of_newest_cursor = Some(point( + text_hitbox.origin.x + x + block_width / 2., + text_hitbox.origin.y + y + line_height / 2., + )); + + if autoscroll_containing_element { + let top = text_hitbox.origin.y + + (cursor_position.row().as_f32() - scroll_position.y - 3.).max(0.) + * line_height; + let left = text_hitbox.origin.x + + (cursor_position.column() as f32 - scroll_position.x - 3.) + .max(0.) + * em_width; + + let bottom = text_hitbox.origin.y + + (cursor_position.row().as_f32() - scroll_position.y + 4.) + * line_height; + let right = text_hitbox.origin.x + + (cursor_position.column() as f32 - scroll_position.x + 4.) 
+ * em_width; + + autoscroll_bounds = + Some(Bounds::from_corners(point(left, top), point(right, bottom))) + } + } + + let mut cursor = CursorLayout { + color: player_color.cursor, + block_width, + origin: point(x, y), + line_height, + shape: selection.cursor_shape, + block_text, + cursor_name: None, + }; + let cursor_name = selection.user_name.clone().map(|name| CursorName { + string: name, + color: self.style.background, + is_top_row: cursor_position.row().0 == 0, + }); + cursor.layout(content_origin, cursor_name, cx); + cursors.push(cursor); + } + } + cursors + }); + + if let Some(bounds) = autoscroll_bounds { + cx.request_autoscroll(bounds); + } + + cursor_layouts + } + + fn layout_scrollbar( + &self, + snapshot: &EditorSnapshot, + bounds: Bounds, + scroll_position: gpui::Point, + rows_per_page: f32, + non_visible_cursors: bool, + cx: &mut WindowContext, + ) -> Option { + let scrollbar_settings = EditorSettings::get_global(cx).scrollbar; + let show_scrollbars = match scrollbar_settings.show { + ShowScrollbar::Auto => { + let editor = self.editor.read(cx); + let is_singleton = editor.is_singleton(cx); + // Git + (is_singleton && scrollbar_settings.git_diff && snapshot.buffer_snapshot.has_git_diffs()) + || + // Buffer Search Results + (is_singleton && scrollbar_settings.search_results && editor.has_background_highlights::()) + || + // Selected Symbol Occurrences + (is_singleton && scrollbar_settings.selected_symbol && (editor.has_background_highlights::() || editor.has_background_highlights::())) + || + // Diagnostics + (is_singleton && scrollbar_settings.diagnostics && snapshot.buffer_snapshot.has_diagnostics()) + || + // Cursors out of sight + non_visible_cursors + || + // Scrollmanager + editor.scroll_manager.scrollbars_visible() + } + ShowScrollbar::System => self.editor.read(cx).scroll_manager.scrollbars_visible(), + ShowScrollbar::Always => true, + ShowScrollbar::Never => false, + }; + if snapshot.mode != EditorMode::Full { + return None; + } + + let visible_row_range = scroll_position.y..scroll_position.y + rows_per_page; + + // If a drag took place after we started dragging the scrollbar, + // cancel the scrollbar drag. 
+ if cx.has_active_drag() { + self.editor.update(cx, |editor, cx| { + editor.scroll_manager.set_is_dragging_scrollbar(false, cx); + }); + } + + let track_bounds = Bounds::from_corners( + point(self.scrollbar_left(&bounds), bounds.origin.y), + point(bounds.lower_right().x, bounds.lower_left().y), + ); + + let height = bounds.size.height; + let total_rows = snapshot.max_point().row().as_f32() + rows_per_page; + let px_per_row = height / total_rows; + let thumb_height = (rows_per_page * px_per_row).max(ScrollbarLayout::MIN_THUMB_HEIGHT); + let row_height = (height - thumb_height) / snapshot.max_point().row().as_f32(); + + Some(ScrollbarLayout { + hitbox: cx.insert_hitbox(track_bounds, false), + visible_row_range, + row_height, + visible: show_scrollbars, + thumb_height, + }) + } + + #[allow(clippy::too_many_arguments)] + fn layout_gutter_fold_indicators( + &self, + fold_statuses: Vec>, + line_height: Pixels, + gutter_dimensions: &GutterDimensions, + gutter_settings: crate::editor_settings::Gutter, + scroll_pixel_position: gpui::Point, + gutter_hitbox: &Hitbox, + cx: &mut WindowContext, + ) -> Vec> { + let mut indicators = self.editor.update(cx, |editor, cx| { + editor.render_fold_indicators( + fold_statuses, + &self.style, + editor.gutter_hovered, + line_height, + gutter_dimensions.margin, + cx, + ) + }); + + for (ix, fold_indicator) in indicators.iter_mut().enumerate() { + if let Some(fold_indicator) = fold_indicator { + debug_assert!(gutter_settings.folds); + let available_space = size( + AvailableSpace::MinContent, + AvailableSpace::Definite(line_height * 0.55), + ); + let fold_indicator_size = fold_indicator.layout_as_root(available_space, cx); + + let position = point( + gutter_dimensions.width - gutter_dimensions.right_padding, + ix as f32 * line_height - (scroll_pixel_position.y % line_height), + ); + let centering_offset = point( + (gutter_dimensions.right_padding + gutter_dimensions.margin + - fold_indicator_size.width) + / 2., + (line_height - fold_indicator_size.height) / 2., + ); + let origin = gutter_hitbox.origin + position + centering_offset; + fold_indicator.prepaint_as_root(origin, available_space, cx); + } + } + + indicators + } + + // Folds contained in a hunk are ignored apart from shrinking visual size + // If a fold contains any hunks then that fold line is marked as modified + fn layout_git_gutters( + &self, + line_height: Pixels, + gutter_hitbox: &Hitbox, + display_rows: Range, + snapshot: &EditorSnapshot, + cx: &mut WindowContext, + ) -> Vec<(DisplayDiffHunk, Option)> { + let buffer_snapshot = &snapshot.buffer_snapshot; + + let buffer_start_row = MultiBufferRow( + DisplayPoint::new(display_rows.start, 0) + .to_point(snapshot) + .row, + ); + let buffer_end_row = MultiBufferRow( + DisplayPoint::new(display_rows.end, 0) + .to_point(snapshot) + .row, + ); + + let expanded_hunk_display_rows = self.editor.update(cx, |editor, _| { + editor + .expanded_hunks + .hunks(false) + .map(|expanded_hunk| { + let start_row = expanded_hunk + .hunk_range + .start + .to_display_point(snapshot) + .row(); + let end_row = expanded_hunk + .hunk_range + .end + .to_display_point(snapshot) + .row(); + (start_row, end_row) + }) + .collect::>() + }); + + buffer_snapshot + .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) + .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) + .dedup() + .map(|hunk| { + let hitbox = if let DisplayDiffHunk::Unfolded { + display_row_range, .. 
+ } = &hunk + { + let was_expanded = expanded_hunk_display_rows + .get(&display_row_range.start) + .map(|expanded_end_row| expanded_end_row == &display_row_range.end) + .unwrap_or(false); + if was_expanded { + None + } else { + let hunk_bounds = Self::diff_hunk_bounds( + &snapshot, + line_height, + gutter_hitbox.bounds, + &hunk, + ); + Some(cx.insert_hitbox(hunk_bounds, true)) + } + } else { + None + }; + (hunk, hitbox) + }) + .collect() + } + + #[allow(clippy::too_many_arguments)] + fn layout_inline_blame( + &self, + display_row: DisplayRow, + display_snapshot: &DisplaySnapshot, + line_layout: &LineWithInvisibles, + em_width: Pixels, + content_origin: gpui::Point, + scroll_pixel_position: gpui::Point, + line_height: Pixels, + cx: &mut WindowContext, + ) -> Option { + if !self + .editor + .update(cx, |editor, cx| editor.render_git_blame_inline(cx)) + { + return None; + } + + let workspace = self + .editor + .read(cx) + .workspace + .as_ref() + .map(|(w, _)| w.clone()); + + let display_point = DisplayPoint::new(display_row, 0); + let buffer_row = MultiBufferRow(display_point.to_point(display_snapshot).row); + + let blame = self.editor.read(cx).blame.clone()?; + let blame_entry = blame + .update(cx, |blame, cx| { + blame.blame_for_rows([Some(buffer_row)], cx).next() + }) + .flatten()?; + + let mut element = + render_inline_blame_entry(&blame, blame_entry, &self.style, workspace, cx); + + let start_y = content_origin.y + + line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height); + + let start_x = { + const INLINE_BLAME_PADDING_EM_WIDTHS: f32 = 6.; + + let padded_line_width = + line_layout.line.width + (em_width * INLINE_BLAME_PADDING_EM_WIDTHS); + + let min_column = ProjectSettings::get_global(cx) + .git + .inline_blame + .and_then(|settings| settings.min_column) + .map(|col| self.column_pixels(col as usize, cx)) + .unwrap_or(px(0.)); + + (content_origin.x - scroll_pixel_position.x) + max(padded_line_width, min_column) + }; + + let absolute_offset = point(start_x, start_y); + let available_space = size(AvailableSpace::MinContent, AvailableSpace::MinContent); + + element.prepaint_as_root(absolute_offset, available_space, cx); + + Some(element) + } + + #[allow(clippy::too_many_arguments)] + fn layout_blame_entries( + &self, + buffer_rows: impl Iterator>, + em_width: Pixels, + scroll_position: gpui::Point, + line_height: Pixels, + gutter_hitbox: &Hitbox, + max_width: Option, + cx: &mut WindowContext, + ) -> Option> { + if !self + .editor + .update(cx, |editor, cx| editor.render_git_blame_gutter(cx)) + { + return None; + } + + let blame = self.editor.read(cx).blame.clone()?; + let blamed_rows: Vec<_> = blame.update(cx, |blame, cx| { + blame.blame_for_rows(buffer_rows, cx).collect() + }); + + let width = if let Some(max_width) = max_width { + AvailableSpace::Definite(max_width) + } else { + AvailableSpace::MaxContent + }; + let scroll_top = scroll_position.y * line_height; + let start_x = em_width * 1; + + let mut last_used_color: Option<(PlayerColor, Oid)> = None; + + let shaped_lines = blamed_rows + .into_iter() + .enumerate() + .flat_map(|(ix, blame_entry)| { + if let Some(blame_entry) = blame_entry { + let mut element = render_blame_entry( + ix, + &blame, + blame_entry, + &self.style, + &mut last_used_color, + self.editor.clone(), + cx, + ); + + let start_y = ix as f32 * line_height - (scroll_top % line_height); + let absolute_offset = gutter_hitbox.origin + point(start_x, start_y); + + element.prepaint_as_root( + absolute_offset, + size(width, AvailableSpace::MinContent), 
+ cx, + ); + + Some(element) + } else { + None + } + }) + .collect(); + + Some(shaped_lines) + } + + fn layout_run_indicators( + &self, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + gutter_dimensions: &GutterDimensions, + gutter_hitbox: &Hitbox, + snapshot: &EditorSnapshot, + cx: &mut WindowContext, + ) -> Vec { + self.editor.update(cx, |editor, cx| { + let active_task_indicator_row = + if let Some(crate::ContextMenu::CodeActions(CodeActionsMenu { + deployed_from_indicator, + actions, + .. + })) = editor.context_menu.read().as_ref() + { + actions + .tasks + .as_ref() + .map(|tasks| tasks.position.to_display_point(snapshot).row()) + .or_else(|| *deployed_from_indicator) + } else { + None + }; + editor + .tasks + .iter() + .filter_map(|(_, (multibuffer_offset, _))| { + let multibuffer_point = multibuffer_offset.to_point(&snapshot.buffer_snapshot); + let multibuffer_row = MultiBufferRow(multibuffer_point.row); + if snapshot.is_line_folded(multibuffer_row) { + return None; + } + let display_row = multibuffer_point.to_display_point(snapshot).row(); + let button = editor.render_run_indicator( + &self.style, + Some(display_row) == active_task_indicator_row, + display_row, + cx, + ); + + let button = prepaint_gutter_button( + button, + display_row, + line_height, + gutter_dimensions, + scroll_pixel_position, + gutter_hitbox, + cx, + ); + Some(button) + }) + .collect_vec() + }) + } + + fn layout_code_actions_indicator( + &self, + line_height: Pixels, + newest_selection_head: DisplayPoint, + scroll_pixel_position: gpui::Point, + gutter_dimensions: &GutterDimensions, + gutter_hitbox: &Hitbox, + cx: &mut WindowContext, + ) -> Option { + let mut active = false; + let mut button = None; + let row = newest_selection_head.row(); + self.editor.update(cx, |editor, cx| { + if let Some(crate::ContextMenu::CodeActions(CodeActionsMenu { + deployed_from_indicator, + .. 
+ })) = editor.context_menu.read().as_ref() + { + active = deployed_from_indicator.map_or(true, |indicator_row| indicator_row == row); + }; + button = editor.render_code_actions_indicator(&self.style, row, active, cx); + }); + + let button = prepaint_gutter_button( + button?, + row, + line_height, + gutter_dimensions, + scroll_pixel_position, + gutter_hitbox, + cx, + ); + + Some(button) + } + + fn get_participant_color( + participant_index: Option, + cx: &WindowContext, + ) -> PlayerColor { + if let Some(index) = participant_index { + cx.theme().players().color_for_participant(index.0) + } else { + cx.theme().players().absent() + } + } + + fn calculate_relative_line_numbers( + &self, + snapshot: &EditorSnapshot, + rows: &Range, + relative_to: Option, + ) -> HashMap { + let mut relative_rows: HashMap = Default::default(); + let Some(relative_to) = relative_to else { + return relative_rows; + }; + + let start = rows.start.min(relative_to); + let end = rows.end.max(relative_to); + + let buffer_rows = snapshot + .buffer_rows(start) + .take(1 + end.minus(start) as usize) + .collect::>(); + + let head_idx = relative_to.minus(start); + let mut delta = 1; + let mut i = head_idx + 1; + while i < buffer_rows.len() as u32 { + if buffer_rows[i as usize].is_some() { + if rows.contains(&DisplayRow(i + start.0)) { + relative_rows.insert(DisplayRow(i + start.0), delta); + } + delta += 1; + } + i += 1; + } + delta = 1; + i = head_idx.min(buffer_rows.len() as u32 - 1); + while i > 0 && buffer_rows[i as usize].is_none() { + i -= 1; + } + + while i > 0 { + i -= 1; + if buffer_rows[i as usize].is_some() { + if rows.contains(&DisplayRow(i + start.0)) { + relative_rows.insert(DisplayRow(i + start.0), delta); + } + delta += 1; + } + } + + relative_rows + } + + fn layout_line_numbers( + &self, + rows: Range, + buffer_rows: impl Iterator>, + active_rows: &BTreeMap, + newest_selection_head: Option, + snapshot: &EditorSnapshot, + cx: &WindowContext, + ) -> ( + Vec>, + Vec>, + ) { + let editor = self.editor.read(cx); + let is_singleton = editor.is_singleton(cx); + let newest_selection_head = newest_selection_head.unwrap_or_else(|| { + let newest = editor.selections.newest::(cx); + SelectionLayout::new( + newest, + editor.selections.line_mode, + editor.cursor_shape, + &snapshot.display_snapshot, + true, + true, + None, + ) + .head + }); + let font_size = self.style.text.font_size.to_pixels(cx.rem_size()); + let include_line_numbers = + EditorSettings::get_global(cx).gutter.line_numbers && snapshot.mode == EditorMode::Full; + let include_fold_statuses = + EditorSettings::get_global(cx).gutter.folds && snapshot.mode == EditorMode::Full; + let mut shaped_line_numbers = Vec::with_capacity(rows.len()); + let mut fold_statuses = Vec::with_capacity(rows.len()); + let mut line_number = String::new(); + let is_relative = EditorSettings::get_global(cx).relative_line_numbers; + let relative_to = if is_relative { + Some(newest_selection_head.row()) + } else { + None + }; + + let relative_rows = self.calculate_relative_line_numbers(snapshot, &rows, relative_to); + + for (ix, row) in buffer_rows.into_iter().enumerate() { + let display_row = DisplayRow(rows.start.0 + ix as u32); + let (active, color) = if active_rows.contains_key(&display_row) { + (true, cx.theme().colors().editor_active_line_number) + } else { + (false, cx.theme().colors().editor_line_number) + }; + if let Some(multibuffer_row) = row { + if include_line_numbers { + line_number.clear(); + let default_number = multibuffer_row.0 + 1; + let number = relative_rows + 
.get(&DisplayRow(ix as u32 + rows.start.0)) + .unwrap_or(&default_number); + write!(&mut line_number, "{number}").unwrap(); + let run = TextRun { + len: line_number.len(), + font: self.style.text.font(), + color, + background_color: None, + underline: None, + strikethrough: None, + }; + let shaped_line = cx + .text_system() + .shape_line(line_number.clone().into(), font_size, &[run]) + .unwrap(); + shaped_line_numbers.push(Some(shaped_line)); + } + if include_fold_statuses { + fold_statuses.push( + is_singleton + .then(|| { + snapshot + .fold_for_line(multibuffer_row) + .map(|fold_status| (fold_status, multibuffer_row, active)) + }) + .flatten(), + ) + } + } else { + fold_statuses.push(None); + shaped_line_numbers.push(None); + } + } + + (shaped_line_numbers, fold_statuses) + } + + fn layout_lines( + &self, + rows: Range, + line_number_layouts: &[Option], + snapshot: &EditorSnapshot, + cx: &WindowContext, + ) -> Vec { + if rows.start >= rows.end { + return Vec::new(); + } + + // Show the placeholder when the editor is empty + if snapshot.is_empty() { + let font_size = self.style.text.font_size.to_pixels(cx.rem_size()); + let placeholder_color = cx.theme().colors().text_placeholder; + let placeholder_text = snapshot.placeholder_text(); + + let placeholder_lines = placeholder_text + .as_ref() + .map_or("", AsRef::as_ref) + .split('\n') + .skip(rows.start.0 as usize) + .chain(iter::repeat("")) + .take(rows.len()); + placeholder_lines + .filter_map(move |line| { + let run = TextRun { + len: line.len(), + font: self.style.text.font(), + color: placeholder_color, + background_color: None, + underline: Default::default(), + strikethrough: None, + }; + cx.text_system() + .shape_line(line.to_string().into(), font_size, &[run]) + .log_err() + }) + .map(|line| LineWithInvisibles { + line, + invisibles: Vec::new(), + }) + .collect() + } else { + let chunks = snapshot.highlighted_chunks(rows.clone(), true, &self.style); + LineWithInvisibles::from_chunks( + chunks, + &self.style.text, + MAX_LINE_LEN, + rows.len(), + line_number_layouts, + snapshot.mode, + cx, + ) + } + } + + #[allow(clippy::too_many_arguments)] + fn build_blocks( + &self, + rows: Range, + snapshot: &EditorSnapshot, + hitbox: &Hitbox, + text_hitbox: &Hitbox, + scroll_width: &mut Pixels, + gutter_dimensions: &GutterDimensions, + em_width: Pixels, + text_x: Pixels, + line_height: Pixels, + line_layouts: &[LineWithInvisibles], + cx: &mut WindowContext, + ) -> Vec { + let mut block_id = 0; + let (fixed_blocks, non_fixed_blocks) = snapshot + .blocks_in_range(rows.clone()) + .partition::, _>(|(_, block)| match block { + TransformBlock::ExcerptHeader { .. 
} => false, + TransformBlock::Custom(block) => block.style() == BlockStyle::Fixed, + }); + + let render_block = |block: &TransformBlock, + available_space: Size, + block_id: usize, + block_row_start: DisplayRow, + cx: &mut WindowContext| { + let mut element = match block { + TransformBlock::Custom(block) => { + let align_to = block + .position() + .to_point(&snapshot.buffer_snapshot) + .to_display_point(snapshot); + let anchor_x = text_x + + if rows.contains(&align_to.row()) { + line_layouts[align_to.row().minus(rows.start) as usize] + .line + .x_for_index(align_to.column() as usize) + } else { + layout_line(align_to.row(), snapshot, &self.style, cx) + .unwrap() + .x_for_index(align_to.column() as usize) + }; + + block.render(&mut BlockContext { + context: cx, + anchor_x, + gutter_dimensions, + line_height, + em_width, + block_id, + max_width: text_hitbox.size.width.max(*scroll_width), + editor_style: &self.style, + }) + } + + TransformBlock::ExcerptHeader { + buffer, + range, + starts_new_buffer, + height, + id, + .. + } => { + let include_root = self + .editor + .read(cx) + .project + .as_ref() + .map(|project| project.read(cx).visible_worktrees(cx).count() > 1) + .unwrap_or_default(); + + #[derive(Clone)] + struct JumpData { + position: Point, + anchor: text::Anchor, + path: ProjectPath, + line_offset_from_top: u32, + } + + let jump_data = project::File::from_dyn(buffer.file()).map(|file| { + let jump_path = ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }; + let jump_anchor = range + .primary + .as_ref() + .map_or(range.context.start, |primary| primary.start); + + let excerpt_start = range.context.start; + let jump_position = language::ToPoint::to_point(&jump_anchor, buffer); + let offset_from_excerpt_start = if jump_anchor == excerpt_start { + 0 + } else { + let excerpt_start_row = + language::ToPoint::to_point(&jump_anchor, buffer).row; + jump_position.row - excerpt_start_row + }; + + let line_offset_from_top = + block_row_start.0 + *height as u32 + offset_from_excerpt_start + - snapshot + .scroll_anchor + .scroll_position(&snapshot.display_snapshot) + .y as u32; + + JumpData { + position: jump_position, + anchor: jump_anchor, + path: jump_path, + line_offset_from_top, + } + }); + + let element = if *starts_new_buffer { + let path = buffer.resolve_file_path(cx, include_root); + let mut filename = None; + let mut parent_path = None; + // Can't use .and_then() because `.file_name()` and `.parent()` return references :( + if let Some(path) = path { + filename = path.file_name().map(|f| f.to_string_lossy().to_string()); + parent_path = path + .parent() + .map(|p| SharedString::from(p.to_string_lossy().to_string() + "/")); + } + + v_flex() + .id(("path header container", block_id)) + .size_full() + .justify_center() + .p(gpui::px(6.)) + .child( + h_flex() + .id("path header block") + .size_full() + .pl(gpui::px(12.)) + .pr(gpui::px(8.)) + .rounded_md() + .shadow_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_subheader_background) + .justify_between() + .hover(|style| style.bg(cx.theme().colors().element_hover)) + .child( + h_flex().gap_3().child( + h_flex() + .gap_2() + .child( + filename + .map(SharedString::from) + .unwrap_or_else(|| "untitled".into()), + ) + .when_some(parent_path, |then, path| { + then.child( + div().child(path).text_color( + cx.theme().colors().text_muted, + ), + ) + }), + ), + ) + .when_some(jump_data.clone(), |this, jump_data| { + this.cursor_pointer() + .tooltip(|cx| { + 
Tooltip::for_action( + "Jump to File", + &OpenExcerpts, + cx, + ) + }) + .on_mouse_down(MouseButton::Left, |_, cx| { + cx.stop_propagation() + }) + .on_click(cx.listener_for(&self.editor, { + move |editor, _, cx| { + editor.jump( + jump_data.path.clone(), + jump_data.position, + jump_data.anchor, + jump_data.line_offset_from_top, + cx, + ); + } + })) + }), + ) + } else { + v_flex() + .id(("collapsed context", block_id)) + .size_full() + .child( + div() + .flex() + .v_flex() + .justify_start() + .id("jump to collapsed context") + .w(relative(1.0)) + .h_full() + .child( + div() + .h_px() + .w_full() + .bg(cx.theme().colors().border_variant) + .group_hover("excerpt-jump-action", |style| { + style.bg(cx.theme().colors().border) + }), + ), + ) + .child( + h_flex() + .justify_end() + .flex_none() + .w( + gutter_dimensions.width - (gutter_dimensions.left_padding), // + gutter_dimensions.right_padding) + ) + .h_full() + .child( + ButtonLike::new("expand-icon") + .style(ButtonStyle::Transparent) + .child( + svg() + .path(IconName::ExpandVertical.path()) + .size(IconSize::XSmall.rems()) + .text_color( + cx.theme().colors().editor_line_number, + ) + .group("") + .hover(|style| { + style.text_color( + cx.theme() + .colors() + .editor_active_line_number, + ) + }), + ) + .on_click(cx.listener_for(&self.editor, { + let id = *id; + move |editor, _, cx| { + editor.expand_excerpt(id, cx); + } + })) + .tooltip({ + move |cx| { + Tooltip::for_action( + "Expand Excerpt", + &ExpandExcerpts { lines: 0 }, + cx, + ) + } + }), + ), + ) + .group("excerpt-jump-action") + .cursor_pointer() + .when_some(jump_data.clone(), |this, jump_data| { + this.on_click(cx.listener_for(&self.editor, { + let path = jump_data.path.clone(); + move |editor, _, cx| { + cx.stop_propagation(); + + editor.jump( + path.clone(), + jump_data.position, + jump_data.anchor, + jump_data.line_offset_from_top, + cx, + ); + } + })) + .tooltip(move |cx| { + Tooltip::for_action( + format!( + "Jump to {}:L{}", + jump_data.path.path.display(), + jump_data.position.row + 1 + ), + &OpenExcerpts, + cx, + ) + }) + }) + }; + element.into_any() + } + }; + + let size = element.layout_as_root(available_space, cx); + (element, size) + }; + + let mut fixed_block_max_width = Pixels::ZERO; + let mut blocks = Vec::new(); + for (row, block) in fixed_blocks { + let available_space = size( + AvailableSpace::MinContent, + AvailableSpace::Definite(block.height() as f32 * line_height), + ); + let (element, element_size) = render_block(block, available_space, block_id, row, cx); + block_id += 1; + fixed_block_max_width = fixed_block_max_width.max(element_size.width + em_width); + blocks.push(BlockLayout { + row, + element, + available_space, + style: BlockStyle::Fixed, + }); + } + for (row, block) in non_fixed_blocks { + let style = match block { + TransformBlock::Custom(block) => block.style(), + TransformBlock::ExcerptHeader { .. 
} => BlockStyle::Sticky, + }; + let width = match style { + BlockStyle::Sticky => hitbox.size.width, + BlockStyle::Flex => hitbox + .size + .width + .max(fixed_block_max_width) + .max(gutter_dimensions.width + *scroll_width), + BlockStyle::Fixed => unreachable!(), + }; + let available_space = size( + AvailableSpace::Definite(width), + AvailableSpace::Definite(block.height() as f32 * line_height), + ); + let (element, _) = render_block(block, available_space, block_id, row, cx); + block_id += 1; + blocks.push(BlockLayout { + row, + element, + available_space, + style, + }); + } + + *scroll_width = (*scroll_width).max(fixed_block_max_width - gutter_dimensions.width); + blocks + } + + fn layout_blocks( + &self, + blocks: &mut Vec, + hitbox: &Hitbox, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + cx: &mut WindowContext, + ) { + for block in blocks { + let mut origin = hitbox.origin + + point( + Pixels::ZERO, + block.row.as_f32() * line_height - scroll_pixel_position.y, + ); + if !matches!(block.style, BlockStyle::Sticky) { + origin += point(-scroll_pixel_position.x, Pixels::ZERO); + } + block + .element + .prepaint_as_root(origin, block.available_space, cx); + } + } + + #[allow(clippy::too_many_arguments)] + fn layout_context_menu( + &self, + line_height: Pixels, + hitbox: &Hitbox, + text_hitbox: &Hitbox, + content_origin: gpui::Point, + start_row: DisplayRow, + scroll_pixel_position: gpui::Point, + line_layouts: &[LineWithInvisibles], + newest_selection_head: DisplayPoint, + gutter_overshoot: Pixels, + cx: &mut WindowContext, + ) -> bool { + let max_height = cmp::min( + 12. * line_height, + cmp::max(3. * line_height, (hitbox.size.height - line_height) / 2.), + ); + let Some((position, mut context_menu)) = self.editor.update(cx, |editor, cx| { + if editor.context_menu_visible() { + editor.render_context_menu(newest_selection_head, &self.style, max_height, cx) + } else { + None + } + }) else { + return false; + }; + + let available_space = size(AvailableSpace::MinContent, AvailableSpace::MinContent); + let context_menu_size = context_menu.layout_as_root(available_space, cx); + + let (x, y) = match position { + crate::ContextMenuOrigin::EditorPoint(point) => { + let cursor_row_layout = &line_layouts[point.row().minus(start_row) as usize].line; + let x = cursor_row_layout.x_for_index(point.column() as usize) + - scroll_pixel_position.x; + let y = point.row().next_row().as_f32() * line_height - scroll_pixel_position.y; + (x, y) + } + crate::ContextMenuOrigin::GutterIndicator(row) => { + // Context menu was spawned via a click on a gutter. Ensure it's a bit closer to the indicator than just a plain first column of the + // text field. + let x = -gutter_overshoot; + let y = row.next_row().as_f32() * line_height - scroll_pixel_position.y; + (x, y) + } + }; + + let mut list_origin = content_origin + point(x, y); + let list_width = context_menu_size.width; + let list_height = context_menu_size.height; + + // Snap the right edge of the list to the right edge of the window if + // its horizontal bounds overflow. 
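// --- Illustrative aside, not part of the diff ---
// Sketch of the clamping performed just below in `layout_context_menu`: the
// menu is anchored one line under the cursor (or next to the gutter
// indicator), nudged left if it would overflow the window's right edge, and
// flipped above the cursor line if it would overflow the bottom of the text
// area. Plain f32 points stand in for gpui's `Pixels`.
#[derive(Clone, Copy)]
pub struct SketchPoint {
    pub x: f32,
    pub y: f32,
}

pub fn place_context_menu(
    mut origin: SketchPoint, // proposed top-left, one line below the cursor
    menu_width: f32,
    menu_height: f32,
    viewport_width: f32,
    text_bottom: f32,
    line_height: f32,
) -> SketchPoint {
    // Snap to the right edge of the window if the menu would overflow it.
    if origin.x + menu_width > viewport_width {
        origin.x = (viewport_width - menu_width).max(0.0);
    }
    // Flip above the cursor line if the menu would overflow the text area.
    if origin.y + menu_height > text_bottom {
        origin.y -= line_height + menu_height;
    }
    origin
}
// --- end aside ---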
+ if list_origin.x + list_width > cx.viewport_size().width { + list_origin.x = (cx.viewport_size().width - list_width).max(Pixels::ZERO); + } + + if list_origin.y + list_height > text_hitbox.lower_right().y { + list_origin.y -= line_height + list_height; + } + + cx.defer_draw(context_menu, list_origin, 1); + true + } + + fn layout_mouse_context_menu(&self, cx: &mut WindowContext) -> Option { + let mouse_context_menu = self.editor.read(cx).mouse_context_menu.as_ref()?; + let mut element = deferred( + anchored() + .position(mouse_context_menu.position) + .child(mouse_context_menu.context_menu.clone()) + .anchor(AnchorCorner::TopLeft) + .snap_to_window(), + ) + .with_priority(1) + .into_any(); + + element.prepaint_as_root(gpui::Point::default(), AvailableSpace::min_size(), cx); + Some(element) + } + + #[allow(clippy::too_many_arguments)] + fn layout_hover_popovers( + &self, + snapshot: &EditorSnapshot, + hitbox: &Hitbox, + text_hitbox: &Hitbox, + visible_display_row_range: Range, + content_origin: gpui::Point, + scroll_pixel_position: gpui::Point, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + em_width: Pixels, + cx: &mut WindowContext, + ) { + struct MeasuredHoverPopover { + element: AnyElement, + size: Size, + horizontal_offset: Pixels, + } + + let max_size = size( + (120. * em_width) // Default size + .min(hitbox.size.width / 2.) // Shrink to half of the editor width + .max(MIN_POPOVER_CHARACTER_WIDTH * em_width), // Apply minimum width of 20 characters + (16. * line_height) // Default size + .min(hitbox.size.height / 2.) // Shrink to half of the editor height + .max(MIN_POPOVER_LINE_HEIGHT * line_height), // Apply minimum height of 4 lines + ); + + let hover_popovers = self.editor.update(cx, |editor, cx| { + editor.hover_state.render( + &snapshot, + &self.style, + visible_display_row_range.clone(), + max_size, + editor.workspace.as_ref().map(|(w, _)| w.clone()), + cx, + ) + }); + let Some((position, hover_popovers)) = hover_popovers else { + return; + }; + + let available_space = size(AvailableSpace::MinContent, AvailableSpace::MinContent); + + // This is safe because we check on layout whether the required row is available + let hovered_row_layout = + &line_layouts[position.row().minus(visible_display_row_range.start) as usize].line; + + // Compute Hovered Point + let x = + hovered_row_layout.x_for_index(position.column() as usize) - scroll_pixel_position.x; + let y = position.row().as_f32() * line_height - scroll_pixel_position.y; + let hovered_point = content_origin + point(x, y); + + let mut overall_height = Pixels::ZERO; + let mut measured_hover_popovers = Vec::new(); + for mut hover_popover in hover_popovers { + let size = hover_popover.layout_as_root(available_space, cx); + let horizontal_offset = + (text_hitbox.upper_right().x - (hovered_point.x + size.width)).min(Pixels::ZERO); + + overall_height += HOVER_POPOVER_GAP + size.height; + + measured_hover_popovers.push(MeasuredHoverPopover { + element: hover_popover, + size, + horizontal_offset, + }); + } + overall_height += HOVER_POPOVER_GAP; + + fn draw_occluder(width: Pixels, origin: gpui::Point, cx: &mut WindowContext) { + let mut occlusion = div() + .size_full() + .occlude() + .on_mouse_move(|_, cx| cx.stop_propagation()) + .into_any_element(); + occlusion.layout_as_root(size(width, HOVER_POPOVER_GAP).into(), cx); + cx.defer_draw(occlusion, origin, 2); + } + + if hovered_point.y > overall_height { + // There is enough space above. 
Render popovers above the hovered point + let mut current_y = hovered_point.y; + for (position, popover) in measured_hover_popovers.into_iter().with_position() { + let size = popover.size; + let popover_origin = point( + hovered_point.x + popover.horizontal_offset, + current_y - size.height, + ); + + cx.defer_draw(popover.element, popover_origin, 2); + if position != itertools::Position::Last { + let origin = point(popover_origin.x, popover_origin.y - HOVER_POPOVER_GAP); + draw_occluder(size.width, origin, cx); + } + + current_y = popover_origin.y - HOVER_POPOVER_GAP; + } + } else { + // There is not enough space above. Render popovers below the hovered point + let mut current_y = hovered_point.y + line_height; + for (position, popover) in measured_hover_popovers.into_iter().with_position() { + let size = popover.size; + let popover_origin = point(hovered_point.x + popover.horizontal_offset, current_y); + + cx.defer_draw(popover.element, popover_origin, 2); + if position != itertools::Position::Last { + let origin = point(popover_origin.x, popover_origin.y + size.height); + draw_occluder(size.width, origin, cx); + } + + current_y = popover_origin.y + size.height + HOVER_POPOVER_GAP; + } + } + } + + fn paint_background(&self, layout: &EditorLayout, cx: &mut WindowContext) { + cx.paint_layer(layout.hitbox.bounds, |cx| { + let scroll_top = layout.position_map.snapshot.scroll_position().y; + let gutter_bg = cx.theme().colors().editor_gutter_background; + cx.paint_quad(fill(layout.gutter_hitbox.bounds, gutter_bg)); + cx.paint_quad(fill(layout.text_hitbox.bounds, self.style.background)); + + if let EditorMode::Full = layout.mode { + let mut active_rows = layout.active_rows.iter().peekable(); + while let Some((start_row, contains_non_empty_selection)) = active_rows.next() { + let mut end_row = start_row.0; + while active_rows + .peek() + .map_or(false, |(active_row, has_selection)| { + active_row.0 == end_row + 1 + && *has_selection == contains_non_empty_selection + }) + { + active_rows.next().unwrap(); + end_row += 1; + } + + if !contains_non_empty_selection { + let highlight_h_range = + match layout.position_map.snapshot.current_line_highlight { + CurrentLineHighlight::Gutter => Some(Range { + start: layout.hitbox.left(), + end: layout.gutter_hitbox.right(), + }), + CurrentLineHighlight::Line => Some(Range { + start: layout.text_hitbox.bounds.left(), + end: layout.text_hitbox.bounds.right(), + }), + CurrentLineHighlight::All => Some(Range { + start: layout.hitbox.left(), + end: layout.hitbox.right(), + }), + CurrentLineHighlight::None => None, + }; + if let Some(range) = highlight_h_range { + let active_line_bg = cx.theme().colors().editor_active_line_background; + let bounds = Bounds { + origin: point( + range.start, + layout.hitbox.origin.y + + (start_row.as_f32() - scroll_top) + * layout.position_map.line_height, + ), + size: size( + range.end - range.start, + layout.position_map.line_height + * (end_row - start_row.0 + 1) as f32, + ), + }; + cx.paint_quad(fill(bounds, active_line_bg)); + } + } + } + + let mut paint_highlight = + |highlight_row_start: DisplayRow, highlight_row_end: DisplayRow, color| { + let origin = point( + layout.hitbox.origin.x, + layout.hitbox.origin.y + + (highlight_row_start.as_f32() - scroll_top) + * layout.position_map.line_height, + ); + let size = size( + layout.hitbox.size.width, + layout.position_map.line_height + * highlight_row_end.next_row().minus(highlight_row_start) as f32, + ); + cx.paint_quad(fill(Bounds { origin, size }, color)); + }; + + let mut 
current_paint: Option<(Hsla, Range)> = None; + for (&new_row, &new_color) in &layout.highlighted_rows { + match &mut current_paint { + Some((current_color, current_range)) => { + let current_color = *current_color; + let new_range_started = current_color != new_color + || current_range.end.next_row() != new_row; + if new_range_started { + paint_highlight( + current_range.start, + current_range.end, + current_color, + ); + current_paint = Some((new_color, new_row..new_row)); + continue; + } else { + current_range.end = current_range.end.next_row(); + } + } + None => current_paint = Some((new_color, new_row..new_row)), + }; + } + if let Some((color, range)) = current_paint { + paint_highlight(range.start, range.end, color); + } + + let scroll_left = + layout.position_map.snapshot.scroll_position().x * layout.position_map.em_width; + + for (wrap_position, active) in layout.wrap_guides.iter() { + let x = (layout.text_hitbox.origin.x + + *wrap_position + + layout.position_map.em_width / 2.) + - scroll_left; + + let show_scrollbars = layout + .scrollbar_layout + .as_ref() + .map_or(false, |scrollbar| scrollbar.visible); + if x < layout.text_hitbox.origin.x + || (show_scrollbars && x > self.scrollbar_left(&layout.hitbox.bounds)) + { + continue; + } + + let color = if *active { + cx.theme().colors().editor_active_wrap_guide + } else { + cx.theme().colors().editor_wrap_guide + }; + cx.paint_quad(fill( + Bounds { + origin: point(x, layout.text_hitbox.origin.y), + size: size(px(1.), layout.text_hitbox.size.height), + }, + color, + )); + } + } + }) + } + + fn paint_gutter(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + let line_height = layout.position_map.line_height; + + let scroll_position = layout.position_map.snapshot.scroll_position(); + let scroll_top = scroll_position.y * line_height; + + cx.set_cursor_style(CursorStyle::Arrow, &layout.gutter_hitbox); + for (_, hunk_hitbox) in &layout.display_hunks { + if let Some(hunk_hitbox) = hunk_hitbox { + cx.set_cursor_style(CursorStyle::PointingHand, hunk_hitbox); + } + } + + let show_git_gutter = matches!( + ProjectSettings::get_global(cx).git.git_gutter, + Some(GitGutterSetting::TrackedFiles) + ); + if show_git_gutter { + Self::paint_diff_hunks(layout.gutter_hitbox.bounds, layout, cx) + } + + if layout.blamed_display_rows.is_some() { + self.paint_blamed_display_rows(layout, cx); + } + + for (ix, line) in layout.line_numbers.iter().enumerate() { + if let Some(line) = line { + let line_origin = layout.gutter_hitbox.origin + + point( + layout.gutter_hitbox.size.width + - line.width + - layout.gutter_dimensions.right_padding, + ix as f32 * line_height - (scroll_top % line_height), + ); + + line.paint(line_origin, line_height, cx).log_err(); + } + } + + cx.paint_layer(layout.gutter_hitbox.bounds, |cx| { + cx.with_element_namespace("gutter_fold_indicators", |cx| { + for fold_indicator in layout.fold_indicators.iter_mut().flatten() { + fold_indicator.paint(cx); + } + }); + + for test_indicators in layout.test_indicators.iter_mut() { + test_indicators.paint(cx); + } + + if let Some(indicator) = layout.code_actions_indicator.as_mut() { + indicator.paint(cx); + } + }); + } + + fn paint_diff_hunks( + gutter_bounds: Bounds, + layout: &EditorLayout, + cx: &mut WindowContext, + ) { + if layout.display_hunks.is_empty() { + return; + } + + let line_height = layout.position_map.line_height; + cx.paint_layer(layout.gutter_hitbox.bounds, |cx| { + for (hunk, hitbox) in &layout.display_hunks { + let hunk_to_paint = match hunk { + DisplayDiffHunk::Folded 
{ .. } => { + let hunk_bounds = Self::diff_hunk_bounds( + &layout.position_map.snapshot, + line_height, + gutter_bounds, + &hunk, + ); + Some(( + hunk_bounds, + cx.theme().status().modified, + Corners::all(1. * line_height), + )) + } + DisplayDiffHunk::Unfolded { status, .. } => { + hitbox.as_ref().map(|hunk_hitbox| match status { + DiffHunkStatus::Added => ( + hunk_hitbox.bounds, + cx.theme().status().created, + Corners::all(0.05 * line_height), + ), + DiffHunkStatus::Modified => ( + hunk_hitbox.bounds, + cx.theme().status().modified, + Corners::all(0.05 * line_height), + ), + DiffHunkStatus::Removed => ( + hunk_hitbox.bounds, + cx.theme().status().deleted, + Corners::all(1. * line_height), + ), + }) + } + }; + + if let Some((hunk_bounds, background_color, corner_radii)) = hunk_to_paint { + cx.paint_quad(quad( + hunk_bounds, + corner_radii, + background_color, + Edges::default(), + transparent_black(), + )); + } + } + }); + } + + fn diff_hunk_bounds( + snapshot: &EditorSnapshot, + line_height: Pixels, + bounds: Bounds, + hunk: &DisplayDiffHunk, + ) -> Bounds { + let scroll_position = snapshot.scroll_position(); + let scroll_top = scroll_position.y * line_height; + + match hunk { + DisplayDiffHunk::Folded { display_row, .. } => { + let start_y = display_row.as_f32() * line_height - scroll_top; + let end_y = start_y + line_height; + + let width = 0.275 * line_height; + let highlight_origin = bounds.origin + point(-width, start_y); + let highlight_size = size(width * 2., end_y - start_y); + Bounds::new(highlight_origin, highlight_size) + } + DisplayDiffHunk::Unfolded { + display_row_range, + status, + .. + } => match status { + DiffHunkStatus::Added | DiffHunkStatus::Modified => { + let start_row = display_row_range.start; + let end_row = display_row_range.end; + // If we're in a multibuffer, row range span might include an + // excerpt header, so if we were to draw the marker straight away, + // the hunk might include the rows of that header. + // Making the range inclusive doesn't quite cut it, as we rely on the exclusivity for the soft wrap. + // Instead, we simply check whether the range we're dealing with includes + // any excerpt headers and if so, we stop painting the diff hunk on the first row of that header. + let end_row_in_current_excerpt = snapshot + .blocks_in_range(start_row..end_row) + .find_map(|(start_row, block)| { + if matches!(block, TransformBlock::ExcerptHeader { .. 
}) { + Some(start_row) + } else { + None + } + }) + .unwrap_or(end_row); + + let start_y = start_row.as_f32() * line_height - scroll_top; + let end_y = end_row_in_current_excerpt.as_f32() * line_height - scroll_top; + + let width = 0.275 * line_height; + let highlight_origin = bounds.origin + point(-width, start_y); + let highlight_size = size(width * 2., end_y - start_y); + Bounds::new(highlight_origin, highlight_size) + } + DiffHunkStatus::Removed => { + let row = display_row_range.start; + + let offset = line_height / 2.; + let start_y = row.as_f32() * line_height - offset - scroll_top; + let end_y = start_y + line_height; + + let width = 0.35 * line_height; + let highlight_origin = bounds.origin + point(-width, start_y); + let highlight_size = size(width * 2., end_y - start_y); + Bounds::new(highlight_origin, highlight_size) + } + }, + } + } + + fn paint_blamed_display_rows(&self, layout: &mut EditorLayout, cx: &mut WindowContext) { + let Some(blamed_display_rows) = layout.blamed_display_rows.take() else { + return; + }; + + cx.paint_layer(layout.gutter_hitbox.bounds, |cx| { + for mut blame_element in blamed_display_rows.into_iter() { + blame_element.paint(cx); + } + }) + } + + fn paint_text(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + cx.with_content_mask( + Some(ContentMask { + bounds: layout.text_hitbox.bounds, + }), + |cx| { + let cursor_style = if self + .editor + .read(cx) + .hovered_link_state + .as_ref() + .is_some_and(|hovered_link_state| !hovered_link_state.links.is_empty()) + { + CursorStyle::PointingHand + } else { + CursorStyle::IBeam + }; + cx.set_cursor_style(cursor_style, &layout.text_hitbox); + + cx.with_element_namespace("folds", |cx| self.paint_folds(layout, cx)); + let invisible_display_ranges = self.paint_highlights(layout, cx); + self.paint_lines(&invisible_display_ranges, layout, cx); + self.paint_redactions(layout, cx); + self.paint_cursors(layout, cx); + self.paint_inline_blame(layout, cx); + }, + ) + } + + fn paint_highlights( + &mut self, + layout: &mut EditorLayout, + cx: &mut WindowContext, + ) -> SmallVec<[Range; 32]> { + cx.paint_layer(layout.text_hitbox.bounds, |cx| { + let mut invisible_display_ranges = SmallVec::<[Range; 32]>::new(); + let line_end_overshoot = 0.15 * layout.position_map.line_height; + for (range, color) in &layout.highlighted_ranges { + self.paint_highlighted_range( + range.clone(), + *color, + Pixels::ZERO, + line_end_overshoot, + layout, + cx, + ); + } + + let corner_radius = 0.15 * layout.position_map.line_height; + + for (player_color, selections) in &layout.selections { + for selection in selections.into_iter() { + self.paint_highlighted_range( + selection.range.clone(), + player_color.selection, + corner_radius, + corner_radius * 2., + layout, + cx, + ); + + if selection.is_local && !selection.range.is_empty() { + invisible_display_ranges.push(selection.range.clone()); + } + } + } + invisible_display_ranges + }) + } + + fn paint_lines( + &mut self, + invisible_display_ranges: &[Range], + layout: &EditorLayout, + cx: &mut WindowContext, + ) { + let whitespace_setting = self + .editor + .read(cx) + .buffer + .read(cx) + .settings_at(0, cx) + .show_whitespaces; + + for (ix, line_with_invisibles) in layout.position_map.line_layouts.iter().enumerate() { + let row = DisplayRow(layout.visible_display_row_range.start.0 + ix as u32); + line_with_invisibles.draw( + layout, + row, + layout.content_origin, + whitespace_setting, + invisible_display_ranges, + cx, + ) + } + } + + fn paint_redactions(&mut self, layout: 
&EditorLayout, cx: &mut WindowContext) { + if layout.redacted_ranges.is_empty() { + return; + } + + let line_end_overshoot = layout.line_end_overshoot(); + + // A softer than perfect black + let redaction_color = gpui::rgb(0x0e1111); + + cx.paint_layer(layout.text_hitbox.bounds, |cx| { + for range in layout.redacted_ranges.iter() { + self.paint_highlighted_range( + range.clone(), + redaction_color.into(), + Pixels::ZERO, + line_end_overshoot, + layout, + cx, + ); + } + }); + } + + fn paint_cursors(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + for cursor in &mut layout.visible_cursors { + cursor.paint(layout.content_origin, cx); + } + } + + fn paint_scrollbar(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + let Some(scrollbar_layout) = layout.scrollbar_layout.as_ref() else { + return; + }; + + let thumb_bounds = scrollbar_layout.thumb_bounds(); + if scrollbar_layout.visible { + cx.paint_layer(scrollbar_layout.hitbox.bounds, |cx| { + cx.paint_quad(quad( + scrollbar_layout.hitbox.bounds, + Corners::default(), + cx.theme().colors().scrollbar_track_background, + Edges { + top: Pixels::ZERO, + right: Pixels::ZERO, + bottom: Pixels::ZERO, + left: ScrollbarLayout::BORDER_WIDTH, + }, + cx.theme().colors().scrollbar_track_border, + )); + + let fast_markers = + self.collect_fast_scrollbar_markers(layout, scrollbar_layout, cx); + // Refresh slow scrollbar markers in the background. Below, we paint whatever markers have already been computed. + self.refresh_slow_scrollbar_markers(layout, scrollbar_layout, cx); + + let markers = self.editor.read(cx).scrollbar_marker_state.markers.clone(); + for marker in markers.iter().chain(&fast_markers) { + let mut marker = marker.clone(); + marker.bounds.origin += scrollbar_layout.hitbox.origin; + cx.paint_quad(marker); + } + + cx.paint_quad(quad( + thumb_bounds, + Corners::default(), + cx.theme().colors().scrollbar_thumb_background, + Edges { + top: Pixels::ZERO, + right: Pixels::ZERO, + bottom: Pixels::ZERO, + left: ScrollbarLayout::BORDER_WIDTH, + }, + cx.theme().colors().scrollbar_thumb_border, + )); + }); + } + + cx.set_cursor_style(CursorStyle::Arrow, &scrollbar_layout.hitbox); + + let row_height = scrollbar_layout.row_height; + let row_range = scrollbar_layout.visible_row_range.clone(); + + cx.on_mouse_event({ + let editor = self.editor.clone(); + let hitbox = scrollbar_layout.hitbox.clone(); + let mut mouse_position = cx.mouse_position(); + move |event: &MouseMoveEvent, phase, cx| { + if phase == DispatchPhase::Capture { + return; + } + + editor.update(cx, |editor, cx| { + if event.pressed_button == Some(MouseButton::Left) + && editor.scroll_manager.is_dragging_scrollbar() + { + let y = mouse_position.y; + let new_y = event.position.y; + if (hitbox.top()..hitbox.bottom()).contains(&y) { + let mut position = editor.scroll_position(cx); + position.y += (new_y - y) / row_height; + if position.y < 0.0 { + position.y = 0.0; + } + editor.set_scroll_position(position, cx); + } + + cx.stop_propagation(); + } else { + editor.scroll_manager.set_is_dragging_scrollbar(false, cx); + if hitbox.is_hovered(cx) { + editor.scroll_manager.show_scrollbar(cx); + } + } + mouse_position = event.position; + }) + } + }); + + if self.editor.read(cx).scroll_manager.is_dragging_scrollbar() { + cx.on_mouse_event({ + let editor = self.editor.clone(); + move |_: &MouseUpEvent, phase, cx| { + if phase == DispatchPhase::Capture { + return; + } + + editor.update(cx, |editor, cx| { + editor.scroll_manager.set_is_dragging_scrollbar(false, cx); + 
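// --- Illustrative aside, not part of the diff ---
// Sketch of the scrollbar mouse math used in this handler and the one below:
// `row_height` is roughly the number of track pixels per document row, so a
// vertical pixel delta while dragging the thumb converts directly into a row
// delta, and a click on the track outside the thumb recenters the viewport
// on the clicked row. Plain numbers stand in for gpui's `Pixels`.

/// Dragging the thumb: translate a pixel delta into a new scroll row.
pub fn drag_scroll_row(current_row: f32, delta_y_px: f32, row_height_px: f32) -> f32 {
    (current_row + delta_y_px / row_height_px).max(0.0)
}

/// Clicking the track outside the thumb: jump so the clicked row is roughly
/// centered in the viewport.
pub fn track_click_row(
    click_y_px: f32,
    track_top_px: f32,
    row_height_px: f32,
    visible_rows: u32,
) -> u32 {
    let center_row = ((click_y_px - track_top_px) / row_height_px).round() as u32;
    center_row.saturating_sub(visible_rows / 2)
}
// --- end aside ---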
cx.stop_propagation(); + }); + } + }); + } else { + cx.on_mouse_event({ + let editor = self.editor.clone(); + let hitbox = scrollbar_layout.hitbox.clone(); + move |event: &MouseDownEvent, phase, cx| { + if phase == DispatchPhase::Capture || !hitbox.is_hovered(cx) { + return; + } + + editor.update(cx, |editor, cx| { + editor.scroll_manager.set_is_dragging_scrollbar(true, cx); + + let y = event.position.y; + if y < thumb_bounds.top() || thumb_bounds.bottom() < y { + let center_row = ((y - hitbox.top()) / row_height).round() as u32; + let top_row = center_row + .saturating_sub((row_range.end - row_range.start) as u32 / 2); + let mut position = editor.scroll_position(cx); + position.y = top_row as f32; + editor.set_scroll_position(position, cx); + } else { + editor.scroll_manager.show_scrollbar(cx); + } + + cx.stop_propagation(); + }); + } + }); + } + } + + fn collect_fast_scrollbar_markers( + &self, + layout: &EditorLayout, + scrollbar_layout: &ScrollbarLayout, + cx: &mut WindowContext, + ) -> Vec { + const LIMIT: usize = 100; + if !EditorSettings::get_global(cx).scrollbar.cursors || layout.cursors.len() > LIMIT { + return vec![]; + } + let cursor_ranges = layout + .cursors + .iter() + .map(|(point, color)| ColoredRange { + start: point.row(), + end: point.row(), + color: *color, + }) + .collect_vec(); + scrollbar_layout.marker_quads_for_ranges(cursor_ranges, None) + } + + fn refresh_slow_scrollbar_markers( + &self, + layout: &EditorLayout, + scrollbar_layout: &ScrollbarLayout, + cx: &mut WindowContext, + ) { + self.editor.update(cx, |editor, cx| { + if !editor.is_singleton(cx) + || !editor + .scrollbar_marker_state + .should_refresh(scrollbar_layout.hitbox.size) + { + return; + } + + let scrollbar_layout = scrollbar_layout.clone(); + let background_highlights = editor.background_highlights.clone(); + let snapshot = layout.position_map.snapshot.clone(); + let theme = cx.theme().clone(); + let scrollbar_settings = EditorSettings::get_global(cx).scrollbar; + + editor.scrollbar_marker_state.dirty = false; + editor.scrollbar_marker_state.pending_refresh = + Some(cx.spawn(|editor, mut cx| async move { + let scrollbar_size = scrollbar_layout.hitbox.size; + let scrollbar_markers = cx + .background_executor() + .spawn(async move { + let max_point = snapshot.display_snapshot.buffer_snapshot.max_point(); + let mut marker_quads = Vec::new(); + if scrollbar_settings.git_diff { + let marker_row_ranges = snapshot + .buffer_snapshot + .git_diff_hunks_in_range( + MultiBufferRow::MIN..MultiBufferRow::MAX, + ) + .map(|hunk| { + let start_display_row = + MultiBufferPoint::new(hunk.associated_range.start.0, 0) + .to_display_point(&snapshot.display_snapshot) + .row(); + let mut end_display_row = + MultiBufferPoint::new(hunk.associated_range.end.0, 0) + .to_display_point(&snapshot.display_snapshot) + .row(); + if end_display_row != start_display_row { + end_display_row.0 -= 1; + } + let color = match hunk_status(&hunk) { + DiffHunkStatus::Added => theme.status().created, + DiffHunkStatus::Modified => theme.status().modified, + DiffHunkStatus::Removed => theme.status().deleted, + }; + ColoredRange { + start: start_display_row, + end: end_display_row, + color, + } + }); + + marker_quads.extend( + scrollbar_layout + .marker_quads_for_ranges(marker_row_ranges, Some(0)), + ); + } + + for (background_highlight_id, (_, background_ranges)) in + background_highlights.iter() + { + let is_search_highlights = *background_highlight_id + == TypeId::of::(); + let is_symbol_occurrences = *background_highlight_id + == 
TypeId::of::() + || *background_highlight_id + == TypeId::of::(); + if (is_search_highlights && scrollbar_settings.search_results) + || (is_symbol_occurrences && scrollbar_settings.selected_symbol) + { + let mut color = theme.status().info; + if is_symbol_occurrences { + color.fade_out(0.5); + } + let marker_row_ranges = + background_ranges.into_iter().map(|range| { + let display_start = range + .start + .to_display_point(&snapshot.display_snapshot); + let display_end = range + .end + .to_display_point(&snapshot.display_snapshot); + ColoredRange { + start: display_start.row(), + end: display_end.row(), + color, + } + }); + marker_quads.extend( + scrollbar_layout + .marker_quads_for_ranges(marker_row_ranges, Some(1)), + ); + } + } + + if scrollbar_settings.diagnostics { + let diagnostics = snapshot + .buffer_snapshot + .diagnostics_in_range::<_, Point>( + Point::zero()..max_point, + false, + ) + // We want to sort by severity, in order to paint the most severe diagnostics last. + .sorted_by_key(|diagnostic| { + std::cmp::Reverse(diagnostic.diagnostic.severity) + }); + + let marker_row_ranges = diagnostics.into_iter().map(|diagnostic| { + let start_display = diagnostic + .range + .start + .to_display_point(&snapshot.display_snapshot); + let end_display = diagnostic + .range + .end + .to_display_point(&snapshot.display_snapshot); + let color = match diagnostic.diagnostic.severity { + DiagnosticSeverity::ERROR => theme.status().error, + DiagnosticSeverity::WARNING => theme.status().warning, + DiagnosticSeverity::INFORMATION => theme.status().info, + _ => theme.status().hint, + }; + ColoredRange { + start: start_display.row(), + end: end_display.row(), + color, + } + }); + marker_quads.extend( + scrollbar_layout + .marker_quads_for_ranges(marker_row_ranges, Some(2)), + ); + } + + Arc::from(marker_quads) + }) + .await; + + editor.update(&mut cx, |editor, cx| { + editor.scrollbar_marker_state.markers = scrollbar_markers; + editor.scrollbar_marker_state.scrollbar_size = scrollbar_size; + editor.scrollbar_marker_state.pending_refresh = None; + cx.notify(); + })?; + + Ok(()) + })); + }); + } + + #[allow(clippy::too_many_arguments)] + fn paint_highlighted_range( + &self, + range: Range, + color: Hsla, + corner_radius: Pixels, + line_end_overshoot: Pixels, + layout: &EditorLayout, + cx: &mut WindowContext, + ) { + let start_row = layout.visible_display_row_range.start; + let end_row = layout.visible_display_row_range.end; + if range.start != range.end { + let row_range = if range.end.column() == 0 { + cmp::max(range.start.row(), start_row)..cmp::min(range.end.row(), end_row) + } else { + cmp::max(range.start.row(), start_row) + ..cmp::min(range.end.row().next_row(), end_row) + }; + + let highlighted_range = HighlightedRange { + color, + line_height: layout.position_map.line_height, + corner_radius, + start_y: layout.content_origin.y + + row_range.start.as_f32() * layout.position_map.line_height + - layout.position_map.scroll_pixel_position.y, + lines: row_range + .iter_rows() + .map(|row| { + let line_layout = + &layout.position_map.line_layouts[row.minus(start_row) as usize].line; + HighlightedRangeLine { + start_x: if row == range.start.row() { + layout.content_origin.x + + line_layout.x_for_index(range.start.column() as usize) + - layout.position_map.scroll_pixel_position.x + } else { + layout.content_origin.x + - layout.position_map.scroll_pixel_position.x + }, + end_x: if row == range.end.row() { + layout.content_origin.x + + line_layout.x_for_index(range.end.column() as usize) + - 
layout.position_map.scroll_pixel_position.x + } else { + layout.content_origin.x + line_layout.width + line_end_overshoot + - layout.position_map.scroll_pixel_position.x + }, + } + }) + .collect(), + }; + + highlighted_range.paint(layout.text_hitbox.bounds, cx); + } + } + + fn paint_folds(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + if layout.folds.is_empty() { + return; + } + + cx.paint_layer(layout.text_hitbox.bounds, |cx| { + let fold_corner_radius = 0.15 * layout.position_map.line_height; + for mut fold in mem::take(&mut layout.folds) { + fold.hover_element.paint(cx); + + let hover_element = fold.hover_element.downcast_mut::>().unwrap(); + let fold_background = if hover_element.interactivity().active.unwrap() { + cx.theme().colors().ghost_element_active + } else if hover_element.interactivity().hovered.unwrap() { + cx.theme().colors().ghost_element_hover + } else { + cx.theme().colors().ghost_element_background + }; + + self.paint_highlighted_range( + fold.display_range.clone(), + fold_background, + fold_corner_radius, + fold_corner_radius * 2., + layout, + cx, + ); + } + }) + } + + fn paint_inline_blame(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + if let Some(mut inline_blame) = layout.inline_blame.take() { + cx.paint_layer(layout.text_hitbox.bounds, |cx| { + inline_blame.paint(cx); + }) + } + } + + fn paint_blocks(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + for mut block in layout.blocks.drain(..) { + block.element.paint(cx); + } + } + + fn paint_mouse_context_menu(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) { + if let Some(mouse_context_menu) = layout.mouse_context_menu.as_mut() { + mouse_context_menu.paint(cx); + } + } + + fn paint_scroll_wheel_listener(&mut self, layout: &EditorLayout, cx: &mut WindowContext) { + cx.on_mouse_event({ + let position_map = layout.position_map.clone(); + let editor = self.editor.clone(); + let hitbox = layout.hitbox.clone(); + let mut delta = ScrollDelta::default(); + + // Set a minimum scroll_sensitivity of 0.01 to make sure the user doesn't + // accidentally turn off their scrolling. 
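+            // For example (illustrative numbers), with `scroll_sensitivity` set to 0.0 in the
+            // user settings, the `.max(0.01)` clamp below still leaves an effective sensitivity
+            // of 0.01, so wheel and trackpad deltas are merely scaled way down rather than
+            // discarded entirely.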
+ let scroll_sensitivity = EditorSettings::get_global(cx).scroll_sensitivity.max(0.01); + + move |event: &ScrollWheelEvent, phase, cx| { + if phase == DispatchPhase::Bubble && hitbox.is_hovered(cx) { + delta = delta.coalesce(event.delta); + editor.update(cx, |editor, cx| { + let position_map: &PositionMap = &position_map; + + let line_height = position_map.line_height; + let max_glyph_width = position_map.em_width; + let (delta, axis) = match delta { + gpui::ScrollDelta::Pixels(mut pixels) => { + //Trackpad + let axis = position_map.snapshot.ongoing_scroll.filter(&mut pixels); + (pixels, axis) + } + + gpui::ScrollDelta::Lines(lines) => { + //Not trackpad + let pixels = + point(lines.x * max_glyph_width, lines.y * line_height); + (pixels, None) + } + }; + + let current_scroll_position = position_map.snapshot.scroll_position(); + let x = (current_scroll_position.x * max_glyph_width + - (delta.x * scroll_sensitivity)) + / max_glyph_width; + let y = (current_scroll_position.y * line_height + - (delta.y * scroll_sensitivity)) + / line_height; + let mut scroll_position = + point(x, y).clamp(&point(0., 0.), &position_map.scroll_max); + let forbid_vertical_scroll = editor.scroll_manager.forbid_vertical_scroll(); + if forbid_vertical_scroll { + scroll_position.y = current_scroll_position.y; + if scroll_position == current_scroll_position { + return; + } + } + editor.scroll(scroll_position, axis, cx); + cx.stop_propagation(); + }); + } + } + }); + } + + fn paint_mouse_listeners( + &mut self, + layout: &EditorLayout, + hovered_hunk: Option, + cx: &mut WindowContext, + ) { + self.paint_scroll_wheel_listener(layout, cx); + + cx.on_mouse_event({ + let position_map = layout.position_map.clone(); + let editor = self.editor.clone(); + let text_hitbox = layout.text_hitbox.clone(); + let gutter_hitbox = layout.gutter_hitbox.clone(); + + move |event: &MouseDownEvent, phase, cx| { + if phase == DispatchPhase::Bubble { + match event.button { + MouseButton::Left => editor.update(cx, |editor, cx| { + Self::mouse_left_down( + editor, + event, + hovered_hunk.as_ref(), + &position_map, + &text_hitbox, + &gutter_hitbox, + cx, + ); + }), + MouseButton::Right => editor.update(cx, |editor, cx| { + Self::mouse_right_down(editor, event, &position_map, &text_hitbox, cx); + }), + MouseButton::Middle => editor.update(cx, |editor, cx| { + Self::mouse_middle_down(editor, event, &position_map, &text_hitbox, cx); + }), + _ => {} + }; + } + } + }); + + cx.on_mouse_event({ + let editor = self.editor.clone(); + let position_map = layout.position_map.clone(); + let text_hitbox = layout.text_hitbox.clone(); + + move |event: &MouseUpEvent, phase, cx| { + if phase == DispatchPhase::Bubble { + editor.update(cx, |editor, cx| { + Self::mouse_up(editor, event, &position_map, &text_hitbox, cx) + }); + } + } + }); + cx.on_mouse_event({ + let position_map = layout.position_map.clone(); + let editor = self.editor.clone(); + let text_hitbox = layout.text_hitbox.clone(); + let gutter_hitbox = layout.gutter_hitbox.clone(); + + move |event: &MouseMoveEvent, phase, cx| { + if phase == DispatchPhase::Bubble { + editor.update(cx, |editor, cx| { + if event.pressed_button == Some(MouseButton::Left) + || event.pressed_button == Some(MouseButton::Middle) + { + Self::mouse_dragged( + editor, + event, + &position_map, + text_hitbox.bounds, + cx, + ) + } + + Self::mouse_moved( + editor, + event, + &position_map, + &text_hitbox, + &gutter_hitbox, + cx, + ) + }); + } + } + }); + } + + fn scrollbar_left(&self, bounds: &Bounds) -> Pixels { + 
bounds.upper_right().x - self.style.scrollbar_width + } + + fn column_pixels(&self, column: usize, cx: &WindowContext) -> Pixels { + let style = &self.style; + let font_size = style.text.font_size.to_pixels(cx.rem_size()); + let layout = cx + .text_system() + .shape_line( + SharedString::from(" ".repeat(column)), + font_size, + &[TextRun { + len: column, + font: style.text.font(), + color: Hsla::default(), + background_color: None, + underline: None, + strikethrough: None, + }], + ) + .unwrap(); + + layout.width + } + + fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &WindowContext) -> Pixels { + let digit_count = snapshot + .max_buffer_row() + .next_row() + .as_f32() + .log10() + .floor() as usize + + 1; + self.column_pixels(digit_count, cx) + } +} + +fn prepaint_gutter_button( + button: IconButton, + row: DisplayRow, + line_height: Pixels, + gutter_dimensions: &GutterDimensions, + scroll_pixel_position: gpui::Point, + gutter_hitbox: &Hitbox, + cx: &mut WindowContext<'_>, +) -> AnyElement { + let mut button = button.into_any_element(); + let available_space = size( + AvailableSpace::MinContent, + AvailableSpace::Definite(line_height), + ); + let indicator_size = button.layout_as_root(available_space, cx); + + let blame_width = gutter_dimensions + .git_blame_entries_width + .unwrap_or(Pixels::ZERO); + + let mut x = blame_width; + let available_width = gutter_dimensions.margin + gutter_dimensions.left_padding + - indicator_size.width + - blame_width; + x += available_width / 2.; + + let mut y = row.as_f32() * line_height - scroll_pixel_position.y; + y += (line_height - indicator_size.height) / 2.; + + button.prepaint_as_root(gutter_hitbox.origin + point(x, y), available_space, cx); + button +} + +fn render_inline_blame_entry( + blame: &gpui::Model, + blame_entry: BlameEntry, + style: &EditorStyle, + workspace: Option>, + cx: &mut WindowContext<'_>, +) -> AnyElement { + let relative_timestamp = blame_entry_relative_timestamp(&blame_entry, cx); + + let author = blame_entry.author.as_deref().unwrap_or_default(); + let text = format!("{}, {}", author, relative_timestamp); + + let details = blame.read(cx).details_for_entry(&blame_entry); + + let tooltip = cx.new_view(|_| BlameEntryTooltip::new(blame_entry, details, style, workspace)); + + h_flex() + .id("inline-blame") + .w_full() + .font_family(style.text.font().family) + .text_color(cx.theme().status().hint) + .line_height(style.text.line_height) + .child(Icon::new(IconName::FileGit).color(Color::Hint)) + .child(text) + .gap_2() + .hoverable_tooltip(move |_| tooltip.clone().into()) + .into_any() +} + +fn render_blame_entry( + ix: usize, + blame: &gpui::Model, + blame_entry: BlameEntry, + style: &EditorStyle, + last_used_color: &mut Option<(PlayerColor, Oid)>, + editor: View, + cx: &mut WindowContext<'_>, +) -> AnyElement { + let mut sha_color = cx + .theme() + .players() + .color_for_participant(blame_entry.sha.into()); + // If the last color we used is the same as the one we get for this line, but + // the commit SHAs are different, then we try again to get a different color. 
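+    // For example, if the previous line and this line are blamed to two different commits
+    // whose SHAs happen to resolve to the same participant color, the match below bumps this
+    // entry to `color_for_participant(index + 1)` so adjacent blame entries stay visually
+    // distinct.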
+ match *last_used_color { + Some((color, sha)) if sha != blame_entry.sha && color.cursor == sha_color.cursor => { + let index: u32 = blame_entry.sha.into(); + sha_color = cx.theme().players().color_for_participant(index + 1); + } + _ => {} + }; + last_used_color.replace((sha_color, blame_entry.sha)); + + let relative_timestamp = blame_entry_relative_timestamp(&blame_entry, cx); + + let short_commit_id = blame_entry.sha.display_short(); + + let author_name = blame_entry.author.as_deref().unwrap_or(""); + let name = util::truncate_and_trailoff(author_name, 20); + + let details = blame.read(cx).details_for_entry(&blame_entry); + + let workspace = editor.read(cx).workspace.as_ref().map(|(w, _)| w.clone()); + + let tooltip = cx.new_view(|_| { + BlameEntryTooltip::new(blame_entry.clone(), details.clone(), style, workspace) + }); + + h_flex() + .w_full() + .font_family(style.text.font().family) + .line_height(style.text.line_height) + .id(("blame", ix)) + .children([ + div() + .text_color(sha_color.cursor) + .child(short_commit_id) + .mr_2(), + div() + .w_full() + .h_flex() + .justify_between() + .text_color(cx.theme().status().hint) + .child(name) + .child(relative_timestamp), + ]) + .on_mouse_down(MouseButton::Right, { + let blame_entry = blame_entry.clone(); + let details = details.clone(); + move |event, cx| { + deploy_blame_entry_context_menu( + &blame_entry, + details.as_ref(), + editor.clone(), + event.position, + cx, + ); + } + }) + .hover(|style| style.bg(cx.theme().colors().element_hover)) + .when_some( + details.and_then(|details| details.permalink), + |this, url| { + let url = url.clone(); + this.cursor_pointer().on_click(move |_, cx| { + cx.stop_propagation(); + cx.open_url(url.as_str()) + }) + }, + ) + .hoverable_tooltip(move |_| tooltip.clone().into()) + .into_any() +} + +fn deploy_blame_entry_context_menu( + blame_entry: &BlameEntry, + details: Option<&CommitDetails>, + editor: View, + position: gpui::Point, + cx: &mut WindowContext<'_>, +) { + let context_menu = ContextMenu::build(cx, move |this, _| { + let sha = format!("{}", blame_entry.sha); + this.entry("Copy commit SHA", None, move |cx| { + cx.write_to_clipboard(ClipboardItem::new(sha.clone())); + }) + .when_some( + details.and_then(|details| details.permalink.clone()), + |this, url| this.entry("Open permalink", None, move |cx| cx.open_url(url.as_str())), + ) + }); + + editor.update(cx, move |editor, cx| { + editor.mouse_context_menu = Some(MouseContextMenu::new(position, context_menu, cx)); + cx.notify(); + }); +} + +#[derive(Debug)] +pub(crate) struct LineWithInvisibles { + pub line: ShapedLine, + invisibles: Vec, +} + +impl LineWithInvisibles { + fn from_chunks<'a>( + chunks: impl Iterator>, + text_style: &TextStyle, + max_line_len: usize, + max_line_count: usize, + line_number_layouts: &[Option], + editor_mode: EditorMode, + cx: &WindowContext, + ) -> Vec { + let mut layouts = Vec::with_capacity(max_line_count); + let mut line = String::new(); + let mut invisibles = Vec::new(); + let mut styles = Vec::new(); + let mut non_whitespace_added = false; + let mut row = 0; + let mut line_exceeded_max_len = false; + let font_size = text_style.font_size.to_pixels(cx.rem_size()); + + for highlighted_chunk in chunks.chain([HighlightedChunk { + chunk: "\n", + style: None, + is_tab: false, + }]) { + for (ix, mut line_chunk) in highlighted_chunk.chunk.split('\n').enumerate() { + if ix > 0 { + let shaped_line = cx + .text_system() + .shape_line(line.clone().into(), font_size, &styles) + .unwrap(); + layouts.push(Self { + line: 
shaped_line, + invisibles: std::mem::take(&mut invisibles), + }); + + line.clear(); + styles.clear(); + row += 1; + line_exceeded_max_len = false; + non_whitespace_added = false; + if row == max_line_count { + return layouts; + } + } + + if !line_chunk.is_empty() && !line_exceeded_max_len { + let text_style = if let Some(style) = highlighted_chunk.style { + Cow::Owned(text_style.clone().highlight(style)) + } else { + Cow::Borrowed(text_style) + }; + + if line.len() + line_chunk.len() > max_line_len { + let mut chunk_len = max_line_len - line.len(); + while !line_chunk.is_char_boundary(chunk_len) { + chunk_len -= 1; + } + line_chunk = &line_chunk[..chunk_len]; + line_exceeded_max_len = true; + } + + styles.push(TextRun { + len: line_chunk.len(), + font: text_style.font(), + color: text_style.color, + background_color: text_style.background_color, + underline: text_style.underline, + strikethrough: text_style.strikethrough, + }); + + if editor_mode == EditorMode::Full { + // Line wrap pads its contents with fake whitespaces, + // avoid printing them + let inside_wrapped_string = line_number_layouts + .get(row) + .and_then(|layout| layout.as_ref()) + .is_none(); + if highlighted_chunk.is_tab { + if non_whitespace_added || !inside_wrapped_string { + invisibles.push(Invisible::Tab { + line_start_offset: line.len(), + }); + } + } else { + invisibles.extend( + line_chunk + .chars() + .enumerate() + .filter(|(_, line_char)| { + let is_whitespace = line_char.is_whitespace(); + non_whitespace_added |= !is_whitespace; + is_whitespace + && (non_whitespace_added || !inside_wrapped_string) + }) + .map(|(whitespace_index, _)| Invisible::Whitespace { + line_offset: line.len() + whitespace_index, + }), + ) + } + } + + line.push_str(line_chunk); + } + } + } + + layouts + } + + fn draw( + &self, + layout: &EditorLayout, + row: DisplayRow, + content_origin: gpui::Point, + whitespace_setting: ShowWhitespaceSetting, + selection_ranges: &[Range], + cx: &mut WindowContext, + ) { + let line_height = layout.position_map.line_height; + let line_y = line_height + * (row.as_f32() - layout.position_map.scroll_pixel_position.y / line_height); + + let line_origin = + content_origin + gpui::point(-layout.position_map.scroll_pixel_position.x, line_y); + self.line.paint(line_origin, line_height, cx).log_err(); + + self.draw_invisibles( + &selection_ranges, + layout, + content_origin, + line_y, + row, + line_height, + whitespace_setting, + cx, + ); + } + + #[allow(clippy::too_many_arguments)] + fn draw_invisibles( + &self, + selection_ranges: &[Range], + layout: &EditorLayout, + content_origin: gpui::Point, + line_y: Pixels, + row: DisplayRow, + line_height: Pixels, + whitespace_setting: ShowWhitespaceSetting, + cx: &mut WindowContext, + ) { + let allowed_invisibles_regions = match whitespace_setting { + ShowWhitespaceSetting::None => return, + ShowWhitespaceSetting::Selection => Some(selection_ranges), + ShowWhitespaceSetting::All => None, + }; + + for invisible in &self.invisibles { + let (&token_offset, invisible_symbol) = match invisible { + Invisible::Tab { line_start_offset } => (line_start_offset, &layout.tab_invisible), + Invisible::Whitespace { line_offset } => (line_offset, &layout.space_invisible), + }; + + let x_offset = self.line.x_for_index(token_offset); + let invisible_offset = + (layout.position_map.em_width - invisible_symbol.width).max(Pixels::ZERO) / 2.0; + let origin = content_origin + + gpui::point( + x_offset + invisible_offset - layout.position_map.scroll_pixel_position.x, + line_y, + ); + + if let 
Some(allowed_regions) = allowed_invisibles_regions { + let invisible_point = DisplayPoint::new(row, token_offset as u32); + if !allowed_regions + .iter() + .any(|region| region.start <= invisible_point && invisible_point < region.end) + { + continue; + } + } + invisible_symbol.paint(origin, line_height, cx).log_err(); + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum Invisible { + Tab { line_start_offset: usize }, + Whitespace { line_offset: usize }, +} + +impl EditorElement { + /// Returns the rem size to use when rendering the [`EditorElement`]. + /// + /// This allows UI elements to scale based on the `buffer_font_size`. + fn rem_size(&self, cx: &WindowContext) -> Option { + match self.editor.read(cx).mode { + EditorMode::Full => { + let buffer_font_size = self.style.text.font_size; + match buffer_font_size { + AbsoluteLength::Pixels(pixels) => { + let rem_size_scale = { + // Our default UI font size is 14px on a 16px base scale. + // This means the default UI font size is 0.875rems. + let default_font_size_scale = 14. / ui::BASE_REM_SIZE_IN_PX; + + // We then determine the delta between a single rem and the default font + // size scale. + let default_font_size_delta = 1. - default_font_size_scale; + + // Finally, we add this delta to 1rem to get the scale factor that + // should be used to scale up the UI. + 1. + default_font_size_delta + }; + + Some(pixels * rem_size_scale) + } + AbsoluteLength::Rems(rems) => { + Some(rems.to_pixels(ui::BASE_REM_SIZE_IN_PX.into())) + } + } + } + // We currently use single-line and auto-height editors in UI contexts, + // so we don't want to scale everything with the buffer font size, as it + // ends up looking off. + EditorMode::SingleLine | EditorMode::AutoHeight { .. } => None, + } + } +} + +impl Element for EditorElement { + type RequestLayoutState = (); + type PrepaintState = EditorLayout; + + fn id(&self) -> Option { + None + } + + fn request_layout( + &mut self, + _: Option<&GlobalElementId>, + cx: &mut WindowContext, + ) -> (gpui::LayoutId, ()) { + let rem_size = self.rem_size(cx); + cx.with_rem_size(rem_size, |cx| { + self.editor.update(cx, |editor, cx| { + editor.set_style(self.style.clone(), cx); + + let layout_id = match editor.mode { + EditorMode::SingleLine => { + let rem_size = cx.rem_size(); + let mut style = Style::default(); + style.size.width = relative(1.).into(); + style.size.height = self.style.text.line_height_in_pixels(rem_size).into(); + cx.request_layout(style, None) + } + EditorMode::AutoHeight { max_lines } => { + let editor_handle = cx.view().clone(); + let max_line_number_width = + self.max_line_number_width(&editor.snapshot(cx), cx); + cx.request_measured_layout( + Style::default(), + move |known_dimensions, available_space, cx| { + editor_handle + .update(cx, |editor, cx| { + compute_auto_height_layout( + editor, + max_lines, + max_line_number_width, + known_dimensions, + available_space.width, + cx, + ) + }) + .unwrap_or_default() + }, + ) + } + EditorMode::Full => { + let mut style = Style::default(); + style.size.width = relative(1.).into(); + style.size.height = relative(1.).into(); + cx.request_layout(style, None) + } + }; + + (layout_id, ()) + }) + }) + } + + fn prepaint( + &mut self, + _: Option<&GlobalElementId>, + bounds: Bounds, + _: &mut Self::RequestLayoutState, + cx: &mut WindowContext, + ) -> Self::PrepaintState { + let text_style = TextStyleRefinement { + font_size: Some(self.style.text.font_size), + line_height: Some(self.style.text.line_height), + ..Default::default() + }; + 
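+        // Worked example for `self.rem_size(cx)` (used just below), assuming a 16 px buffer
+        // font: the scale factor is 1. + (1. - 14. / 16.) = 1.125, so one rem inside a full
+        // editor becomes 16 px * 1.125 = 18 px and the UI scales with the buffer font, while
+        // single-line and auto-height editors return `None` and keep the window's rem size.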
cx.set_view_id(self.editor.entity_id()); + + let rem_size = self.rem_size(cx); + cx.with_rem_size(rem_size, |cx| { + cx.with_text_style(Some(text_style), |cx| { + cx.with_content_mask(Some(ContentMask { bounds }), |cx| { + let mut snapshot = self.editor.update(cx, |editor, cx| editor.snapshot(cx)); + let style = self.style.clone(); + + let font_id = cx.text_system().resolve_font(&style.text.font()); + let font_size = style.text.font_size.to_pixels(cx.rem_size()); + let line_height = style.text.line_height_in_pixels(cx.rem_size()); + let em_width = cx + .text_system() + .typographic_bounds(font_id, font_size, 'm') + .unwrap() + .size + .width; + let em_advance = cx + .text_system() + .advance(font_id, font_size, 'm') + .unwrap() + .width; + + let gutter_dimensions = snapshot.gutter_dimensions( + font_id, + font_size, + em_width, + self.max_line_number_width(&snapshot, cx), + cx, + ); + let text_width = bounds.size.width - gutter_dimensions.width; + + let right_margin = if snapshot.mode == EditorMode::Full { + EditorElement::SCROLLBAR_WIDTH + } else { + px(0.) + }; + let overscroll = size(em_width + right_margin, px(0.)); + + snapshot = self.editor.update(cx, |editor, cx| { + editor.last_bounds = Some(bounds); + editor.gutter_dimensions = gutter_dimensions; + editor.set_visible_line_count(bounds.size.height / line_height, cx); + + let editor_width = + text_width - gutter_dimensions.margin - overscroll.width - em_width; + let wrap_width = match editor.soft_wrap_mode(cx) { + SoftWrap::None => None, + SoftWrap::PreferLine => Some((MAX_LINE_LEN / 2) as f32 * em_advance), + SoftWrap::EditorWidth => Some(editor_width), + SoftWrap::Column(column) => { + Some(editor_width.min(column as f32 * em_advance)) + } + }; + + if editor.set_wrap_width(wrap_width, cx) { + editor.snapshot(cx) + } else { + snapshot + } + }); + + let wrap_guides = self + .editor + .read(cx) + .wrap_guides(cx) + .iter() + .map(|(guide, active)| (self.column_pixels(*guide, cx), *active)) + .collect::>(); + + let hitbox = cx.insert_hitbox(bounds, false); + let gutter_hitbox = cx.insert_hitbox( + Bounds { + origin: bounds.origin, + size: size(gutter_dimensions.width, bounds.size.height), + }, + false, + ); + let text_hitbox = cx.insert_hitbox( + Bounds { + origin: gutter_hitbox.upper_right(), + size: size(text_width, bounds.size.height), + }, + false, + ); + // Offset the content_bounds from the text_bounds by the gutter margin (which + // is roughly half a character wide) to make hit testing work more like how we want. + let content_origin = + text_hitbox.origin + point(gutter_dimensions.margin, Pixels::ZERO); + + let mut autoscroll_containing_element = false; + let mut autoscroll_horizontally = false; + self.editor.update(cx, |editor, cx| { + autoscroll_containing_element = + editor.autoscroll_requested() || editor.has_pending_selection(); + autoscroll_horizontally = + editor.autoscroll_vertically(bounds, line_height, cx); + snapshot = editor.snapshot(cx); + }); + + let mut scroll_position = snapshot.scroll_position(); + // The scroll position is a fractional point, the whole number of which represents + // the top of the window in terms of display rows. 
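+                    // For example, a scroll position of y == 3.5 means display row 3 is the
+                    // topmost visible row, scrolled half a line height past the top of the
+                    // window; the fractional part is carried into pixel space further below.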
+ let start_row = DisplayRow(scroll_position.y as u32); + let height_in_lines = bounds.size.height / line_height; + let max_row = snapshot.max_point().row(); + let end_row = cmp::min( + (scroll_position.y + height_in_lines).ceil() as u32, + max_row.next_row().0, + ); + let end_row = DisplayRow(end_row); + + let buffer_rows = snapshot + .buffer_rows(start_row) + .take((start_row..end_row).len()) + .collect::>(); + + let start_anchor = if start_row == Default::default() { + Anchor::min() + } else { + snapshot.buffer_snapshot.anchor_before( + DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left), + ) + }; + let end_anchor = if end_row > max_row { + Anchor::max() + } else { + snapshot.buffer_snapshot.anchor_before( + DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right), + ) + }; + + let highlighted_rows = self + .editor + .update(cx, |editor, cx| editor.highlighted_display_rows(cx)); + let highlighted_ranges = self.editor.read(cx).background_highlights_in_range( + start_anchor..end_anchor, + &snapshot.display_snapshot, + cx.theme().colors(), + ); + + let redacted_ranges = self.editor.read(cx).redacted_ranges( + start_anchor..end_anchor, + &snapshot.display_snapshot, + cx, + ); + + let (selections, active_rows, newest_selection_head) = self.layout_selections( + start_anchor, + end_anchor, + &snapshot, + start_row, + end_row, + cx, + ); + + let (line_numbers, fold_statuses) = self.layout_line_numbers( + start_row..end_row, + buffer_rows.clone().into_iter(), + &active_rows, + newest_selection_head, + &snapshot, + cx, + ); + + let display_hunks = self.layout_git_gutters( + line_height, + &gutter_hitbox, + start_row..end_row, + &snapshot, + cx, + ); + + let mut max_visible_line_width = Pixels::ZERO; + let line_layouts = + self.layout_lines(start_row..end_row, &line_numbers, &snapshot, cx); + for line_with_invisibles in &line_layouts { + if line_with_invisibles.line.width > max_visible_line_width { + max_visible_line_width = line_with_invisibles.line.width; + } + } + + let longest_line_width = + layout_line(snapshot.longest_row(), &snapshot, &style, cx) + .unwrap() + .width; + let mut scroll_width = + longest_line_width.max(max_visible_line_width) + overscroll.width; + + let mut blocks = cx.with_element_namespace("blocks", |cx| { + self.build_blocks( + start_row..end_row, + &snapshot, + &hitbox, + &text_hitbox, + &mut scroll_width, + &gutter_dimensions, + em_width, + gutter_dimensions.width + gutter_dimensions.margin, + line_height, + &line_layouts, + cx, + ) + }); + + let scroll_pixel_position = point( + scroll_position.x * em_width, + scroll_position.y * line_height, + ); + + let mut inline_blame = None; + if let Some(newest_selection_head) = newest_selection_head { + let display_row = newest_selection_head.row(); + if (start_row..end_row).contains(&display_row) { + let line_layout = &line_layouts[display_row.minus(start_row) as usize]; + inline_blame = self.layout_inline_blame( + display_row, + &snapshot.display_snapshot, + line_layout, + em_width, + content_origin, + scroll_pixel_position, + line_height, + cx, + ); + } + } + + let blamed_display_rows = self.layout_blame_entries( + buffer_rows.into_iter(), + em_width, + scroll_position, + line_height, + &gutter_hitbox, + gutter_dimensions.git_blame_entries_width, + cx, + ); + + let scroll_max = point( + ((scroll_width - text_hitbox.size.width) / em_width).max(0.0), + max_row.as_f32(), + ); + + self.editor.update(cx, |editor, cx| { + let clamped = editor.scroll_manager.clamp_scroll_left(scroll_max.x); + + let autoscrolled = 
if autoscroll_horizontally { + editor.autoscroll_horizontally( + start_row, + text_hitbox.size.width, + scroll_width, + em_width, + &line_layouts, + cx, + ) + } else { + false + }; + + if clamped || autoscrolled { + snapshot = editor.snapshot(cx); + scroll_position = snapshot.scroll_position(); + } + }); + + cx.with_element_namespace("blocks", |cx| { + self.layout_blocks( + &mut blocks, + &hitbox, + line_height, + scroll_pixel_position, + cx, + ); + }); + + let cursors = self.collect_cursors(&snapshot, cx); + let visible_row_range = start_row..end_row; + let non_visible_cursors = cursors + .iter() + .any(move |c| !visible_row_range.contains(&c.0.row())); + + let visible_cursors = self.layout_visible_cursors( + &snapshot, + &selections, + start_row..end_row, + &line_layouts, + &text_hitbox, + content_origin, + scroll_position, + scroll_pixel_position, + line_height, + em_width, + autoscroll_containing_element, + cx, + ); + + let scrollbar_layout = self.layout_scrollbar( + &snapshot, + bounds, + scroll_position, + height_in_lines, + non_visible_cursors, + cx, + ); + + let folds = cx.with_element_namespace("folds", |cx| { + self.layout_folds( + &snapshot, + content_origin, + start_anchor..end_anchor, + start_row..end_row, + scroll_pixel_position, + line_height, + &line_layouts, + cx, + ) + }); + + let gutter_settings = EditorSettings::get_global(cx).gutter; + + let mut context_menu_visible = false; + let mut code_actions_indicator = None; + if let Some(newest_selection_head) = newest_selection_head { + if (start_row..end_row).contains(&newest_selection_head.row()) { + context_menu_visible = self.layout_context_menu( + line_height, + &hitbox, + &text_hitbox, + content_origin, + start_row, + scroll_pixel_position, + &line_layouts, + newest_selection_head, + gutter_dimensions.width - gutter_dimensions.left_padding, + cx, + ); + if gutter_settings.code_actions { + let newest_selection_point = + newest_selection_head.to_point(&snapshot.display_snapshot); + let buffer = snapshot.buffer_snapshot.buffer_line_for_row( + MultiBufferRow(newest_selection_point.row), + ); + if let Some((buffer, range)) = buffer { + let buffer_id = buffer.remote_id(); + let row = range.start.row; + let has_test_indicator = + self.editor.read(cx).tasks.contains_key(&(buffer_id, row)); + + if !has_test_indicator { + code_actions_indicator = self + .layout_code_actions_indicator( + line_height, + newest_selection_head, + scroll_pixel_position, + &gutter_dimensions, + &gutter_hitbox, + cx, + ); + } + } + } + } + } + + let test_indicators = self.layout_run_indicators( + line_height, + scroll_pixel_position, + &gutter_dimensions, + &gutter_hitbox, + &snapshot, + cx, + ); + + if !context_menu_visible && !cx.has_active_drag() { + self.layout_hover_popovers( + &snapshot, + &hitbox, + &text_hitbox, + start_row..end_row, + content_origin, + scroll_pixel_position, + &line_layouts, + line_height, + em_width, + cx, + ); + } + + let mouse_context_menu = self.layout_mouse_context_menu(cx); + + let fold_indicators = if gutter_settings.folds { + cx.with_element_namespace("gutter_fold_indicators", |cx| { + self.layout_gutter_fold_indicators( + fold_statuses, + line_height, + &gutter_dimensions, + gutter_settings, + scroll_pixel_position, + &gutter_hitbox, + cx, + ) + }) + } else { + Vec::new() + }; + + let invisible_symbol_font_size = font_size / 2.; + let tab_invisible = cx + .text_system() + .shape_line( + "→".into(), + invisible_symbol_font_size, + &[TextRun { + len: "→".len(), + font: self.style.text.font(), + color: 
cx.theme().colors().editor_invisible, + background_color: None, + underline: None, + strikethrough: None, + }], + ) + .unwrap(); + let space_invisible = cx + .text_system() + .shape_line( + "•".into(), + invisible_symbol_font_size, + &[TextRun { + len: "•".len(), + font: self.style.text.font(), + color: cx.theme().colors().editor_invisible, + background_color: None, + underline: None, + strikethrough: None, + }], + ) + .unwrap(); + + EditorLayout { + mode: snapshot.mode, + position_map: Arc::new(PositionMap { + size: bounds.size, + scroll_pixel_position, + scroll_max, + line_layouts, + line_height, + em_width, + em_advance, + snapshot, + }), + visible_display_row_range: start_row..end_row, + wrap_guides, + hitbox, + text_hitbox, + gutter_hitbox, + gutter_dimensions, + content_origin, + scrollbar_layout, + active_rows, + highlighted_rows, + highlighted_ranges, + redacted_ranges, + line_numbers, + display_hunks, + blamed_display_rows, + inline_blame, + folds, + blocks, + cursors, + visible_cursors, + selections, + mouse_context_menu, + test_indicators, + code_actions_indicator, + fold_indicators, + tab_invisible, + space_invisible, + } + }) + }) + }) + } + + fn paint( + &mut self, + _: Option<&GlobalElementId>, + bounds: Bounds, + _: &mut Self::RequestLayoutState, + layout: &mut Self::PrepaintState, + cx: &mut WindowContext, + ) { + let focus_handle = self.editor.focus_handle(cx); + let key_context = self.editor.read(cx).key_context(cx); + cx.set_focus_handle(&focus_handle); + cx.set_key_context(key_context); + cx.handle_input( + &focus_handle, + ElementInputHandler::new(bounds, self.editor.clone()), + ); + self.register_actions(cx); + self.register_key_listeners(cx, layout); + + let text_style = TextStyleRefinement { + font_size: Some(self.style.text.font_size), + line_height: Some(self.style.text.line_height), + ..Default::default() + }; + let mouse_position = cx.mouse_position(); + let hovered_hunk = layout + .display_hunks + .iter() + .find_map(|(hunk, hunk_hitbox)| match hunk { + DisplayDiffHunk::Folded { .. } => None, + DisplayDiffHunk::Unfolded { + diff_base_byte_range, + multi_buffer_range, + status, + .. 
+                } => {
+                    if hunk_hitbox
+                        .as_ref()
+                        .map(|hitbox| hitbox.contains(&mouse_position))
+                        .unwrap_or(false)
+                    {
+                        Some(HunkToExpand {
+                            status: *status,
+                            multi_buffer_range: multi_buffer_range.clone(),
+                            diff_base_byte_range: diff_base_byte_range.clone(),
+                        })
+                    } else {
+                        None
+                    }
+                }
+            });
+        let rem_size = self.rem_size(cx);
+        cx.with_rem_size(rem_size, |cx| {
+            cx.with_text_style(Some(text_style), |cx| {
+                cx.with_content_mask(Some(ContentMask { bounds }), |cx| {
+                    self.paint_mouse_listeners(layout, hovered_hunk, cx);
+                    self.paint_background(layout, cx);
+                    if layout.gutter_hitbox.size.width > Pixels::ZERO {
+                        self.paint_gutter(layout, cx)
+                    }
+
+                    self.paint_text(layout, cx);
+
+                    if !layout.blocks.is_empty() {
+                        cx.with_element_namespace("blocks", |cx| {
+                            self.paint_blocks(layout, cx);
+                        });
+                    }
+
+                    self.paint_scrollbar(layout, cx);
+                    self.paint_mouse_context_menu(layout, cx);
+                });
+            })
+        })
+    }
+}
+
+impl IntoElement for EditorElement {
+    type Element = Self;
+
+    fn into_element(self) -> Self::Element {
+        self
+    }
+}
+
+pub struct EditorLayout {
+    position_map: Arc<PositionMap>,
+    hitbox: Hitbox,
+    text_hitbox: Hitbox,
+    gutter_hitbox: Hitbox,
+    gutter_dimensions: GutterDimensions,
+    content_origin: gpui::Point<Pixels>,
+    scrollbar_layout: Option<ScrollbarLayout>,
+    mode: EditorMode,
+    wrap_guides: SmallVec<[(Pixels, bool); 2]>,
+    visible_display_row_range: Range<DisplayRow>,
+    active_rows: BTreeMap<DisplayRow, bool>,
+    highlighted_rows: BTreeMap<DisplayRow, Hsla>,
+    line_numbers: Vec<Option<ShapedLine>>,
+    display_hunks: Vec<(DisplayDiffHunk, Option<Hitbox>)>,
+    blamed_display_rows: Option<Vec<AnyElement>>,
+    inline_blame: Option<AnyElement>,
+    folds: Vec<FoldLayout>,
+    blocks: Vec<BlockLayout>,
+    highlighted_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
+    redacted_ranges: Vec<Range<DisplayPoint>>,
+    cursors: Vec<(DisplayPoint, Hsla)>,
+    visible_cursors: Vec<CursorLayout>,
+    selections: Vec<(PlayerColor, Vec<SelectionLayout>)>,
+    code_actions_indicator: Option<AnyElement>,
+    test_indicators: Vec<AnyElement>,
+    fold_indicators: Vec<Option<AnyElement>>,
+    mouse_context_menu: Option<AnyElement>,
+    tab_invisible: ShapedLine,
+    space_invisible: ShapedLine,
+}
+
+impl EditorLayout {
+    fn line_end_overshoot(&self) -> Pixels {
+        0.15 * self.position_map.line_height
+    }
+}
+
+struct ColoredRange<T> {
+    start: T,
+    end: T,
+    color: Hsla,
+}
+
+#[derive(Clone)]
+struct ScrollbarLayout {
+    hitbox: Hitbox,
+    visible_row_range: Range<f32>,
+    visible: bool,
+    row_height: Pixels,
+    thumb_height: Pixels,
+}
+
+impl ScrollbarLayout {
+    const BORDER_WIDTH: Pixels = px(1.0);
+    const LINE_MARKER_HEIGHT: Pixels = px(2.0);
+    const MIN_MARKER_HEIGHT: Pixels = px(5.0);
+    const MIN_THUMB_HEIGHT: Pixels = px(20.0);
+
+    fn thumb_bounds(&self) -> Bounds<Pixels> {
+        let thumb_top = self.y_for_row(self.visible_row_range.start);
+        let thumb_bottom = thumb_top + self.thumb_height;
+        Bounds::from_corners(
+            point(self.hitbox.left(), thumb_top),
+            point(self.hitbox.right(), thumb_bottom),
+        )
+    }
+
+    fn y_for_row(&self, row: f32) -> Pixels {
+        self.hitbox.top() + row * self.row_height
+    }
+
+    fn marker_quads_for_ranges(
+        &self,
+        row_ranges: impl IntoIterator<Item = ColoredRange<DisplayRow>>,
+        column: Option<usize>,
+    ) -> Vec<PaintQuad> {
+        struct MinMax {
+            min: Pixels,
+            max: Pixels,
+        }
+        let (x_range, height_limit) = if let Some(column) = column {
+            let column_width = px(((self.hitbox.size.width - Self::BORDER_WIDTH).0 / 3.0).floor());
+            let start = Self::BORDER_WIDTH + (column as f32 * column_width);
+            let end = start + column_width;
+            (
+                Range { start, end },
+                MinMax {
+                    min: Self::MIN_MARKER_HEIGHT,
+                    max: px(f32::MAX),
+                },
+            )
+        } else {
+            (
+                Range {
+                    start: Self::BORDER_WIDTH,
+                    end: self.hitbox.size.width,
+                },
+                MinMax {
+                    min: Self::LINE_MARKER_HEIGHT,
+                    max: Self::LINE_MARKER_HEIGHT,
+                },
+            )
+        };
+
+        let row_to_y = |row: DisplayRow| row.as_f32() * self.row_height;
+        let mut pixel_ranges = row_ranges
+            .into_iter()
+            .map(|range| {
+                let start_y = row_to_y(range.start);
+                let end_y = row_to_y(range.end)
+                    + self.row_height.max(height_limit.min).min(height_limit.max);
+                ColoredRange {
+                    start: start_y,
+                    end: end_y,
+                    color: range.color,
+                }
+            })
+            .peekable();
+
+        let mut quads = Vec::new();
+        while let Some(mut pixel_range) = pixel_ranges.next() {
+            while let Some(next_pixel_range) = pixel_ranges.peek() {
+                if pixel_range.end >= next_pixel_range.start - px(1.0)
+                    && pixel_range.color == next_pixel_range.color
+                {
+                    pixel_range.end = next_pixel_range.end.max(pixel_range.end);
+                    pixel_ranges.next();
+                } else {
+                    break;
+                }
+            }
+
+            let bounds = Bounds::from_corners(
+                point(x_range.start, pixel_range.start),
+                point(x_range.end, pixel_range.end),
+            );
+            quads.push(quad(
+                bounds,
+                Corners::default(),
+                pixel_range.color,
+                Edges::default(),
+                Hsla::transparent_black(),
+            ));
+        }
+
+        quads
+    }
+}
+
+struct FoldLayout {
+    display_range: Range<DisplayPoint>,
+    hover_element: AnyElement,
+}
+
+struct PositionMap {
+    size: Size<Pixels>,
+    line_height: Pixels,
+    scroll_pixel_position: gpui::Point<Pixels>,
+    scroll_max: gpui::Point<f32>,
+    em_width: Pixels,
+    em_advance: Pixels,
+    line_layouts: Vec<LineWithInvisibles>,
+    snapshot: EditorSnapshot,
+}
+
+#[derive(Debug, Copy, Clone)]
+pub struct PointForPosition {
+    pub previous_valid: DisplayPoint,
+    pub next_valid: DisplayPoint,
+    pub exact_unclipped: DisplayPoint,
+    pub column_overshoot_after_line_end: u32,
+}
+
+impl PointForPosition {
+    pub fn as_valid(&self) -> Option<DisplayPoint> {
+        if self.previous_valid == self.exact_unclipped && self.next_valid == self.exact_unclipped {
+            Some(self.previous_valid)
+        } else {
+            None
+        }
+    }
+}
+
+impl PositionMap {
+    fn point_for_position(
+        &self,
+        text_bounds: Bounds<Pixels>,
+        position: gpui::Point<Pixels>,
+    ) -> PointForPosition {
+        let scroll_position = self.snapshot.scroll_position();
+        let position = position - text_bounds.origin;
+        let y = position.y.max(px(0.)).min(self.size.height);
+        let x = position.x + (scroll_position.x * self.em_width);
+        let row = ((y / self.line_height) + scroll_position.y) as u32;
+
+        let (column, x_overshoot_after_line_end) = if let Some(line) = self
+            .line_layouts
+            .get(row as usize - scroll_position.y as usize)
+            .map(|LineWithInvisibles { line, ..
}| line) + { + if let Some(ix) = line.index_for_x(x) { + (ix as u32, px(0.)) + } else { + (line.len as u32, px(0.).max(x - line.width)) + } + } else { + (0, x) + }; + + let mut exact_unclipped = DisplayPoint::new(DisplayRow(row), column); + let previous_valid = self.snapshot.clip_point(exact_unclipped, Bias::Left); + let next_valid = self.snapshot.clip_point(exact_unclipped, Bias::Right); + + let column_overshoot_after_line_end = (x_overshoot_after_line_end / self.em_advance) as u32; + *exact_unclipped.column_mut() += column_overshoot_after_line_end; + PointForPosition { + previous_valid, + next_valid, + exact_unclipped, + column_overshoot_after_line_end, + } + } +} + +struct BlockLayout { + row: DisplayRow, + element: AnyElement, + available_space: Size, + style: BlockStyle, +} + +fn layout_line( + row: DisplayRow, + snapshot: &EditorSnapshot, + style: &EditorStyle, + cx: &WindowContext, +) -> Result { + let mut line = snapshot.line(row); + + let len = { + let line_len = line.len(); + if line_len > MAX_LINE_LEN { + let mut len = MAX_LINE_LEN; + while !line.is_char_boundary(len) { + len -= 1; + } + + line.truncate(len); + len + } else { + line_len + } + }; + + cx.text_system().shape_line( + line.into(), + style.text.font_size.to_pixels(cx.rem_size()), + &[TextRun { + len, + font: style.text.font(), + color: Hsla::default(), + background_color: None, + underline: None, + strikethrough: None, + }], + ) +} + +pub struct CursorLayout { + origin: gpui::Point, + block_width: Pixels, + line_height: Pixels, + color: Hsla, + shape: CursorShape, + block_text: Option, + cursor_name: Option, +} + +#[derive(Debug)] +pub struct CursorName { + string: SharedString, + color: Hsla, + is_top_row: bool, +} + +impl CursorLayout { + pub fn new( + origin: gpui::Point, + block_width: Pixels, + line_height: Pixels, + color: Hsla, + shape: CursorShape, + block_text: Option, + ) -> CursorLayout { + CursorLayout { + origin, + block_width, + line_height, + color, + shape, + block_text, + cursor_name: None, + } + } + + pub fn bounding_rect(&self, origin: gpui::Point) -> Bounds { + Bounds { + origin: self.origin + origin, + size: size(self.block_width, self.line_height), + } + } + + fn bounds(&self, origin: gpui::Point) -> Bounds { + match self.shape { + CursorShape::Bar => Bounds { + origin: self.origin + origin, + size: size(px(2.0), self.line_height), + }, + CursorShape::Block | CursorShape::Hollow => Bounds { + origin: self.origin + origin, + size: size(self.block_width, self.line_height), + }, + CursorShape::Underscore => Bounds { + origin: self.origin + + origin + + gpui::Point::new(Pixels::ZERO, self.line_height - px(2.0)), + size: size(self.block_width, px(2.0)), + }, + } + } + + pub fn layout( + &mut self, + origin: gpui::Point, + cursor_name: Option, + cx: &mut WindowContext, + ) { + if let Some(cursor_name) = cursor_name { + let bounds = self.bounds(origin); + let text_size = self.line_height / 1.5; + + let name_origin = if cursor_name.is_top_row { + point(bounds.right() - px(1.), bounds.top()) + } else { + point(bounds.left(), bounds.top() - text_size / 2. 
- px(1.)) + }; + let mut name_element = div() + .bg(self.color) + .text_size(text_size) + .px_0p5() + .line_height(text_size + px(2.)) + .text_color(cursor_name.color) + .child(cursor_name.string.clone()) + .into_any_element(); + + name_element.prepaint_as_root( + name_origin, + size(AvailableSpace::MinContent, AvailableSpace::MinContent), + cx, + ); + + self.cursor_name = Some(name_element); + } + } + + pub fn paint(&mut self, origin: gpui::Point, cx: &mut WindowContext) { + let bounds = self.bounds(origin); + + //Draw background or border quad + let cursor = if matches!(self.shape, CursorShape::Hollow) { + outline(bounds, self.color) + } else { + fill(bounds, self.color) + }; + + if let Some(name) = &mut self.cursor_name { + name.paint(cx); + } + + cx.paint_quad(cursor); + + if let Some(block_text) = &self.block_text { + block_text + .paint(self.origin + origin, self.line_height, cx) + .log_err(); + } + } + + pub fn shape(&self) -> CursorShape { + self.shape + } +} + +#[derive(Debug)] +pub struct HighlightedRange { + pub start_y: Pixels, + pub line_height: Pixels, + pub lines: Vec, + pub color: Hsla, + pub corner_radius: Pixels, +} + +#[derive(Debug)] +pub struct HighlightedRangeLine { + pub start_x: Pixels, + pub end_x: Pixels, +} + +impl HighlightedRange { + pub fn paint(&self, bounds: Bounds, cx: &mut WindowContext) { + if self.lines.len() >= 2 && self.lines[0].start_x > self.lines[1].end_x { + self.paint_lines(self.start_y, &self.lines[0..1], bounds, cx); + self.paint_lines( + self.start_y + self.line_height, + &self.lines[1..], + bounds, + cx, + ); + } else { + self.paint_lines(self.start_y, &self.lines, bounds, cx); + } + } + + fn paint_lines( + &self, + start_y: Pixels, + lines: &[HighlightedRangeLine], + _bounds: Bounds, + cx: &mut WindowContext, + ) { + if lines.is_empty() { + return; + } + + let first_line = lines.first().unwrap(); + let last_line = lines.last().unwrap(); + + let first_top_left = point(first_line.start_x, start_y); + let first_top_right = point(first_line.end_x, start_y); + + let curve_height = point(Pixels::ZERO, self.corner_radius); + let curve_width = |start_x: Pixels, end_x: Pixels| { + let max = (end_x - start_x) / 2.; + let width = if max < self.corner_radius { + max + } else { + self.corner_radius + }; + + point(width, Pixels::ZERO) + }; + + let top_curve_width = curve_width(first_line.start_x, first_line.end_x); + let mut path = gpui::Path::new(first_top_right - top_curve_width); + path.curve_to(first_top_right + curve_height, first_top_right); + + let mut iter = lines.iter().enumerate().peekable(); + while let Some((ix, line)) = iter.next() { + let bottom_right = point(line.end_x, start_y + (ix + 1) as f32 * self.line_height); + + if let Some((_, next_line)) = iter.peek() { + let next_top_right = point(next_line.end_x, bottom_right.y); + + match next_top_right.x.partial_cmp(&bottom_right.x).unwrap() { + Ordering::Equal => { + path.line_to(bottom_right); + } + Ordering::Less => { + let curve_width = curve_width(next_top_right.x, bottom_right.x); + path.line_to(bottom_right - curve_height); + if self.corner_radius > Pixels::ZERO { + path.curve_to(bottom_right - curve_width, bottom_right); + } + path.line_to(next_top_right + curve_width); + if self.corner_radius > Pixels::ZERO { + path.curve_to(next_top_right + curve_height, next_top_right); + } + } + Ordering::Greater => { + let curve_width = curve_width(bottom_right.x, next_top_right.x); + path.line_to(bottom_right - curve_height); + if self.corner_radius > Pixels::ZERO { + path.curve_to(bottom_right + 
curve_width, bottom_right); + } + path.line_to(next_top_right - curve_width); + if self.corner_radius > Pixels::ZERO { + path.curve_to(next_top_right + curve_height, next_top_right); + } + } + } + } else { + let curve_width = curve_width(line.start_x, line.end_x); + path.line_to(bottom_right - curve_height); + if self.corner_radius > Pixels::ZERO { + path.curve_to(bottom_right - curve_width, bottom_right); + } + + let bottom_left = point(line.start_x, bottom_right.y); + path.line_to(bottom_left + curve_width); + if self.corner_radius > Pixels::ZERO { + path.curve_to(bottom_left - curve_height, bottom_left); + } + } + } + + if first_line.start_x > last_line.start_x { + let curve_width = curve_width(last_line.start_x, first_line.start_x); + let second_top_left = point(last_line.start_x, start_y + self.line_height); + path.line_to(second_top_left + curve_height); + if self.corner_radius > Pixels::ZERO { + path.curve_to(second_top_left + curve_width, second_top_left); + } + let first_bottom_left = point(first_line.start_x, second_top_left.y); + path.line_to(first_bottom_left - curve_width); + if self.corner_radius > Pixels::ZERO { + path.curve_to(first_bottom_left - curve_height, first_bottom_left); + } + } + + path.line_to(first_top_left + curve_height); + if self.corner_radius > Pixels::ZERO { + path.curve_to(first_top_left + top_curve_width, first_top_left); + } + path.line_to(first_top_right - top_curve_width); + + cx.paint_path(path, self.color); + } +} + +pub fn scale_vertical_mouse_autoscroll_delta(delta: Pixels) -> f32 { + (delta.pow(1.5) / 100.0).into() +} + +fn scale_horizontal_mouse_autoscroll_delta(delta: Pixels) -> f32 { + (delta.pow(1.2) / 300.0).into() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + display_map::{BlockDisposition, BlockProperties}, + editor_tests::{init_test, update_test_language_settings}, + Editor, MultiBuffer, + }; + use gpui::{TestAppContext, VisualTestContext}; + use language::language_settings; + use log::info; + use std::num::NonZeroU32; + use ui::Context; + use util::test::sample_text; + + #[gpui::test] + fn test_shape_line_numbers(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let window = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); + Editor::new(EditorMode::Full, buffer, None, cx) + }); + + let editor = window.root(cx).unwrap(); + let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + let element = EditorElement::new(&editor, style); + let snapshot = window.update(cx, |editor, cx| editor.snapshot(cx)).unwrap(); + + let layouts = cx + .update_window(*window, |_, cx| { + element + .layout_line_numbers( + DisplayRow(0)..DisplayRow(6), + (0..6).map(MultiBufferRow).map(Some), + &Default::default(), + Some(DisplayPoint::new(DisplayRow(0), 0)), + &snapshot, + cx, + ) + .0 + }) + .unwrap(); + assert_eq!(layouts.len(), 6); + + let relative_rows = window + .update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + element.calculate_relative_line_numbers( + &snapshot, + &(DisplayRow(0)..DisplayRow(6)), + Some(DisplayRow(3)), + ) + }) + .unwrap(); + assert_eq!(relative_rows[&DisplayRow(0)], 3); + assert_eq!(relative_rows[&DisplayRow(1)], 2); + assert_eq!(relative_rows[&DisplayRow(2)], 1); + // current line has no relative number + assert_eq!(relative_rows[&DisplayRow(4)], 1); + assert_eq!(relative_rows[&DisplayRow(5)], 2); + + // works if cursor is before screen + let relative_rows = window + .update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + 
element.calculate_relative_line_numbers( + &snapshot, + &(DisplayRow(3)..DisplayRow(6)), + Some(DisplayRow(1)), + ) + }) + .unwrap(); + assert_eq!(relative_rows.len(), 3); + assert_eq!(relative_rows[&DisplayRow(3)], 2); + assert_eq!(relative_rows[&DisplayRow(4)], 3); + assert_eq!(relative_rows[&DisplayRow(5)], 4); + + // works if cursor is after screen + let relative_rows = window + .update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + element.calculate_relative_line_numbers( + &snapshot, + &(DisplayRow(0)..DisplayRow(3)), + Some(DisplayRow(6)), + ) + }) + .unwrap(); + assert_eq!(relative_rows.len(), 3); + assert_eq!(relative_rows[&DisplayRow(0)], 5); + assert_eq!(relative_rows[&DisplayRow(1)], 4); + assert_eq!(relative_rows[&DisplayRow(2)], 3); + } + + #[gpui::test] + async fn test_vim_visual_selections(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let window = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&(sample_text(6, 6, 'a') + "\n"), cx); + Editor::new(EditorMode::Full, buffer, None, cx) + }); + let cx = &mut VisualTestContext::from_window(*window, cx); + let editor = window.root(cx).unwrap(); + let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + + window + .update(cx, |editor, cx| { + editor.cursor_shape = CursorShape::Block; + editor.change_selections(None, cx, |s| { + s.select_ranges([ + Point::new(0, 0)..Point::new(1, 0), + Point::new(3, 2)..Point::new(3, 3), + Point::new(5, 6)..Point::new(6, 0), + ]); + }); + }) + .unwrap(); + + let (_, state) = cx.draw(point(px(500.), px(500.)), size(px(500.), px(500.)), |_| { + EditorElement::new(&editor, style) + }); + + assert_eq!(state.selections.len(), 1); + let local_selections = &state.selections[0].1; + assert_eq!(local_selections.len(), 3); + // moves cursor back one line + assert_eq!( + local_selections[0].head, + DisplayPoint::new(DisplayRow(0), 6) + ); + assert_eq!( + local_selections[0].range, + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(1), 0) + ); + + // moves cursor back one column + assert_eq!( + local_selections[1].range, + DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(3), 3) + ); + assert_eq!( + local_selections[1].head, + DisplayPoint::new(DisplayRow(3), 2) + ); + + // leaves cursor on the max point + assert_eq!( + local_selections[2].range, + DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(6), 0) + ); + assert_eq!( + local_selections[2].head, + DisplayPoint::new(DisplayRow(6), 0) + ); + + // active lines does not include 1 (even though the range of the selection does) + assert_eq!( + state.active_rows.keys().cloned().collect::>(), + vec![DisplayRow(0), DisplayRow(3), DisplayRow(5), DisplayRow(6)] + ); + + // multi-buffer support + // in DisplayPoint coordinates, this is what we're dealing with: + // 0: [[file + // 1: header]] + // 2: aaaaaa + // 3: bbbbbb + // 4: cccccc + // 5: + // 6: ... 
+ // 7: ffffff + // 8: gggggg + // 9: hhhhhh + // 10: + // 11: [[file + // 12: header]] + // 13: bbbbbb + // 14: cccccc + // 15: dddddd + let window = cx.add_window(|cx| { + let buffer = MultiBuffer::build_multi( + [ + ( + &(sample_text(8, 6, 'a') + "\n"), + vec![ + Point::new(0, 0)..Point::new(3, 0), + Point::new(4, 0)..Point::new(7, 0), + ], + ), + ( + &(sample_text(8, 6, 'a') + "\n"), + vec![Point::new(1, 0)..Point::new(3, 0)], + ), + ], + cx, + ); + Editor::new(EditorMode::Full, buffer, None, cx) + }); + let editor = window.root(cx).unwrap(); + let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + let _state = window.update(cx, |editor, cx| { + editor.cursor_shape = CursorShape::Block; + editor.change_selections(None, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(7), 0), + DisplayPoint::new(DisplayRow(10), 0)..DisplayPoint::new(DisplayRow(13), 0), + ]); + }); + }); + + let (_, state) = cx.draw(point(px(500.), px(500.)), size(px(500.), px(500.)), |_| { + EditorElement::new(&editor, style) + }); + assert_eq!(state.selections.len(), 1); + let local_selections = &state.selections[0].1; + assert_eq!(local_selections.len(), 2); + + // moves cursor on excerpt boundary back a line + // and doesn't allow selection to bleed through + assert_eq!( + local_selections[0].range, + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(6), 0) + ); + assert_eq!( + local_selections[0].head, + DisplayPoint::new(DisplayRow(5), 0) + ); + // moves cursor on buffer boundary back two lines + // and doesn't allow selection to bleed through + assert_eq!( + local_selections[1].range, + DisplayPoint::new(DisplayRow(10), 0)..DisplayPoint::new(DisplayRow(11), 0) + ); + assert_eq!( + local_selections[1].head, + DisplayPoint::new(DisplayRow(10), 0) + ); + } + + #[gpui::test] + fn test_layout_with_placeholder_text_and_blocks(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let window = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple("", cx); + Editor::new(EditorMode::Full, buffer, None, cx) + }); + let cx = &mut VisualTestContext::from_window(*window, cx); + let editor = window.root(cx).unwrap(); + let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + window + .update(cx, |editor, cx| { + editor.set_placeholder_text("hello", cx); + editor.insert_blocks( + [BlockProperties { + style: BlockStyle::Fixed, + disposition: BlockDisposition::Above, + height: 3, + position: Anchor::min(), + render: Box::new(|_| div().into_any()), + }], + None, + cx, + ); + + // Blur the editor so that it displays placeholder text. 
+ cx.blur(); + }) + .unwrap(); + + let (_, state) = cx.draw(point(px(500.), px(500.)), size(px(500.), px(500.)), |_| { + EditorElement::new(&editor, style) + }); + assert_eq!(state.position_map.line_layouts.len(), 4); + assert_eq!( + state + .line_numbers + .iter() + .map(Option::is_some) + .collect::>(), + &[false, false, false, true] + ); + } + + #[gpui::test] + fn test_all_invisibles_drawing(cx: &mut TestAppContext) { + const TAB_SIZE: u32 = 4; + + let input_text = "\t \t|\t| a b"; + let expected_invisibles = vec![ + Invisible::Tab { + line_start_offset: 0, + }, + Invisible::Whitespace { + line_offset: TAB_SIZE as usize, + }, + Invisible::Tab { + line_start_offset: TAB_SIZE as usize + 1, + }, + Invisible::Tab { + line_start_offset: TAB_SIZE as usize * 2 + 1, + }, + Invisible::Whitespace { + line_offset: TAB_SIZE as usize * 3 + 1, + }, + Invisible::Whitespace { + line_offset: TAB_SIZE as usize * 3 + 3, + }, + ]; + assert_eq!( + expected_invisibles.len(), + input_text + .chars() + .filter(|initial_char| initial_char.is_whitespace()) + .count(), + "Hardcoded expected invisibles differ from the actual ones in '{input_text}'" + ); + + init_test(cx, |s| { + s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All); + s.defaults.tab_size = NonZeroU32::new(TAB_SIZE); + }); + + let actual_invisibles = + collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, px(500.0)); + + assert_eq!(expected_invisibles, actual_invisibles); + } + + #[gpui::test] + fn test_invisibles_dont_appear_in_certain_editors(cx: &mut TestAppContext) { + init_test(cx, |s| { + s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All); + s.defaults.tab_size = NonZeroU32::new(4); + }); + + for editor_mode_without_invisibles in [ + EditorMode::SingleLine, + EditorMode::AutoHeight { max_lines: 100 }, + ] { + let invisibles = collect_invisibles_from_new_editor( + cx, + editor_mode_without_invisibles, + "\t\t\t| | a b", + px(500.0), + ); + assert!(invisibles.is_empty(), + "For editor mode {editor_mode_without_invisibles:?} no invisibles was expected but got {invisibles:?}"); + } + } + + #[gpui::test] + fn test_wrapped_invisibles_drawing(cx: &mut TestAppContext) { + let tab_size = 4; + let input_text = "a\tbcd ".repeat(9); + let repeated_invisibles = [ + Invisible::Tab { + line_start_offset: 1, + }, + Invisible::Whitespace { + line_offset: tab_size as usize + 3, + }, + Invisible::Whitespace { + line_offset: tab_size as usize + 4, + }, + Invisible::Whitespace { + line_offset: tab_size as usize + 5, + }, + ]; + let expected_invisibles = std::iter::once(repeated_invisibles) + .cycle() + .take(9) + .flatten() + .collect::>(); + assert_eq!( + expected_invisibles.len(), + input_text + .chars() + .filter(|initial_char| initial_char.is_whitespace()) + .count(), + "Hardcoded expected invisibles differ from the actual ones in '{input_text}'" + ); + info!("Expected invisibles: {expected_invisibles:?}"); + + init_test(cx, |_| {}); + + // Put the same string with repeating whitespace pattern into editors of various size, + // take deliberately small steps during resizing, to put all whitespace kinds near the wrap point. 
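+        // (The 10 px step and the 200..1000 px sweep below are arbitrary values; they only need
+        // to be fine-grained enough that every invisible kind ends up adjacent to a wrap point.)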
+ let resize_step = 10.0; + let mut editor_width = 200.0; + while editor_width <= 1000.0 { + update_test_language_settings(cx, |s| { + s.defaults.tab_size = NonZeroU32::new(tab_size); + s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All); + s.defaults.preferred_line_length = Some(editor_width as u32); + s.defaults.soft_wrap = Some(language_settings::SoftWrap::PreferredLineLength); + }); + + let actual_invisibles = collect_invisibles_from_new_editor( + cx, + EditorMode::Full, + &input_text, + px(editor_width), + ); + + // Whatever the editor size is, ensure it has the same invisible kinds in the same order + // (no good guarantees about the offsets: wrapping could trigger padding and its tests should check the offsets). + let mut i = 0; + for (actual_index, actual_invisible) in actual_invisibles.iter().enumerate() { + i = actual_index; + match expected_invisibles.get(i) { + Some(expected_invisible) => match (expected_invisible, actual_invisible) { + (Invisible::Whitespace { .. }, Invisible::Whitespace { .. }) + | (Invisible::Tab { .. }, Invisible::Tab { .. }) => {} + _ => { + panic!("At index {i}, expected invisible {expected_invisible:?} does not match actual {actual_invisible:?} by kind. Actual invisibles: {actual_invisibles:?}") + } + }, + None => panic!("Unexpected extra invisible {actual_invisible:?} at index {i}"), + } + } + let missing_expected_invisibles = &expected_invisibles[i + 1..]; + assert!( + missing_expected_invisibles.is_empty(), + "Missing expected invisibles after index {i}: {missing_expected_invisibles:?}" + ); + + editor_width += resize_step; + } + } + + fn collect_invisibles_from_new_editor( + cx: &mut TestAppContext, + editor_mode: EditorMode, + input_text: &str, + editor_width: Pixels, + ) -> Vec { + info!( + "Creating editor with mode {editor_mode:?}, width {}px and text '{input_text}'", + editor_width.0 + ); + let window = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple(&input_text, cx); + Editor::new(editor_mode, buffer, None, cx) + }); + let cx = &mut VisualTestContext::from_window(*window, cx); + let editor = window.root(cx).unwrap(); + let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + window + .update(cx, |editor, cx| { + editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); + editor.set_wrap_width(Some(editor_width), cx); + }) + .unwrap(); + let (_, state) = cx.draw(point(px(500.), px(500.)), size(px(500.), px(500.)), |_| { + EditorElement::new(&editor, style) + }); + state + .position_map + .line_layouts + .iter() + .flat_map(|line_with_invisibles| &line_with_invisibles.invisibles) + .cloned() + .collect() + } +} + +pub fn register_action( + view: &View, + cx: &mut WindowContext, + listener: impl Fn(&mut Editor, &T, &mut ViewContext) + 'static, +) { + let view = view.clone(); + cx.on_action(TypeId::of::(), move |action, phase, cx| { + let action = action.downcast_ref().unwrap(); + if phase == DispatchPhase::Bubble { + view.update(cx, |editor, cx| { + listener(editor, action, cx); + }) + } + }) +} + +fn compute_auto_height_layout( + editor: &mut Editor, + max_lines: usize, + max_line_number_width: Pixels, + known_dimensions: Size>, + available_width: AvailableSpace, + cx: &mut ViewContext, +) -> Option> { + let width = known_dimensions.width.or_else(|| { + if let AvailableSpace::Definite(available_width) = available_width { + Some(available_width) + } else { + None + } + })?; + if let Some(height) = known_dimensions.height { + return Some(size(width, height)); + } + + let style = 
editor.style.as_ref().unwrap(); + let font_id = cx.text_system().resolve_font(&style.text.font()); + let font_size = style.text.font_size.to_pixels(cx.rem_size()); + let line_height = style.text.line_height_in_pixels(cx.rem_size()); + let em_width = cx + .text_system() + .typographic_bounds(font_id, font_size, 'm') + .unwrap() + .size + .width; + + let mut snapshot = editor.snapshot(cx); + let gutter_dimensions = + snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx); + + editor.gutter_dimensions = gutter_dimensions; + let text_width = width - gutter_dimensions.width; + let overscroll = size(em_width, px(0.)); + + let editor_width = text_width - gutter_dimensions.margin - overscroll.width - em_width; + if editor.set_wrap_width(Some(editor_width), cx) { + snapshot = editor.snapshot(cx); + } + + let scroll_height = Pixels::from(snapshot.max_point().row().next_row().0) * line_height; + let height = scroll_height + .max(line_height) + .min(line_height * max_lines as f32); + + Some(size(width, height)) +} diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs new file mode 100644 index 0000000..665c649 --- /dev/null +++ b/crates/editor/src/git.rs @@ -0,0 +1,309 @@ +pub mod blame; + +use std::ops::Range; + +use git::diff::{DiffHunk, DiffHunkStatus}; +use language::Point; +use multi_buffer::{Anchor, MultiBufferRow}; + +use crate::{ + display_map::{DisplaySnapshot, ToDisplayPoint}, + hunk_status, AnchorRangeExt, DisplayRow, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DisplayDiffHunk { + Folded { + display_row: DisplayRow, + }, + + Unfolded { + diff_base_byte_range: Range, + display_row_range: Range, + multi_buffer_range: Range, + status: DiffHunkStatus, + }, +} + +impl DisplayDiffHunk { + pub fn start_display_row(&self) -> DisplayRow { + match self { + &DisplayDiffHunk::Folded { display_row } => display_row, + DisplayDiffHunk::Unfolded { + display_row_range, .. + } => display_row_range.start, + } + } + + pub fn contains_display_row(&self, display_row: DisplayRow) -> bool { + let range = match self { + &DisplayDiffHunk::Folded { display_row } => display_row..=display_row, + + DisplayDiffHunk::Unfolded { + display_row_range, .. 
+ } => display_row_range.start..=display_row_range.end, + }; + + range.contains(&display_row) + } +} + +pub fn diff_hunk_to_display( + hunk: &DiffHunk, + snapshot: &DisplaySnapshot, +) -> DisplayDiffHunk { + let hunk_start_point = Point::new(hunk.associated_range.start.0, 0); + let hunk_start_point_sub = Point::new(hunk.associated_range.start.0.saturating_sub(1), 0); + let hunk_end_point_sub = Point::new( + hunk.associated_range + .end + .0 + .saturating_sub(1) + .max(hunk.associated_range.start.0), + 0, + ); + + let status = hunk_status(hunk); + let is_removal = status == DiffHunkStatus::Removed; + + let folds_start = Point::new(hunk.associated_range.start.0.saturating_sub(2), 0); + let folds_end = Point::new(hunk.associated_range.end.0 + 2, 0); + let folds_range = folds_start..folds_end; + + let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { + let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot); + let fold_point_range = fold_point_range.start..=fold_point_range.end; + + let folded_start = fold_point_range.contains(&hunk_start_point); + let folded_end = fold_point_range.contains(&hunk_end_point_sub); + let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub); + + (folded_start && folded_end) || (is_removal && folded_start_sub) + }); + + if let Some(fold) = containing_fold { + let row = fold.range.start.to_display_point(snapshot).row(); + DisplayDiffHunk::Folded { display_row: row } + } else { + let start = hunk_start_point.to_display_point(snapshot).row(); + + let hunk_end_row = hunk.associated_range.end.max(hunk.associated_range.start); + let hunk_end_point = Point::new(hunk_end_row.0, 0); + + let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point); + let multi_buffer_end = snapshot.buffer_snapshot.anchor_before(hunk_end_point); + let end = hunk_end_point.to_display_point(snapshot).row(); + + DisplayDiffHunk::Unfolded { + display_row_range: start..end, + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status, + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use crate::Point; + use crate::{editor_tests::init_test, hunk_status}; + use gpui::{Context, TestAppContext}; + use language::Capability::ReadWrite; + use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow}; + use project::{FakeFs, Project}; + use unindent::Unindent; + #[gpui::test] + async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { + use git::diff::DiffHunkStatus; + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + + // buffer has two modified hunks with two rows each + let buffer_1 = project.update(cx, |project, cx| { + project.create_local_buffer( + " + 1.zero + 1.ONE + 1.TWO + 1.three + 1.FOUR + 1.FIVE + 1.six + " + .unindent() + .as_str(), + None, + cx, + ) + }); + buffer_1.update(cx, |buffer, cx| { + buffer.set_diff_base( + Some( + " + 1.zero + 1.one + 1.two + 1.three + 1.four + 1.five + 1.six + " + .unindent(), + ), + cx, + ); + }); + + // buffer has a deletion hunk and an insertion hunk + let buffer_2 = project.update(cx, |project, cx| { + project.create_local_buffer( + " + 2.zero + 2.one + 2.two + 2.three + 2.four + 2.five + 2.six + " + .unindent() + .as_str(), + None, + cx, + ) + }); + buffer_2.update(cx, |buffer, cx| { + buffer.set_diff_base( + Some( + " + 2.zero + 2.one + 2.one-and-a-half + 2.two + 2.three + 2.four + 2.six + " + .unindent(), + ), + cx, + ); + }); + + 
cx.background_executor.run_until_parked(); + + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + // excerpt ends in the middle of a modified hunk + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt begins in the middle of a modified hunk + ExcerptRange { + context: Point::new(5, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + // excerpt ends at a deletion + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt starts at a deletion + ExcerptRange { + context: Point::new(2, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains a deletion hunk + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains an insertion hunk + ExcerptRange { + context: Point::new(4, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer + }); + + let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); + + assert_eq!( + snapshot.text(), + " + 1.zero + 1.ONE + 1.FIVE + 1.six + 2.zero + 2.one + 2.two + 2.one + 2.two + 2.four + 2.five + 2.six" + .unindent() + ); + + let expected = [ + ( + DiffHunkStatus::Modified, + MultiBufferRow(1)..MultiBufferRow(2), + ), + ( + DiffHunkStatus::Modified, + MultiBufferRow(2)..MultiBufferRow(3), + ), + //TODO: Define better when and where removed hunks show up at range extremities + ( + DiffHunkStatus::Removed, + MultiBufferRow(6)..MultiBufferRow(6), + ), + ( + DiffHunkStatus::Removed, + MultiBufferRow(8)..MultiBufferRow(8), + ), + ( + DiffHunkStatus::Added, + MultiBufferRow(10)..MultiBufferRow(11), + ), + ]; + + assert_eq!( + snapshot + .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) + .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .collect::>(), + &expected, + ); + + assert_eq!( + snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) + .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .collect::>(), + expected + .iter() + .rev() + .cloned() + .collect::>() + .as_slice(), + ); + } +} diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs new file mode 100644 index 0000000..a7dac9c --- /dev/null +++ b/crates/editor/src/git/blame.rs @@ -0,0 +1,952 @@ +use std::{sync::Arc, time::Duration}; + +use anyhow::Result; +use collections::HashMap; +use git::{ + blame::{Blame, BlameEntry}, + parse_git_remote_url, GitHostingProvider, GitHostingProviderRegistry, Oid, PullRequest, +}; +use gpui::{Model, ModelContext, Subscription, Task}; +use http::HttpClient; +use language::{markdown, Bias, Buffer, BufferSnapshot, Edit, LanguageRegistry, ParsedMarkdown}; +use multi_buffer::MultiBufferRow; +use project::{Item, Project}; +use smallvec::SmallVec; +use sum_tree::SumTree; +use url::Url; + +#[derive(Clone, Debug, Default)] +pub struct GitBlameEntry { + pub rows: u32, + pub blame: Option, +} + +#[derive(Clone, Debug, Default)] +pub struct GitBlameEntrySummary { + rows: u32, +} + +impl sum_tree::Item for GitBlameEntry { + type Summary = GitBlameEntrySummary; + + fn summary(&self) -> Self::Summary { + GitBlameEntrySummary { rows: self.rows } + } +} + +impl sum_tree::Summary for GitBlameEntrySummary { + type Context = (); + + fn add_summary(&mut self, summary: &Self, _cx: &()) { + self.rows += 
summary.rows; + } +} + +impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 { + fn add_summary(&mut self, summary: &'a GitBlameEntrySummary, _cx: &()) { + *self += summary.rows; + } +} + +#[derive(Clone)] +pub struct GitRemote { + pub host: Arc, + pub owner: String, + pub repo: String, +} + +impl std::fmt::Debug for GitRemote { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("GitRemote") + .field("host", &self.host.name()) + .field("owner", &self.owner) + .field("repo", &self.repo) + .finish() + } +} + +impl GitRemote { + pub fn host_supports_avatars(&self) -> bool { + self.host.supports_avatars() + } + + pub async fn avatar_url(&self, commit: Oid, client: Arc) -> Option { + self.host + .commit_author_avatar_url(&self.owner, &self.repo, commit, client) + .await + .ok() + .flatten() + } +} + +#[derive(Clone, Debug)] +pub struct CommitDetails { + pub message: String, + pub parsed_message: ParsedMarkdown, + pub permalink: Option, + pub pull_request: Option, + pub remote: Option, +} + +pub struct GitBlame { + project: Model, + buffer: Model, + entries: SumTree, + commit_details: HashMap, + buffer_snapshot: BufferSnapshot, + buffer_edits: text::Subscription, + task: Task>, + focused: bool, + generated: bool, + changed_while_blurred: bool, + user_triggered: bool, + regenerate_on_edit_task: Task>, + _regenerate_subscriptions: Vec, +} + +impl GitBlame { + pub fn new( + buffer: Model, + project: Model, + user_triggered: bool, + focused: bool, + cx: &mut ModelContext, + ) -> Self { + let entries = SumTree::from_item( + GitBlameEntry { + rows: buffer.read(cx).max_point().row + 1, + blame: None, + }, + &(), + ); + + let buffer_subscriptions = cx.subscribe(&buffer, |this, buffer, event, cx| match event { + language::Event::DirtyChanged => { + if !buffer.read(cx).is_dirty() { + this.generate(cx); + } + } + language::Event::Edited => { + this.regenerate_on_edit(cx); + } + _ => {} + }); + + let project_subscription = cx.subscribe(&project, { + let buffer = buffer.clone(); + + move |this, _, event, cx| match event { + project::Event::WorktreeUpdatedEntries(_, updated) => { + let project_entry_id = buffer.read(cx).entry_id(cx); + if updated + .iter() + .any(|(_, entry_id, _)| project_entry_id == Some(*entry_id)) + { + log::debug!("Updated buffers. Regenerating blame data...",); + this.generate(cx); + } + } + project::Event::WorktreeUpdatedGitRepositories => { + log::debug!("Status of git repositories updated. 
Regenerating blame data...",); + this.generate(cx); + } + _ => {} + } + }); + + let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe()); + + let mut this = Self { + project, + buffer, + buffer_snapshot, + entries, + buffer_edits, + user_triggered, + focused, + changed_while_blurred: false, + commit_details: HashMap::default(), + task: Task::ready(Ok(())), + generated: false, + regenerate_on_edit_task: Task::ready(Ok(())), + _regenerate_subscriptions: vec![buffer_subscriptions, project_subscription], + }; + this.generate(cx); + this + } + + pub fn has_generated_entries(&self) -> bool { + self.generated + } + + pub fn details_for_entry(&self, entry: &BlameEntry) -> Option { + self.commit_details.get(&entry.sha).cloned() + } + + pub fn blame_for_rows<'a>( + &'a mut self, + rows: impl 'a + IntoIterator>, + cx: &mut ModelContext, + ) -> impl 'a + Iterator> { + self.sync(cx); + + let mut cursor = self.entries.cursor::(); + rows.into_iter().map(move |row| { + let row = row?; + cursor.seek_forward(&row.0, Bias::Right, &()); + cursor.item()?.blame.clone() + }) + } + + pub fn blur(&mut self, _: &mut ModelContext) { + self.focused = false; + } + + pub fn focus(&mut self, cx: &mut ModelContext) { + self.focused = true; + if self.changed_while_blurred { + self.changed_while_blurred = false; + self.generate(cx); + } + } + + fn sync(&mut self, cx: &mut ModelContext) { + let edits = self.buffer_edits.consume(); + let new_snapshot = self.buffer.read(cx).snapshot(); + + let mut row_edits = edits + .into_iter() + .map(|edit| { + let old_point_range = self.buffer_snapshot.offset_to_point(edit.old.start) + ..self.buffer_snapshot.offset_to_point(edit.old.end); + let new_point_range = new_snapshot.offset_to_point(edit.new.start) + ..new_snapshot.offset_to_point(edit.new.end); + + if old_point_range.start.column + == self.buffer_snapshot.line_len(old_point_range.start.row) + && (new_snapshot.chars_at(edit.new.start).next() == Some('\n') + || self.buffer_snapshot.line_len(old_point_range.end.row) == 0) + { + Edit { + old: old_point_range.start.row + 1..old_point_range.end.row + 1, + new: new_point_range.start.row + 1..new_point_range.end.row + 1, + } + } else if old_point_range.start.column == 0 + && old_point_range.end.column == 0 + && new_point_range.end.column == 0 + { + Edit { + old: old_point_range.start.row..old_point_range.end.row, + new: new_point_range.start.row..new_point_range.end.row, + } + } else { + Edit { + old: old_point_range.start.row..old_point_range.end.row + 1, + new: new_point_range.start.row..new_point_range.end.row + 1, + } + } + }) + .peekable(); + + let mut new_entries = SumTree::new(); + let mut cursor = self.entries.cursor::(); + + while let Some(mut edit) = row_edits.next() { + while let Some(next_edit) = row_edits.peek() { + if edit.old.end >= next_edit.old.start { + edit.old.end = next_edit.old.end; + edit.new.end = next_edit.new.end; + row_edits.next(); + } else { + break; + } + } + + new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &()); + + if edit.new.start > new_entries.summary().rows { + new_entries.push( + GitBlameEntry { + rows: edit.new.start - new_entries.summary().rows, + blame: cursor.item().and_then(|entry| entry.blame.clone()), + }, + &(), + ); + } + + cursor.seek(&edit.old.end, Bias::Right, &()); + if !edit.new.is_empty() { + new_entries.push( + GitBlameEntry { + rows: edit.new.len() as u32, + blame: None, + }, + &(), + ); + } + + let old_end = cursor.end(&()); + if row_edits + .peek() 
+ .map_or(true, |next_edit| next_edit.old.start >= old_end) + { + if let Some(entry) = cursor.item() { + if old_end > edit.old.end { + new_entries.push( + GitBlameEntry { + rows: cursor.end(&()) - edit.old.end, + blame: entry.blame.clone(), + }, + &(), + ); + } + + cursor.next(&()); + } + } + } + new_entries.append(cursor.suffix(&()), &()); + drop(cursor); + + self.buffer_snapshot = new_snapshot; + self.entries = new_entries; + } + + #[cfg(test)] + fn check_invariants(&mut self, cx: &mut ModelContext) { + self.sync(cx); + assert_eq!( + self.entries.summary().rows, + self.buffer.read(cx).max_point().row + 1 + ); + } + + fn generate(&mut self, cx: &mut ModelContext) { + if !self.focused { + self.changed_while_blurred = true; + return; + } + let buffer_edits = self.buffer.update(cx, |buffer, _| buffer.subscribe()); + let snapshot = self.buffer.read(cx).snapshot(); + let blame = self.project.read(cx).blame_buffer(&self.buffer, None, cx); + let languages = self.project.read(cx).languages().clone(); + let provider_registry = GitHostingProviderRegistry::default_global(cx); + + self.task = cx.spawn(|this, mut cx| async move { + let result = cx + .background_executor() + .spawn({ + let snapshot = snapshot.clone(); + async move { + let Blame { + entries, + permalinks, + messages, + remote_url, + } = blame.await?; + + let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row); + let commit_details = parse_commit_messages( + messages, + remote_url, + &permalinks, + provider_registry, + &languages, + ) + .await; + + anyhow::Ok((entries, commit_details)) + } + }) + .await; + + this.update(&mut cx, |this, cx| match result { + Ok((entries, commit_details)) => { + this.buffer_edits = buffer_edits; + this.buffer_snapshot = snapshot; + this.entries = entries; + this.commit_details = commit_details; + this.generated = true; + cx.notify(); + } + Err(error) => this.project.update(cx, |_, cx| { + if this.user_triggered { + log::error!("failed to get git blame data: {error:?}"); + let notification = format!("{:#}", error).trim().to_string(); + cx.emit(project::Event::Notification(notification)); + } else { + // If we weren't triggered by a user, we just log errors in the background, instead of sending + // notifications. + // Except for `NoRepositoryError`, which can happen often if a user has inline-blame turned on + // and opens a non-git file. 
+ if error.downcast_ref::().is_none() { + log::error!("failed to get git blame data: {error:?}"); + } + } + }), + }) + }); + } + + fn regenerate_on_edit(&mut self, cx: &mut ModelContext) { + self.regenerate_on_edit_task = cx.spawn(|this, mut cx| async move { + cx.background_executor() + .timer(REGENERATE_ON_EDIT_DEBOUNCE_INTERVAL) + .await; + + this.update(&mut cx, |this, cx| { + this.generate(cx); + }) + }) + } +} + +const REGENERATE_ON_EDIT_DEBOUNCE_INTERVAL: Duration = Duration::from_secs(2); + +fn build_blame_entry_sum_tree(entries: Vec, max_row: u32) -> SumTree { + let mut current_row = 0; + let mut entries = SumTree::from_iter( + entries.into_iter().flat_map(|entry| { + let mut entries = SmallVec::<[GitBlameEntry; 2]>::new(); + + if entry.range.start > current_row { + let skipped_rows = entry.range.start - current_row; + entries.push(GitBlameEntry { + rows: skipped_rows, + blame: None, + }); + } + entries.push(GitBlameEntry { + rows: entry.range.len() as u32, + blame: Some(entry.clone()), + }); + + current_row = entry.range.end; + entries + }), + &(), + ); + + if max_row >= current_row { + entries.push( + GitBlameEntry { + rows: (max_row + 1) - current_row, + blame: None, + }, + &(), + ); + } + + entries +} + +async fn parse_commit_messages( + messages: impl IntoIterator, + remote_url: Option, + deprecated_permalinks: &HashMap, + provider_registry: Arc, + languages: &Arc, +) -> HashMap { + let mut commit_details = HashMap::default(); + + let parsed_remote_url = remote_url + .as_deref() + .and_then(|remote_url| parse_git_remote_url(provider_registry, remote_url)); + + for (oid, message) in messages { + let parsed_message = parse_markdown(&message, &languages).await; + + let permalink = if let Some((provider, git_remote)) = parsed_remote_url.as_ref() { + Some(provider.build_commit_permalink( + git_remote, + git::BuildCommitPermalinkParams { + sha: oid.to_string().as_str(), + }, + )) + } else { + // DEPRECATED (18 Apr 24): Sending permalinks over the wire is deprecated. Clients + // now do the parsing. This is here for backwards compatibility, so that + // when an old peer sends a client no `parsed_remote_url` but `deprecated_permalinks`, + // we fall back to that. + deprecated_permalinks.get(&oid).cloned() + }; + + let remote = parsed_remote_url + .as_ref() + .map(|(provider, remote)| GitRemote { + host: provider.clone(), + owner: remote.owner.to_string(), + repo: remote.repo.to_string(), + }); + + let pull_request = parsed_remote_url + .as_ref() + .and_then(|(provider, remote)| provider.extract_pull_request(remote, &message)); + + commit_details.insert( + oid, + CommitDetails { + message, + parsed_message, + permalink, + remote, + pull_request, + }, + ); + } + + commit_details +} + +async fn parse_markdown(text: &str, language_registry: &Arc) -> ParsedMarkdown { + let mut parsed_message = ParsedMarkdown::default(); + + markdown::parse_markdown_block( + text, + language_registry, + None, + &mut parsed_message.text, + &mut parsed_message.highlights, + &mut parsed_message.region_ranges, + &mut parsed_message.regions, + ) + .await; + + parsed_message +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::Context; + use language::{Point, Rope}; + use project::FakeFs; + use rand::prelude::*; + use serde_json::json; + use settings::SettingsStore; + use std::{cmp, env, ops::Range, path::Path}; + use unindent::Unindent as _; + use util::RandomCharIter; + + macro_rules! 
assert_blame_rows { + ($blame:expr, $rows:expr, $expected:expr, $cx:expr) => { + assert_eq!( + $blame + .blame_for_rows($rows.map(MultiBufferRow).map(Some), $cx) + .collect::>(), + $expected + ); + }; + } + + fn init_test(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + let settings = SettingsStore::test(cx); + cx.set_global(settings); + + theme::init(theme::LoadThemes::JustBase, cx); + + language::init(cx); + client::init_settings(cx); + workspace::init_settings(cx); + Project::init_settings(cx); + + crate::init(cx); + }); + } + + #[gpui::test] + async fn test_blame_error_notifications(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/my-repo", + json!({ + ".git": {}, + "file.txt": r#" + irrelevant contents + "# + .unindent() + }), + ) + .await; + + // Creating a GitBlame without a corresponding blame state + // will result in an error. + + let project = Project::test(fs, ["/my-repo".as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/my-repo/file.txt", cx) + }) + .await + .unwrap(); + + let blame = + cx.new_model(|cx| GitBlame::new(buffer.clone(), project.clone(), true, true, cx)); + + let event = project.next_event(cx).await; + assert_eq!( + event, + project::Event::Notification( + "Failed to blame \"file.txt\": failed to get blame for \"file.txt\"".to_string() + ) + ); + + blame.update(cx, |blame, cx| { + assert_eq!( + blame + .blame_for_rows((0..1).map(MultiBufferRow).map(Some), cx) + .collect::>(), + vec![None] + ); + }); + } + + #[gpui::test] + async fn test_blame_for_rows(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/my-repo", + json!({ + ".git": {}, + "file.txt": r#" + AAA Line 1 + BBB Line 2 - Modified 1 + CCC Line 3 - Modified 2 + modified in memory 1 + modified in memory 1 + DDD Line 4 - Modified 2 + EEE Line 5 - Modified 1 + FFF Line 6 - Modified 2 + "# + .unindent() + }), + ) + .await; + + fs.set_blame_for_repo( + Path::new("/my-repo/.git"), + vec![( + Path::new("file.txt"), + Blame { + entries: vec![ + blame_entry("1b1b1b", 0..1), + blame_entry("0d0d0d", 1..2), + blame_entry("3a3a3a", 2..3), + blame_entry("3a3a3a", 5..6), + blame_entry("0d0d0d", 6..7), + blame_entry("3a3a3a", 7..8), + ], + ..Default::default() + }, + )], + ); + let project = Project::test(fs, ["/my-repo".as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/my-repo/file.txt", cx) + }) + .await + .unwrap(); + + let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, false, true, cx)); + + cx.executor().run_until_parked(); + + git_blame.update(cx, |blame, cx| { + // All lines + assert_eq!( + blame + .blame_for_rows((0..8).map(MultiBufferRow).map(Some), cx) + .collect::>(), + vec![ + Some(blame_entry("1b1b1b", 0..1)), + Some(blame_entry("0d0d0d", 1..2)), + Some(blame_entry("3a3a3a", 2..3)), + None, + None, + Some(blame_entry("3a3a3a", 5..6)), + Some(blame_entry("0d0d0d", 6..7)), + Some(blame_entry("3a3a3a", 7..8)), + ] + ); + // Subset of lines + assert_eq!( + blame + .blame_for_rows((1..4).map(MultiBufferRow).map(Some), cx) + .collect::>(), + vec![ + Some(blame_entry("0d0d0d", 1..2)), + Some(blame_entry("3a3a3a", 2..3)), + None + ] + ); + // Subset of lines, with some not displayed + assert_eq!( + blame + .blame_for_rows(vec![Some(MultiBufferRow(1)), None, None], cx) + .collect::>(), + vec![Some(blame_entry("0d0d0d", 1..2)), None, None] + ); + }); + } + + 
#[gpui::test] + async fn test_blame_for_rows_with_edits(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/my-repo", + json!({ + ".git": {}, + "file.txt": r#" + Line 1 + Line 2 + Line 3 + "# + .unindent() + }), + ) + .await; + + fs.set_blame_for_repo( + Path::new("/my-repo/.git"), + vec![( + Path::new("file.txt"), + Blame { + entries: vec![blame_entry("1b1b1b", 0..4)], + ..Default::default() + }, + )], + ); + + let project = Project::test(fs, ["/my-repo".as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/my-repo/file.txt", cx) + }) + .await + .unwrap(); + + let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, false, true, cx)); + + cx.executor().run_until_parked(); + + git_blame.update(cx, |blame, cx| { + // Sanity check before edits: make sure that we get the same blame entry for all + // lines. + assert_blame_rows!( + blame, + (0..4), + vec![ + Some(blame_entry("1b1b1b", 0..4)), + Some(blame_entry("1b1b1b", 0..4)), + Some(blame_entry("1b1b1b", 0..4)), + Some(blame_entry("1b1b1b", 0..4)), + ], + cx + ); + }); + + // Modify a single line, at the start of the line + buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "X")], None, cx); + }); + git_blame.update(cx, |blame, cx| { + assert_blame_rows!( + blame, + (0..2), + vec![None, Some(blame_entry("1b1b1b", 0..4))], + cx + ); + }); + // Modify a single line, in the middle of the line + buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(1, 2)..Point::new(1, 2), "X")], None, cx); + }); + git_blame.update(cx, |blame, cx| { + assert_blame_rows!( + blame, + (1..4), + vec![ + None, + Some(blame_entry("1b1b1b", 0..4)), + Some(blame_entry("1b1b1b", 0..4)) + ], + cx + ); + }); + + // Before we insert a newline at the end, sanity check: + git_blame.update(cx, |blame, cx| { + assert_blame_rows!(blame, (3..4), vec![Some(blame_entry("1b1b1b", 0..4))], cx); + }); + // Insert a newline at the end + buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(3, 6)..Point::new(3, 6), "\n")], None, cx); + }); + // Only the new line is marked as edited: + git_blame.update(cx, |blame, cx| { + assert_blame_rows!( + blame, + (3..5), + vec![Some(blame_entry("1b1b1b", 0..4)), None], + cx + ); + }); + + // Before we insert a newline at the start, sanity check: + git_blame.update(cx, |blame, cx| { + assert_blame_rows!(blame, (2..3), vec![Some(blame_entry("1b1b1b", 0..4)),], cx); + }); + + // Usage example + // Insert a newline at the start of the row + buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "\n")], None, cx); + }); + // Only the new line is marked as edited: + git_blame.update(cx, |blame, cx| { + assert_blame_rows!( + blame, + (2..4), + vec![None, Some(blame_entry("1b1b1b", 0..4)),], + cx + ); + }); + } + + #[gpui::test(iterations = 100)] + async fn test_blame_random(mut rng: StdRng, cx: &mut gpui::TestAppContext) { + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + let max_edits_per_operation = env::var("MAX_EDITS_PER_OPERATION") + .map(|i| { + i.parse() + .expect("invalid `MAX_EDITS_PER_OPERATION` variable") + }) + .unwrap_or(5); + + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let buffer_initial_text_len = rng.gen_range(5..15); + let mut buffer_initial_text = Rope::from( + RandomCharIter::new(&mut rng) + .take(buffer_initial_text_len) + .collect::() + 
.as_str(), + ); + + let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5); + newline_ixs.sort_unstable(); + for newline_ix in newline_ixs.into_iter().rev() { + let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right); + buffer_initial_text.replace(newline_ix..newline_ix, "\n"); + } + log::info!("initial buffer text: {:?}", buffer_initial_text); + + fs.insert_tree( + "/my-repo", + json!({ + ".git": {}, + "file.txt": buffer_initial_text.to_string() + }), + ) + .await; + + let blame_entries = gen_blame_entries(buffer_initial_text.max_point().row, &mut rng); + log::info!("initial blame entries: {:?}", blame_entries); + fs.set_blame_for_repo( + Path::new("/my-repo/.git"), + vec![( + Path::new("file.txt"), + Blame { + entries: blame_entries, + ..Default::default() + }, + )], + ); + + let project = Project::test(fs.clone(), ["/my-repo".as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/my-repo/file.txt", cx) + }) + .await + .unwrap(); + + let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, false, true, cx)); + cx.executor().run_until_parked(); + git_blame.update(cx, |blame, cx| blame.check_invariants(cx)); + + for _ in 0..operations { + match rng.gen_range(0..100) { + 0..=19 => { + log::info!("quiescing"); + cx.executor().run_until_parked(); + } + 20..=69 => { + log::info!("editing buffer"); + buffer.update(cx, |buffer, cx| { + buffer.randomly_edit(&mut rng, max_edits_per_operation, cx); + log::info!("buffer text: {:?}", buffer.text()); + }); + + let blame_entries = gen_blame_entries( + buffer.read_with(cx, |buffer, _| buffer.max_point().row), + &mut rng, + ); + log::info!("regenerating blame entries: {:?}", blame_entries); + + fs.set_blame_for_repo( + Path::new("/my-repo/.git"), + vec![( + Path::new("file.txt"), + Blame { + entries: blame_entries, + ..Default::default() + }, + )], + ); + } + _ => { + git_blame.update(cx, |blame, cx| blame.check_invariants(cx)); + } + } + } + + git_blame.update(cx, |blame, cx| blame.check_invariants(cx)); + } + + fn gen_blame_entries(max_row: u32, rng: &mut StdRng) -> Vec { + let mut last_row = 0; + let mut blame_entries = Vec::new(); + for ix in 0..5 { + if last_row < max_row { + let row_start = rng.gen_range(last_row..max_row); + let row_end = rng.gen_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1); + blame_entries.push(blame_entry(&ix.to_string(), row_start..row_end)); + last_row = row_end; + } else { + break; + } + } + blame_entries + } + + fn blame_entry(sha: &str, range: Range) -> BlameEntry { + BlameEntry { + sha: sha.parse().unwrap(), + range, + ..Default::default() + } + } +} diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs new file mode 100644 index 0000000..ca905fe --- /dev/null +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -0,0 +1,141 @@ +use gpui::ViewContext; + +use crate::{Editor, RangeToAnchorExt}; + +enum MatchingBracketHighlight {} + +pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewContext) { + editor.clear_background_highlights::(cx); + + let newest_selection = editor.selections.newest::(cx); + // Don't highlight brackets if the selection isn't empty + if !newest_selection.is_empty() { + return; + } + + let head = newest_selection.head(); + let snapshot = editor.snapshot(cx); + if let Some((opening_range, closing_range)) = snapshot + .buffer_snapshot + .innermost_enclosing_bracket_ranges(head..head, None) + { + 
editor.highlight_background::( + &[ + opening_range.to_anchors(&snapshot.buffer_snapshot), + closing_range.to_anchors(&snapshot.buffer_snapshot), + ], + |theme| theme.editor_document_highlight_read_background, + cx, + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext}; + use indoc::indoc; + use language::{BracketPair, BracketPairConfig, Language, LanguageConfig, LanguageMatcher}; + + #[gpui::test] + async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new( + Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + brackets: BracketPairConfig { + pairs: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + newline: true, + }, + BracketPair { + start: "(".to_string(), + end: ")".to_string(), + close: false, + newline: true, + }, + ], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_brackets_query(indoc! {r#" + ("{" @open "}" @close) + ("(" @open ")" @close) + "#}) + .unwrap(), + Default::default(), + cx, + ) + .await; + + // positioning cursor inside bracket highlights both + cx.set_state(indoc! {r#" + pub fn test("Test ˇargument") { + another_test(1, 2, 3); + } + "#}); + cx.assert_editor_background_highlights::(indoc! {r#" + pub fn test«(»"Test argument"«)» { + another_test(1, 2, 3); + } + "#}); + + cx.set_state(indoc! {r#" + pub fn test("Test argument") { + another_test(1, ˇ2, 3); + } + "#}); + cx.assert_editor_background_highlights::(indoc! {r#" + pub fn test("Test argument") { + another_test«(»1, 2, 3«)»; + } + "#}); + + cx.set_state(indoc! {r#" + pub fn test("Test argument") { + anotherˇ_test(1, 2, 3); + } + "#}); + cx.assert_editor_background_highlights::(indoc! {r#" + pub fn test("Test argument") «{» + another_test(1, 2, 3); + «}» + "#}); + + // positioning outside of brackets removes highlight + cx.set_state(indoc! {r#" + pub fˇn test("Test argument") { + another_test(1, 2, 3); + } + "#}); + cx.assert_editor_background_highlights::(indoc! {r#" + pub fn test("Test argument") { + another_test(1, 2, 3); + } + "#}); + + // non empty selection dismisses highlight + cx.set_state(indoc! {r#" + pub fn test("Te«st argˇ»ument") { + another_test(1, 2, 3); + } + "#}); + cx.assert_editor_background_highlights::(indoc! 
{r#" + pub fn test("Test argument") { + another_test(1, 2, 3); + } + "#}); + } +} diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs new file mode 100644 index 0000000..07d41db --- /dev/null +++ b/crates/editor/src/hover_links.rs @@ -0,0 +1,1250 @@ +use crate::{ + hover_popover::{self, InlayHover}, + Anchor, Editor, EditorSnapshot, FindAllReferences, GoToDefinition, GoToTypeDefinition, InlayId, + PointForPosition, SelectPhase, +}; +use gpui::{px, AppContext, AsyncWindowContext, Model, Modifiers, Task, ViewContext}; +use language::{Bias, ToOffset}; +use linkify::{LinkFinder, LinkKind}; +use lsp::LanguageServerId; +use project::{ + HoverBlock, HoverBlockKind, InlayHintLabelPartTooltip, InlayHintTooltip, LocationLink, + ResolveState, +}; +use std::ops::Range; +use theme::ActiveTheme as _; +use util::{maybe, ResultExt, TryFutureExt}; + +#[derive(Debug)] +pub struct HoveredLinkState { + pub last_trigger_point: TriggerPoint, + pub preferred_kind: LinkDefinitionKind, + pub symbol_range: Option, + pub links: Vec, + pub task: Option>>, +} + +#[derive(Debug, Eq, PartialEq, Clone)] +pub enum RangeInEditor { + Text(Range), + Inlay(InlayHighlight), +} + +impl RangeInEditor { + pub fn as_text_range(&self) -> Option> { + match self { + Self::Text(range) => Some(range.clone()), + Self::Inlay(_) => None, + } + } + + fn point_within_range(&self, trigger_point: &TriggerPoint, snapshot: &EditorSnapshot) -> bool { + match (self, trigger_point) { + (Self::Text(range), TriggerPoint::Text(point)) => { + let point_after_start = range.start.cmp(point, &snapshot.buffer_snapshot).is_le(); + point_after_start && range.end.cmp(point, &snapshot.buffer_snapshot).is_ge() + } + (Self::Inlay(highlight), TriggerPoint::InlayHint(point, _, _)) => { + highlight.inlay == point.inlay + && highlight.range.contains(&point.range.start) + && highlight.range.contains(&point.range.end) + } + (Self::Inlay(_), TriggerPoint::Text(_)) + | (Self::Text(_), TriggerPoint::InlayHint(_, _, _)) => false, + } + } +} + +#[derive(Debug, Clone)] +pub enum HoverLink { + Url(String), + Text(LocationLink), + InlayHint(lsp::Location, LanguageServerId), +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct InlayHighlight { + pub inlay: InlayId, + pub inlay_position: Anchor, + pub range: Range, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum TriggerPoint { + Text(Anchor), + InlayHint(InlayHighlight, lsp::Location, LanguageServerId), +} + +impl TriggerPoint { + fn anchor(&self) -> &Anchor { + match self { + TriggerPoint::Text(anchor) => anchor, + TriggerPoint::InlayHint(inlay_range, _, _) => &inlay_range.inlay_position, + } + } +} + +pub fn exclude_link_to_position( + buffer: &Model, + current_position: &text::Anchor, + location: &LocationLink, + cx: &AppContext, +) -> bool { + // Exclude definition links that points back to cursor position. + // (i.e., currently cursor upon definition). 
+ let snapshot = buffer.read(cx).snapshot(); + !(buffer == &location.target.buffer + && current_position + .bias_right(&snapshot) + .cmp(&location.target.range.start, &snapshot) + .is_ge() + && current_position + .cmp(&location.target.range.end, &snapshot) + .is_le()) +} + +impl Editor { + pub(crate) fn update_hovered_link( + &mut self, + point_for_position: PointForPosition, + snapshot: &EditorSnapshot, + modifiers: Modifiers, + cx: &mut ViewContext, + ) { + if !modifiers.secondary() || self.has_pending_selection() { + self.hide_hovered_link(cx); + return; + } + + match point_for_position.as_valid() { + Some(point) => { + let trigger_point = TriggerPoint::Text( + snapshot + .buffer_snapshot + .anchor_before(point.to_offset(&snapshot.display_snapshot, Bias::Left)), + ); + + show_link_definition(modifiers.shift, self, trigger_point, snapshot, cx); + } + None => { + update_inlay_link_and_hover_points( + &snapshot, + point_for_position, + self, + modifiers.secondary(), + modifiers.shift, + cx, + ); + } + } + } + + pub(crate) fn hide_hovered_link(&mut self, cx: &mut ViewContext) { + self.hovered_link_state.take(); + self.clear_highlights::(cx); + } + + pub(crate) fn handle_click_hovered_link( + &mut self, + point: PointForPosition, + modifiers: Modifiers, + cx: &mut ViewContext, + ) { + let reveal_task = self.cmd_click_reveal_task(point, modifiers, cx); + cx.spawn(|editor, mut cx| async move { + let definition_revealed = reveal_task.await.log_err().unwrap_or(false); + let find_references = editor + .update(&mut cx, |editor, cx| { + if definition_revealed { + return None; + } + editor.find_all_references(&FindAllReferences, cx) + }) + .ok() + .flatten(); + if let Some(find_references) = find_references { + find_references.await.log_err(); + } + }) + .detach(); + } + + fn cmd_click_reveal_task( + &mut self, + point: PointForPosition, + modifiers: Modifiers, + cx: &mut ViewContext, + ) -> Task> { + if let Some(hovered_link_state) = self.hovered_link_state.take() { + self.hide_hovered_link(cx); + if !hovered_link_state.links.is_empty() { + if !self.focus_handle.is_focused(cx) { + cx.focus(&self.focus_handle); + } + + // exclude links pointing back to the current anchor + let current_position = point + .next_valid + .to_point(&self.snapshot(cx).display_snapshot); + let Some((buffer, anchor)) = self + .buffer() + .read(cx) + .text_anchor_for_position(current_position, cx) + else { + return Task::ready(Ok(false)); + }; + let links = hovered_link_state + .links + .into_iter() + .filter(|link| { + if let HoverLink::Text(location) = link { + exclude_link_to_position(&buffer, &anchor, location, cx) + } else { + true + } + }) + .collect(); + + return self.navigate_to_hover_links(None, links, modifiers.alt, cx); + } + } + + // We don't have the correct kind of link cached, set the selection on + // click and immediately trigger GoToDefinition. 
+ self.select( + SelectPhase::Begin { + position: point.next_valid, + add: false, + click_count: 1, + }, + cx, + ); + + if point.as_valid().is_some() { + if modifiers.shift { + self.go_to_type_definition(&GoToTypeDefinition, cx) + } else { + self.go_to_definition(&GoToDefinition, cx) + } + } else { + Task::ready(Ok(false)) + } + } +} + +pub fn update_inlay_link_and_hover_points( + snapshot: &EditorSnapshot, + point_for_position: PointForPosition, + editor: &mut Editor, + secondary_held: bool, + shift_held: bool, + cx: &mut ViewContext<'_, Editor>, +) { + let hovered_offset = if point_for_position.column_overshoot_after_line_end == 0 { + Some(snapshot.display_point_to_inlay_offset(point_for_position.exact_unclipped, Bias::Left)) + } else { + None + }; + let mut go_to_definition_updated = false; + let mut hover_updated = false; + if let Some(hovered_offset) = hovered_offset { + let buffer_snapshot = editor.buffer().read(cx).snapshot(cx); + let previous_valid_anchor = buffer_snapshot.anchor_at( + point_for_position.previous_valid.to_point(snapshot), + Bias::Left, + ); + let next_valid_anchor = buffer_snapshot.anchor_at( + point_for_position.next_valid.to_point(snapshot), + Bias::Right, + ); + if let Some(hovered_hint) = editor + .visible_inlay_hints(cx) + .into_iter() + .skip_while(|hint| { + hint.position + .cmp(&previous_valid_anchor, &buffer_snapshot) + .is_lt() + }) + .take_while(|hint| { + hint.position + .cmp(&next_valid_anchor, &buffer_snapshot) + .is_le() + }) + .max_by_key(|hint| hint.id) + { + let inlay_hint_cache = editor.inlay_hint_cache(); + let excerpt_id = previous_valid_anchor.excerpt_id; + if let Some(cached_hint) = inlay_hint_cache.hint_by_id(excerpt_id, hovered_hint.id) { + match cached_hint.resolve_state { + ResolveState::CanResolve(_, _) => { + if let Some(buffer_id) = previous_valid_anchor.buffer_id { + inlay_hint_cache.spawn_hint_resolve( + buffer_id, + excerpt_id, + hovered_hint.id, + cx, + ); + } + } + ResolveState::Resolved => { + let mut extra_shift_left = 0; + let mut extra_shift_right = 0; + if cached_hint.padding_left { + extra_shift_left += 1; + extra_shift_right += 1; + } + if cached_hint.padding_right { + extra_shift_right += 1; + } + match cached_hint.label { + project::InlayHintLabel::String(_) => { + if let Some(tooltip) = cached_hint.tooltip { + hover_popover::hover_at_inlay( + editor, + InlayHover { + excerpt: excerpt_id, + tooltip: match tooltip { + InlayHintTooltip::String(text) => HoverBlock { + text, + kind: HoverBlockKind::PlainText, + }, + InlayHintTooltip::MarkupContent(content) => { + HoverBlock { + text: content.value, + kind: content.kind, + } + } + }, + range: InlayHighlight { + inlay: hovered_hint.id, + inlay_position: hovered_hint.position, + range: extra_shift_left + ..hovered_hint.text.len() + extra_shift_right, + }, + }, + cx, + ); + hover_updated = true; + } + } + project::InlayHintLabel::LabelParts(label_parts) => { + let hint_start = + snapshot.anchor_to_inlay_offset(hovered_hint.position); + if let Some((hovered_hint_part, part_range)) = + hover_popover::find_hovered_hint_part( + label_parts, + hint_start, + hovered_offset, + ) + { + let highlight_start = + (part_range.start - hint_start).0 + extra_shift_left; + let highlight_end = + (part_range.end - hint_start).0 + extra_shift_right; + let highlight = InlayHighlight { + inlay: hovered_hint.id, + inlay_position: hovered_hint.position, + range: highlight_start..highlight_end, + }; + if let Some(tooltip) = hovered_hint_part.tooltip { + hover_popover::hover_at_inlay( + editor, + 
InlayHover { + excerpt: excerpt_id, + tooltip: match tooltip { + InlayHintLabelPartTooltip::String(text) => { + HoverBlock { + text, + kind: HoverBlockKind::PlainText, + } + } + InlayHintLabelPartTooltip::MarkupContent( + content, + ) => HoverBlock { + text: content.value, + kind: content.kind, + }, + }, + range: highlight.clone(), + }, + cx, + ); + hover_updated = true; + } + if let Some((language_server_id, location)) = + hovered_hint_part.location + { + if secondary_held + && !editor.has_pending_nonempty_selection() + { + go_to_definition_updated = true; + show_link_definition( + shift_held, + editor, + TriggerPoint::InlayHint( + highlight, + location, + language_server_id, + ), + snapshot, + cx, + ); + } + } + } + } + }; + } + ResolveState::Resolving => {} + } + } + } + } + + if !go_to_definition_updated { + editor.hide_hovered_link(cx) + } + if !hover_updated { + hover_popover::hover_at(editor, None, cx); + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum LinkDefinitionKind { + Symbol, + Type, +} + +pub fn show_link_definition( + shift_held: bool, + editor: &mut Editor, + trigger_point: TriggerPoint, + snapshot: &EditorSnapshot, + cx: &mut ViewContext, +) { + let preferred_kind = match trigger_point { + TriggerPoint::Text(_) if !shift_held => LinkDefinitionKind::Symbol, + _ => LinkDefinitionKind::Type, + }; + + let (mut hovered_link_state, is_cached) = + if let Some(existing) = editor.hovered_link_state.take() { + (existing, true) + } else { + ( + HoveredLinkState { + last_trigger_point: trigger_point.clone(), + symbol_range: None, + preferred_kind, + links: vec![], + task: None, + }, + false, + ) + }; + + if editor.pending_rename.is_some() { + return; + } + + let trigger_anchor = trigger_point.anchor(); + let Some((buffer, buffer_position)) = editor + .buffer + .read(cx) + .text_anchor_for_position(*trigger_anchor, cx) + else { + return; + }; + + let Some((excerpt_id, _, _)) = editor + .buffer() + .read(cx) + .excerpt_containing(*trigger_anchor, cx) + else { + return; + }; + + let same_kind = hovered_link_state.preferred_kind == preferred_kind + || hovered_link_state + .links + .first() + .is_some_and(|d| matches!(d, HoverLink::Url(_))); + + if same_kind { + if is_cached && (&hovered_link_state.last_trigger_point == &trigger_point) + || hovered_link_state + .symbol_range + .as_ref() + .is_some_and(|symbol_range| { + symbol_range.point_within_range(&trigger_point, &snapshot) + }) + { + editor.hovered_link_state = Some(hovered_link_state); + return; + } + } else { + editor.hide_hovered_link(cx) + } + let project = editor.project.clone(); + + let snapshot = snapshot.buffer_snapshot.clone(); + hovered_link_state.task = Some(cx.spawn(|this, mut cx| { + async move { + let result = match &trigger_point { + TriggerPoint::Text(_) => { + if let Some((url_range, url)) = find_url(&buffer, buffer_position, cx.clone()) { + this.update(&mut cx, |_, _| { + let range = maybe!({ + let start = + snapshot.anchor_in_excerpt(excerpt_id, url_range.start)?; + let end = snapshot.anchor_in_excerpt(excerpt_id, url_range.end)?; + Some(RangeInEditor::Text(start..end)) + }); + (range, vec![HoverLink::Url(url)]) + }) + .ok() + } else if let Some(project) = project { + // query the LSP for definition info + project + .update(&mut cx, |project, cx| match preferred_kind { + LinkDefinitionKind::Symbol => { + project.definition(&buffer, buffer_position, cx) + } + + LinkDefinitionKind::Type => { + project.type_definition(&buffer, buffer_position, cx) + } + })? 
+ .await + .ok() + .map(|definition_result| { + ( + definition_result.iter().find_map(|link| { + link.origin.as_ref().and_then(|origin| { + let start = snapshot.anchor_in_excerpt( + excerpt_id, + origin.range.start, + )?; + let end = snapshot + .anchor_in_excerpt(excerpt_id, origin.range.end)?; + Some(RangeInEditor::Text(start..end)) + }) + }), + definition_result.into_iter().map(HoverLink::Text).collect(), + ) + }) + } else { + None + } + } + TriggerPoint::InlayHint(highlight, lsp_location, server_id) => Some(( + Some(RangeInEditor::Inlay(highlight.clone())), + vec![HoverLink::InlayHint(lsp_location.clone(), *server_id)], + )), + }; + + this.update(&mut cx, |editor, cx| { + // Clear any existing highlights + editor.clear_highlights::(cx); + let Some(hovered_link_state) = editor.hovered_link_state.as_mut() else { + editor.hide_hovered_link(cx); + return; + }; + hovered_link_state.preferred_kind = preferred_kind; + hovered_link_state.symbol_range = result + .as_ref() + .and_then(|(symbol_range, _)| symbol_range.clone()); + + if let Some((symbol_range, definitions)) = result { + hovered_link_state.links = definitions; + + let underline_hovered_link = hovered_link_state.links.len() > 0 + || hovered_link_state.symbol_range.is_some(); + + if underline_hovered_link { + let style = gpui::HighlightStyle { + underline: Some(gpui::UnderlineStyle { + thickness: px(1.), + ..Default::default() + }), + color: Some(cx.theme().colors().link_text_hover), + ..Default::default() + }; + let highlight_range = + symbol_range.unwrap_or_else(|| match &trigger_point { + TriggerPoint::Text(trigger_anchor) => { + // If no symbol range returned from language server, use the surrounding word. + let (offset_range, _) = + snapshot.surrounding_word(*trigger_anchor); + RangeInEditor::Text( + snapshot.anchor_before(offset_range.start) + ..snapshot.anchor_after(offset_range.end), + ) + } + TriggerPoint::InlayHint(highlight, _, _) => { + RangeInEditor::Inlay(highlight.clone()) + } + }); + + match highlight_range { + RangeInEditor::Text(text_range) => editor + .highlight_text::(vec![text_range], style, cx), + RangeInEditor::Inlay(highlight) => editor + .highlight_inlays::(vec![highlight], style, cx), + } + } + } else { + editor.hide_hovered_link(cx); + } + })?; + + Ok::<_, anyhow::Error>(()) + } + .log_err() + })); + + editor.hovered_link_state = Some(hovered_link_state); +} + +pub(crate) fn find_url( + buffer: &Model, + position: text::Anchor, + mut cx: AsyncWindowContext, +) -> Option<(Range, String)> { + const LIMIT: usize = 2048; + + let Ok(snapshot) = buffer.update(&mut cx, |buffer, _| buffer.snapshot()) else { + return None; + }; + + let offset = position.to_offset(&snapshot); + let mut token_start = offset; + let mut token_end = offset; + let mut found_start = false; + let mut found_end = false; + + for ch in snapshot.reversed_chars_at(offset).take(LIMIT) { + if ch.is_whitespace() { + found_start = true; + break; + } + token_start -= ch.len_utf8(); + } + // Check if we didn't find the starting whitespace or if we didn't reach the start of the buffer + if !found_start && token_start != 0 { + return None; + } + + for ch in snapshot + .chars_at(offset) + .take(LIMIT - (offset - token_start)) + { + if ch.is_whitespace() { + found_end = true; + break; + } + token_end += ch.len_utf8(); + } + // Check if we didn't find the ending whitespace or if we read more or equal than LIMIT + // which at this point would happen only if we reached the end of buffer + if !found_end && (token_end - token_start >= LIMIT) { + return None; + } 
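+    // Run linkify's URL finder over the whitespace-delimited token and return the first match
+    // that spans the hover offset, as an anchor range plus the URL text.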
+ + let mut finder = LinkFinder::new(); + finder.kinds(&[LinkKind::Url]); + let input = snapshot + .text_for_range(token_start..token_end) + .collect::(); + + let relative_offset = offset - token_start; + for link in finder.links(&input) { + if link.start() <= relative_offset && link.end() >= relative_offset { + let range = snapshot.anchor_before(token_start + link.start()) + ..snapshot.anchor_after(token_start + link.end()); + return Some((range, link.as_str().to_string())); + } + } + None +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + display_map::ToDisplayPoint, + editor_tests::init_test, + inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels}, + test::editor_lsp_test_context::EditorLspTestContext, + DisplayPoint, + }; + use futures::StreamExt; + use gpui::Modifiers; + use indoc::indoc; + use language::language_settings::InlayHintSettings; + use lsp::request::{GotoDefinition, GotoTypeDefinition}; + use util::assert_set_eq; + use workspace::item::Item; + + #[gpui::test] + async fn test_hover_type_links(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + type_definition_provider: Some(lsp::TypeDefinitionProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + struct A; + let vˇariable = A; + "}); + let screen_coord = cx.editor(|editor, cx| editor.pixel_position_of_cursor(cx)); + + // Basic hold cmd+shift, expect highlight in region if response contains type definition + let symbol_range = cx.lsp_range(indoc! {" + struct A; + let «variable» = A; + "}); + let target_range = cx.lsp_range(indoc! {" + struct «A»; + let variable = A; + "}); + + cx.run_until_parked(); + + let mut requests = + cx.handle_request::(move |url, _, _| async move { + Ok(Some(lsp::GotoTypeDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: Some(symbol_range), + target_uri: url.clone(), + target_range, + target_selection_range: target_range, + }, + ]))) + }); + + cx.cx + .cx + .simulate_mouse_move(screen_coord.unwrap(), None, Modifiers::command_shift()); + + requests.next().await; + cx.run_until_parked(); + cx.assert_editor_text_highlights::(indoc! {" + struct A; + let «variable» = A; + "}); + + cx.simulate_modifiers_change(Modifiers::secondary_key()); + cx.run_until_parked(); + // Assert no link highlights + cx.assert_editor_text_highlights::(indoc! {" + struct A; + let variable = A; + "}); + + cx.cx + .cx + .simulate_click(screen_coord.unwrap(), Modifiers::command_shift()); + + cx.assert_editor_state(indoc! {" + struct «Aˇ»; + let variable = A; + "}); + } + + #[gpui::test] + async fn test_hover_links(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { do_work(); } + fn do_work() { test(); } + "}); + + // Basic hold cmd, expect highlight in region if response contains definition + let hover_point = cx.pixel_position(indoc! {" + fn test() { do_wˇork(); } + fn do_work() { test(); } + "}); + let symbol_range = cx.lsp_range(indoc! {" + fn test() { «do_work»(); } + fn do_work() { test(); } + "}); + let target_range = cx.lsp_range(indoc! 
{" + fn test() { do_work(); } + fn «do_work»() { test(); } + "}); + + let mut requests = cx.handle_request::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: Some(symbol_range), + target_uri: url.clone(), + target_range, + target_selection_range: target_range, + }, + ]))) + }); + + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + requests.next().await; + cx.background_executor.run_until_parked(); + cx.assert_editor_text_highlights::(indoc! {" + fn test() { «do_work»(); } + fn do_work() { test(); } + "}); + + // Unpress cmd causes highlight to go away + cx.simulate_modifiers_change(Modifiers::none()); + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { test(); } + "}); + + let mut requests = cx.handle_request::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: Some(symbol_range), + target_uri: url.clone(), + target_range, + target_selection_range: target_range, + }, + ]))) + }); + + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + requests.next().await; + cx.background_executor.run_until_parked(); + cx.assert_editor_text_highlights::(indoc! {" + fn test() { «do_work»(); } + fn do_work() { test(); } + "}); + + // Moving mouse to location with no response dismisses highlight + let hover_point = cx.pixel_position(indoc! {" + fˇn test() { do_work(); } + fn do_work() { test(); } + "}); + let mut requests = cx + .lsp + .handle_request::(move |_, _| async move { + // No definitions returned + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![]))) + }); + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + + requests.next().await; + cx.background_executor.run_until_parked(); + + // Assert no link highlights + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { test(); } + "}); + + // // Move mouse without cmd and then pressing cmd triggers highlight + let hover_point = cx.pixel_position(indoc! {" + fn test() { do_work(); } + fn do_work() { teˇst(); } + "}); + cx.simulate_mouse_move(hover_point, None, Modifiers::none()); + + // Assert no link highlights + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { test(); } + "}); + + let symbol_range = cx.lsp_range(indoc! {" + fn test() { do_work(); } + fn do_work() { «test»(); } + "}); + let target_range = cx.lsp_range(indoc! {" + fn «test»() { do_work(); } + fn do_work() { test(); } + "}); + + let mut requests = cx.handle_request::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: Some(symbol_range), + target_uri: url, + target_range, + target_selection_range: target_range, + }, + ]))) + }); + + cx.simulate_modifiers_change(Modifiers::secondary_key()); + + requests.next().await; + cx.background_executor.run_until_parked(); + + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { «test»(); } + "}); + + cx.deactivate_window(); + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { test(); } + "}); + + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + cx.background_executor.run_until_parked(); + cx.assert_editor_text_highlights::(indoc! 
{" + fn test() { do_work(); } + fn do_work() { «test»(); } + "}); + + // Moving again within the same symbol range doesn't re-request + let hover_point = cx.pixel_position(indoc! {" + fn test() { do_work(); } + fn do_work() { tesˇt(); } + "}); + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + cx.background_executor.run_until_parked(); + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { «test»(); } + "}); + + // Cmd click with existing definition doesn't re-request and dismisses highlight + cx.simulate_click(hover_point, Modifiers::secondary_key()); + cx.lsp + .handle_request::(move |_, _| async move { + // Empty definition response to make sure we aren't hitting the lsp and using + // the cached location instead + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![]))) + }); + cx.background_executor.run_until_parked(); + cx.assert_editor_state(indoc! {" + fn «testˇ»() { do_work(); } + fn do_work() { test(); } + "}); + + // Assert no link highlights after jump + cx.assert_editor_text_highlights::(indoc! {" + fn test() { do_work(); } + fn do_work() { test(); } + "}); + + // Cmd click without existing definition requests and jumps + let hover_point = cx.pixel_position(indoc! {" + fn test() { do_wˇork(); } + fn do_work() { test(); } + "}); + let target_range = cx.lsp_range(indoc! {" + fn test() { do_work(); } + fn «do_work»() { test(); } + "}); + + let mut requests = cx.handle_request::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: None, + target_uri: url, + target_range, + target_selection_range: target_range, + }, + ]))) + }); + cx.simulate_click(hover_point, Modifiers::secondary_key()); + requests.next().await; + cx.background_executor.run_until_parked(); + cx.assert_editor_state(indoc! {" + fn test() { do_work(); } + fn «do_workˇ»() { test(); } + "}); + + // 1. We have a pending selection, mouse point is over a symbol that we have a response for, hitting cmd and nothing happens + // 2. Selection is completed, hovering + let hover_point = cx.pixel_position(indoc! {" + fn test() { do_wˇork(); } + fn do_work() { test(); } + "}); + let target_range = cx.lsp_range(indoc! {" + fn test() { do_work(); } + fn «do_work»() { test(); } + "}); + let mut requests = cx.handle_request::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: None, + target_uri: url, + target_range, + target_selection_range: target_range, + }, + ]))) + }); + + // create a pending selection + let selection_range = cx.ranges(indoc! {" + fn «test() { do_w»ork(); } + fn do_work() { test(); } + "})[0] + .clone(); + cx.update_editor(|editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let anchor_range = snapshot.anchor_before(selection_range.start) + ..snapshot.anchor_after(selection_range.end); + editor.change_selections(Some(crate::Autoscroll::fit()), cx, |s| { + s.set_pending_anchor_range(anchor_range, crate::SelectMode::Character) + }); + }); + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + cx.background_executor.run_until_parked(); + assert!(requests.try_next().is_err()); + cx.assert_editor_text_highlights::(indoc! 
{" + fn test() { do_work(); } + fn do_work() { test(); } + "}); + cx.background_executor.run_until_parked(); + } + + #[gpui::test] + async fn test_inlay_hover_links(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + cx, + ) + .await; + cx.set_state(indoc! {" + struct TestStruct; + + fn main() { + let variableˇ = TestStruct; + } + "}); + let hint_start_offset = cx.ranges(indoc! {" + struct TestStruct; + + fn main() { + let variableˇ = TestStruct; + } + "})[0] + .start; + let hint_position = cx.to_lsp(hint_start_offset); + let target_range = cx.lsp_range(indoc! {" + struct «TestStruct»; + + fn main() { + let variable = TestStruct; + } + "}); + + let expected_uri = cx.buffer_lsp_url.clone(); + let hint_label = ": TestStruct"; + cx.lsp + .handle_request::(move |params, _| { + let expected_uri = expected_uri.clone(); + async move { + assert_eq!(params.text_document.uri, expected_uri); + Ok(Some(vec![lsp::InlayHint { + position: hint_position, + label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart { + value: hint_label.to_string(), + location: Some(lsp::Location { + uri: params.text_document.uri, + range: target_range, + }), + ..Default::default() + }]), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(false), + padding_right: Some(false), + data: None, + }])) + } + }) + .next() + .await; + cx.background_executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let expected_layers = vec![hint_label.to_string()]; + assert_eq!(expected_layers, cached_hint_labels(editor)); + assert_eq!(expected_layers, visible_hint_labels(editor, cx)); + }); + + let inlay_range = cx + .ranges(indoc! 
{" + struct TestStruct; + + fn main() { + let variable« »= TestStruct; + } + "}) + .get(0) + .cloned() + .unwrap(); + let midpoint = cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let previous_valid = inlay_range.start.to_display_point(&snapshot); + let next_valid = inlay_range.end.to_display_point(&snapshot); + assert_eq!(previous_valid.row(), next_valid.row()); + assert!(previous_valid.column() < next_valid.column()); + DisplayPoint::new( + previous_valid.row(), + previous_valid.column() + (hint_label.len() / 2) as u32, + ) + }); + // Press cmd to trigger highlight + let hover_point = cx.pixel_position_for(midpoint); + cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); + cx.background_executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let actual_highlights = snapshot + .inlay_highlights::() + .into_iter() + .flat_map(|highlights| highlights.values().map(|(_, highlight)| highlight)) + .collect::>(); + + let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx)); + let expected_highlight = InlayHighlight { + inlay: InlayId::Hint(0), + inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right), + range: 0..hint_label.len(), + }; + assert_set_eq!(actual_highlights, vec![&expected_highlight]); + }); + + cx.simulate_mouse_move(hover_point, None, Modifiers::none()); + // Assert no link highlights + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let actual_ranges = snapshot + .text_highlight_ranges::() + .map(|ranges| ranges.as_ref().clone().1) + .unwrap_or_default(); + + assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}"); + }); + + cx.simulate_modifiers_change(Modifiers::secondary_key()); + cx.background_executor.run_until_parked(); + cx.simulate_click(hover_point, Modifiers::secondary_key()); + cx.background_executor.run_until_parked(); + cx.assert_editor_state(indoc! {" + struct «TestStructˇ»; + + fn main() { + let variable = TestStruct; + } + "}); + } + + #[gpui::test] + async fn test_urls(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + Let's test a [complex](https://zed.dev/channel/had-(oops)) caseˇ. + "}); + + let screen_coord = cx.pixel_position(indoc! {" + Let's test a [complex](https://zed.dev/channel/had-(ˇoops)) case. + "}); + + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights::(indoc! {" + Let's test a [complex](«https://zed.dev/channel/had-(oops)ˇ») case. + "}); + + cx.simulate_click(screen_coord, Modifiers::secondary_key()); + assert_eq!( + cx.opened_url(), + Some("https://zed.dev/channel/had-(oops)".into()) + ); + } + + #[gpui::test] + async fn test_urls_at_beginning_of_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {"https://zed.dev/releases is a cool ˇwebpage."}); + + let screen_coord = + cx.pixel_position(indoc! {"https://zed.dev/relˇeases is a cool webpage."}); + + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights::( + indoc! 
{"«https://zed.dev/releasesˇ» is a cool webpage."}, + ); + + cx.simulate_click(screen_coord, Modifiers::secondary_key()); + assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into())); + } + + #[gpui::test] + async fn test_urls_at_end_of_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {"A cool ˇwebpage is https://zed.dev/releases"}); + + let screen_coord = + cx.pixel_position(indoc! {"A cool webpage is https://zed.dev/releˇases"}); + + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights::( + indoc! {"A cool webpage is «https://zed.dev/releasesˇ»"}, + ); + + cx.simulate_click(screen_coord, Modifiers::secondary_key()); + assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into())); + } +} diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs new file mode 100644 index 0000000..143d5e3 --- /dev/null +++ b/crates/editor/src/hover_popover.rs @@ -0,0 +1,1418 @@ +use crate::{ + display_map::{InlayOffset, ToDisplayPoint}, + hover_links::{InlayHighlight, RangeInEditor}, + Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot, + EditorStyle, ExcerptId, Hover, RangeToAnchorExt, +}; +use futures::{stream::FuturesUnordered, FutureExt}; +use gpui::{ + div, px, AnyElement, CursorStyle, Hsla, InteractiveElement, IntoElement, MouseButton, + ParentElement, Pixels, SharedString, Size, StatefulInteractiveElement, Styled, Task, + ViewContext, WeakView, +}; +use language::{markdown, DiagnosticEntry, Language, LanguageRegistry, ParsedMarkdown}; + +use lsp::DiagnosticSeverity; +use multi_buffer::ToOffset; +use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart}; +use settings::Settings; +use smol::stream::StreamExt; +use std::{ops::Range, sync::Arc, time::Duration}; +use ui::{prelude::*, Tooltip}; +use util::TryFutureExt; +use workspace::Workspace; + +pub const HOVER_DELAY_MILLIS: u64 = 350; +pub const HOVER_REQUEST_DELAY_MILLIS: u64 = 200; + +pub const MIN_POPOVER_CHARACTER_WIDTH: f32 = 20.; +pub const MIN_POPOVER_LINE_HEIGHT: Pixels = px(4.); +pub const HOVER_POPOVER_GAP: Pixels = px(10.); + +/// Bindable action which uses the most recent selection head to trigger a hover +pub fn hover(editor: &mut Editor, _: &Hover, cx: &mut ViewContext) { + let head = editor.selections.newest_anchor().head(); + show_hover(editor, head, true, cx); +} + +/// The internal hover action dispatches between `show_hover` or `hide_hover` +/// depending on whether a point to hover over is provided. 
+pub fn hover_at(editor: &mut Editor, anchor: Option, cx: &mut ViewContext) { + if EditorSettings::get_global(cx).hover_popover_enabled { + if let Some(anchor) = anchor { + show_hover(editor, anchor, false, cx); + } else { + hide_hover(editor, cx); + } + } +} + +pub struct InlayHover { + pub excerpt: ExcerptId, + pub range: InlayHighlight, + pub tooltip: HoverBlock, +} + +pub fn find_hovered_hint_part( + label_parts: Vec, + hint_start: InlayOffset, + hovered_offset: InlayOffset, +) -> Option<(InlayHintLabelPart, Range)> { + if hovered_offset >= hint_start { + let mut hovered_character = (hovered_offset - hint_start).0; + let mut part_start = hint_start; + for part in label_parts { + let part_len = part.value.chars().count(); + if hovered_character > part_len { + hovered_character -= part_len; + part_start.0 += part_len; + } else { + let part_end = InlayOffset(part_start.0 + part_len); + return Some((part, part_start..part_end)); + } + } + } + None +} + +pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut ViewContext) { + if EditorSettings::get_global(cx).hover_popover_enabled { + if editor.pending_rename.is_some() { + return; + } + + let Some(project) = editor.project.clone() else { + return; + }; + + if editor + .hover_state + .info_popovers + .iter() + .any(|InfoPopover { symbol_range, .. }| { + if let RangeInEditor::Inlay(range) = symbol_range { + if range == &inlay_hover.range { + // Hover triggered from same location as last time. Don't show again. + return true; + } + } + false + }) + { + hide_hover(editor, cx); + } + + let task = cx.spawn(|this, mut cx| { + async move { + cx.background_executor() + .timer(Duration::from_millis(HOVER_DELAY_MILLIS)) + .await; + this.update(&mut cx, |this, _| { + this.hover_state.diagnostic_popover = None; + })?; + + let language_registry = project.update(&mut cx, |p, _| p.languages().clone())?; + let blocks = vec![inlay_hover.tooltip]; + let parsed_content = parse_blocks(&blocks, &language_registry, None).await; + + let hover_popover = InfoPopover { + symbol_range: RangeInEditor::Inlay(inlay_hover.range.clone()), + parsed_content, + }; + + this.update(&mut cx, |this, cx| { + // TODO: no background highlights happen for inlays currently + this.hover_state.info_popovers = vec![hover_popover]; + cx.notify(); + })?; + + anyhow::Ok(()) + } + .log_err() + }); + + editor.hover_state.info_task = Some(task); + } +} + +/// Hides the type information popup. +/// Triggered by the `Hover` action when the cursor is not over a symbol or when the +/// selections changed. +pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext) -> bool { + let info_popovers = editor.hover_state.info_popovers.drain(..); + let diagnostics_popover = editor.hover_state.diagnostic_popover.take(); + let did_hide = info_popovers.count() > 0 || diagnostics_popover.is_some(); + + editor.hover_state.info_task = None; + editor.hover_state.triggered_from = None; + + editor.clear_background_highlights::(cx); + + if did_hide { + cx.notify(); + } + + did_hide +} + +/// Queries the LSP and shows type info and documentation +/// about the symbol the mouse is currently hovering over. +/// Triggered by the `Hover` action when the cursor may be over a symbol. 
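+///
+/// Unless `ignore_timeout` is set (as it is for keyboard-triggered hovers), the work is
+/// debounced: the LSP request is only sent after `HOVER_REQUEST_DELAY_MILLIS`, and the
+/// popover is shown no sooner than `HOVER_DELAY_MILLIS` after the trigger.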
+fn show_hover( + editor: &mut Editor, + anchor: Anchor, + ignore_timeout: bool, + cx: &mut ViewContext, +) { + if editor.pending_rename.is_some() { + return; + } + + let snapshot = editor.snapshot(cx); + + let (buffer, buffer_position) = + if let Some(output) = editor.buffer.read(cx).text_anchor_for_position(anchor, cx) { + output + } else { + return; + }; + + let excerpt_id = + if let Some((excerpt_id, _, _)) = editor.buffer().read(cx).excerpt_containing(anchor, cx) { + excerpt_id + } else { + return; + }; + + let project = if let Some(project) = editor.project.clone() { + project + } else { + return; + }; + + if !ignore_timeout { + if editor + .hover_state + .info_popovers + .iter() + .any(|InfoPopover { symbol_range, .. }| { + symbol_range + .as_text_range() + .map(|range| { + let hover_range = range.to_offset(&snapshot.buffer_snapshot); + let offset = anchor.to_offset(&snapshot.buffer_snapshot); + // LSP returns a hover result for the end index of ranges that should be hovered, so we need to + // use an inclusive range here to check if we should dismiss the popover + (hover_range.start..=hover_range.end).contains(&offset) + }) + .unwrap_or(false) + }) + { + // Hover triggered from same location as last time. Don't show again. + return; + } else { + hide_hover(editor, cx); + } + } + + // Don't request again if the location is the same as the previous request + if let Some(triggered_from) = &editor.hover_state.triggered_from { + if triggered_from + .cmp(&anchor, &snapshot.buffer_snapshot) + .is_eq() + { + return; + } + } + + let task = cx.spawn(|this, mut cx| { + async move { + // If we need to delay, delay a set amount initially before making the lsp request + let delay = if ignore_timeout { + None + } else { + // Construct delay task to wait for later + let total_delay = Some( + cx.background_executor() + .timer(Duration::from_millis(HOVER_DELAY_MILLIS)), + ); + + cx.background_executor() + .timer(Duration::from_millis(HOVER_REQUEST_DELAY_MILLIS)) + .await; + total_delay + }; + + // query the LSP for hover info + let hover_request = cx.update(|cx| { + project.update(cx, |project, cx| { + project.hover(&buffer, buffer_position, cx) + }) + })?; + + if let Some(delay) = delay { + delay.await; + } + + // If there's a diagnostic, assign it on the hover state and notify + let local_diagnostic = snapshot + .buffer_snapshot + .diagnostics_in_range::<_, usize>(anchor..anchor, false) + // Find the entry with the most specific range + .min_by_key(|entry| entry.range.end - entry.range.start) + .map(|entry| DiagnosticEntry { + diagnostic: entry.diagnostic, + range: entry.range.to_anchors(&snapshot.buffer_snapshot), + }); + + // Pull the primary diagnostic out so we can jump to it if the popover is clicked + let primary_diagnostic = local_diagnostic.as_ref().and_then(|local_diagnostic| { + snapshot + .buffer_snapshot + .diagnostic_group::(local_diagnostic.diagnostic.group_id) + .find(|diagnostic| diagnostic.diagnostic.is_primary) + .map(|entry| DiagnosticEntry { + diagnostic: entry.diagnostic, + range: entry.range.to_anchors(&snapshot.buffer_snapshot), + }) + }); + + this.update(&mut cx, |this, _| { + this.hover_state.diagnostic_popover = + local_diagnostic.map(|local_diagnostic| DiagnosticPopover { + local_diagnostic, + primary_diagnostic, + }); + })?; + + let hovers_response = hover_request.await; + let language_registry = project.update(&mut cx, |p, _| p.languages().clone())?; + let snapshot = this.update(&mut cx, |this, cx| this.snapshot(cx))?; + let mut hover_highlights = 
Vec::with_capacity(hovers_response.len()); + let mut info_popovers = Vec::with_capacity(hovers_response.len()); + let mut info_popover_tasks = hovers_response + .into_iter() + .map(|hover_result| async { + // Create symbol range of anchors for highlighting and filtering of future requests. + let range = hover_result + .range + .and_then(|range| { + let start = snapshot + .buffer_snapshot + .anchor_in_excerpt(excerpt_id, range.start)?; + let end = snapshot + .buffer_snapshot + .anchor_in_excerpt(excerpt_id, range.end)?; + + Some(start..end) + }) + .unwrap_or_else(|| anchor..anchor); + + let blocks = hover_result.contents; + let language = hover_result.language; + let parsed_content = parse_blocks(&blocks, &language_registry, language).await; + + ( + range.clone(), + InfoPopover { + symbol_range: RangeInEditor::Text(range), + parsed_content, + }, + ) + }) + .collect::>(); + while let Some((highlight_range, info_popover)) = info_popover_tasks.next().await { + hover_highlights.push(highlight_range); + info_popovers.push(info_popover); + } + + this.update(&mut cx, |editor, cx| { + if hover_highlights.is_empty() { + editor.clear_background_highlights::(cx); + } else { + // Highlight the selected symbol using a background highlight + editor.highlight_background::( + &hover_highlights, + |theme| theme.element_hover, // todo update theme + cx, + ); + } + + editor.hover_state.info_popovers = info_popovers; + cx.notify(); + cx.refresh(); + })?; + + anyhow::Ok(()) + } + .log_err() + }); + + editor.hover_state.info_task = Some(task); +} + +async fn parse_blocks( + blocks: &[HoverBlock], + language_registry: &Arc, + language: Option>, +) -> markdown::ParsedMarkdown { + let mut text = String::new(); + let mut highlights = Vec::new(); + let mut region_ranges = Vec::new(); + let mut regions = Vec::new(); + + for block in blocks { + match &block.kind { + HoverBlockKind::PlainText => { + markdown::new_paragraph(&mut text, &mut Vec::new()); + text.push_str(&block.text.replace("\\n", "\n")); + } + + HoverBlockKind::Markdown => { + markdown::parse_markdown_block( + &block.text.replace("\\n", "\n"), + language_registry, + language.clone(), + &mut text, + &mut highlights, + &mut region_ranges, + &mut regions, + ) + .await + } + + HoverBlockKind::Code { language } => { + if let Some(language) = language_registry + .language_for_name(language) + .now_or_never() + .and_then(Result::ok) + { + markdown::highlight_code(&mut text, &mut highlights, &block.text, &language); + } else { + text.push_str(&block.text); + } + } + } + } + + let leading_space = text.chars().take_while(|c| c.is_whitespace()).count(); + if leading_space > 0 { + highlights = highlights + .into_iter() + .map(|(range, style)| { + ( + range.start.saturating_sub(leading_space) + ..range.end.saturating_sub(leading_space), + style, + ) + }) + .collect(); + region_ranges = region_ranges + .into_iter() + .map(|range| { + range.start.saturating_sub(leading_space)..range.end.saturating_sub(leading_space) + }) + .collect(); + } + + ParsedMarkdown { + text: text.trim().to_string(), + highlights, + region_ranges, + regions, + } +} + +#[derive(Default)] +pub struct HoverState { + pub info_popovers: Vec, + pub diagnostic_popover: Option, + pub triggered_from: Option, + pub info_task: Option>>, +} + +impl HoverState { + pub fn visible(&self) -> bool { + !self.info_popovers.is_empty() || self.diagnostic_popover.is_some() + } + + pub fn render( + &mut self, + snapshot: &EditorSnapshot, + style: &EditorStyle, + visible_rows: Range, + max_size: Size, + workspace: 
Option>, + cx: &mut ViewContext, + ) -> Option<(DisplayPoint, Vec)> { + // If there is a diagnostic, position the popovers based on that. + // Otherwise use the start of the hover range + let anchor = self + .diagnostic_popover + .as_ref() + .map(|diagnostic_popover| &diagnostic_popover.local_diagnostic.range.start) + .or_else(|| { + self.info_popovers.iter().find_map(|info_popover| { + match &info_popover.symbol_range { + RangeInEditor::Text(range) => Some(&range.start), + RangeInEditor::Inlay(_) => None, + } + }) + }) + .or_else(|| { + self.info_popovers.iter().find_map(|info_popover| { + match &info_popover.symbol_range { + RangeInEditor::Text(_) => None, + RangeInEditor::Inlay(range) => Some(&range.inlay_position), + } + }) + })?; + let point = anchor.to_display_point(&snapshot.display_snapshot); + + // Don't render if the relevant point isn't on screen + if !self.visible() || !visible_rows.contains(&point.row()) { + return None; + } + + let mut elements = Vec::new(); + + if let Some(diagnostic_popover) = self.diagnostic_popover.as_ref() { + elements.push(diagnostic_popover.render(style, max_size, cx)); + } + for info_popover in &mut self.info_popovers { + elements.push(info_popover.render(style, max_size, workspace.clone(), cx)); + } + + Some((point, elements)) + } +} + +#[derive(Debug, Clone)] +pub struct InfoPopover { + symbol_range: RangeInEditor, + parsed_content: ParsedMarkdown, +} + +impl InfoPopover { + pub fn render( + &mut self, + style: &EditorStyle, + max_size: Size, + workspace: Option>, + cx: &mut ViewContext, + ) -> AnyElement { + div() + .id("info_popover") + .elevation_2(cx) + .p_2() + .overflow_y_scroll() + .max_w(max_size.width) + .max_h(max_size.height) + // Prevent a mouse down/move on the popover from being propagated to the editor, + // because that would dismiss the popover. 
+ .on_mouse_move(|_, cx| cx.stop_propagation()) + .on_mouse_down(MouseButton::Left, |_, cx| cx.stop_propagation()) + .child(crate::render_parsed_markdown( + "content", + &self.parsed_content, + style, + workspace, + cx, + )) + .into_any_element() + } +} + +#[derive(Debug, Clone)] +pub struct DiagnosticPopover { + local_diagnostic: DiagnosticEntry, + primary_diagnostic: Option>, +} + +impl DiagnosticPopover { + pub fn render( + &self, + style: &EditorStyle, + max_size: Size, + cx: &mut ViewContext, + ) -> AnyElement { + let text = match &self.local_diagnostic.diagnostic.source { + Some(source) => format!("{source}: {}", self.local_diagnostic.diagnostic.message), + None => self.local_diagnostic.diagnostic.message.clone(), + }; + + let status_colors = cx.theme().status(); + + struct DiagnosticColors { + pub background: Hsla, + pub border: Hsla, + } + + let diagnostic_colors = match self.local_diagnostic.diagnostic.severity { + DiagnosticSeverity::ERROR => DiagnosticColors { + background: status_colors.error_background, + border: status_colors.error_border, + }, + DiagnosticSeverity::WARNING => DiagnosticColors { + background: status_colors.warning_background, + border: status_colors.warning_border, + }, + DiagnosticSeverity::INFORMATION => DiagnosticColors { + background: status_colors.info_background, + border: status_colors.info_border, + }, + DiagnosticSeverity::HINT => DiagnosticColors { + background: status_colors.hint_background, + border: status_colors.hint_border, + }, + _ => DiagnosticColors { + background: status_colors.ignored_background, + border: status_colors.ignored_border, + }, + }; + + div() + .id("diagnostic") + .block() + .elevation_2(cx) + .overflow_y_scroll() + .px_2() + .py_1() + .bg(diagnostic_colors.background) + .text_color(style.text.color) + .border_1() + .border_color(diagnostic_colors.border) + .rounded_md() + .max_w(max_size.width) + .max_h(max_size.height) + .cursor(CursorStyle::PointingHand) + .tooltip(move |cx| Tooltip::for_action("Go To Diagnostic", &crate::GoToDiagnostic, cx)) + // Prevent a mouse move on the popover from being propagated to the editor, + // because that would dismiss the popover. + .on_mouse_move(|_, cx| cx.stop_propagation()) + // Prevent a mouse down on the popover from being propagated to the editor, + // because that would move the cursor. 
+ .on_mouse_down(MouseButton::Left, |_, cx| cx.stop_propagation()) + .on_click(cx.listener(|editor, _, cx| editor.go_to_diagnostic(&Default::default(), cx))) + .child(SharedString::from(text)) + .into_any_element() + } + + pub fn activation_info(&self) -> (usize, Anchor) { + let entry = self + .primary_diagnostic + .as_ref() + .unwrap_or(&self.local_diagnostic); + + (entry.diagnostic.group_id, entry.range.start) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + editor_tests::init_test, + hover_links::update_inlay_link_and_hover_points, + inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels}, + test::editor_lsp_test_context::EditorLspTestContext, + InlayId, PointForPosition, + }; + use collections::BTreeSet; + use gpui::{FontWeight, HighlightStyle, UnderlineStyle}; + use indoc::indoc; + use language::{language_settings::InlayHintSettings, Diagnostic, DiagnosticSet}; + use lsp::LanguageServerId; + use project::{HoverBlock, HoverBlockKind}; + use smol::stream::StreamExt; + use text::Bias; + use unindent::Unindent; + use util::test::marked_text_ranges; + + #[gpui::test] + async fn test_mouse_hover_info_popover(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + // Basic hover delays and then pops without moving the mouse + cx.set_state(indoc! {" + fn ˇtest() { println!(); } + "}); + let hover_point = cx.display_point(indoc! {" + fn test() { printˇln!(); } + "}); + + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let anchor = snapshot + .buffer_snapshot + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), cx) + }); + assert!(!cx.editor(|editor, _| editor.hover_state.visible())); + + // After delay, hover should be visible. + let symbol_range = cx.lsp_range(indoc! {" + fn test() { «println!»(); } + "}); + let mut requests = + cx.handle_request::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some basic docs".to_string(), + }), + range: Some(symbol_range), + })) + }); + cx.background_executor + .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + requests.next().await; + + cx.editor(|editor, _| { + assert!(editor.hover_state.visible()); + assert_eq!( + editor.hover_state.info_popovers.len(), + 1, + "Expected exactly one hover but got: {:?}", + editor.hover_state.info_popovers + ); + let rendered = editor + .hover_state + .info_popovers + .first() + .cloned() + .unwrap() + .parsed_content; + assert_eq!(rendered.text, "some basic docs".to_string()) + }); + + // Mouse moved with no hover response dismisses + let hover_point = cx.display_point(indoc! 
{" + fn teˇst() { println!(); } + "}); + let mut request = cx + .lsp + .handle_request::(|_, _| async move { Ok(None) }); + cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let anchor = snapshot + .buffer_snapshot + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), cx) + }); + cx.background_executor + .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + request.next().await; + cx.editor(|editor, _| { + assert!(!editor.hover_state.visible()); + }); + } + + #[gpui::test] + async fn test_keyboard_hover_info_popover(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + // Hover with keyboard has no delay + cx.set_state(indoc! {" + fˇn test() { println!(); } + "}); + cx.update_editor(|editor, cx| hover(editor, &Hover, cx)); + let symbol_range = cx.lsp_range(indoc! {" + «fn» test() { println!(); } + "}); + cx.handle_request::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some other basic docs".to_string(), + }), + range: Some(symbol_range), + })) + }) + .next() + .await; + + cx.condition(|editor, _| editor.hover_state.visible()).await; + cx.editor(|editor, _| { + assert_eq!( + editor.hover_state.info_popovers.len(), + 1, + "Expected exactly one hover but got: {:?}", + editor.hover_state.info_popovers + ); + let rendered = editor + .hover_state + .info_popovers + .first() + .cloned() + .unwrap() + .parsed_content; + assert_eq!(rendered.text, "some other basic docs".to_string()) + }); + } + + #[gpui::test] + async fn test_empty_hovers_filtered(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + // Hover with keyboard has no delay + cx.set_state(indoc! {" + fˇn test() { println!(); } + "}); + cx.update_editor(|editor, cx| hover(editor, &Hover, cx)); + let symbol_range = cx.lsp_range(indoc! 
{" + «fn» test() { println!(); } + "}); + cx.handle_request::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Array(vec![ + lsp::MarkedString::String("regular text for hover to show".to_string()), + lsp::MarkedString::String("".to_string()), + lsp::MarkedString::LanguageString(lsp::LanguageString { + language: "Rust".to_string(), + value: "".to_string(), + }), + ]), + range: Some(symbol_range), + })) + }) + .next() + .await; + + cx.condition(|editor, _| editor.hover_state.visible()).await; + cx.editor(|editor, _| { + assert_eq!( + editor.hover_state.info_popovers.len(), + 1, + "Expected exactly one hover but got: {:?}", + editor.hover_state.info_popovers + ); + let rendered = editor + .hover_state + .info_popovers + .first() + .cloned() + .unwrap() + .parsed_content; + assert_eq!( + rendered.text, + "regular text for hover to show".to_string(), + "No empty string hovers should be shown" + ); + }); + } + + #[gpui::test] + async fn test_line_ends_trimmed(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + // Hover with keyboard has no delay + cx.set_state(indoc! {" + fˇn test() { println!(); } + "}); + cx.update_editor(|editor, cx| hover(editor, &Hover, cx)); + let symbol_range = cx.lsp_range(indoc! {" + «fn» test() { println!(); } + "}); + + let code_str = "\nlet hovered_point: Vector2F // size = 8, align = 0x4\n"; + let markdown_string = format!("\n```rust\n{code_str}```"); + + let closure_markdown_string = markdown_string.clone(); + cx.handle_request::(move |_, _, _| { + let future_markdown_string = closure_markdown_string.clone(); + async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: future_markdown_string, + }), + range: Some(symbol_range), + })) + } + }) + .next() + .await; + + cx.condition(|editor, _| editor.hover_state.visible()).await; + cx.editor(|editor, _| { + assert_eq!( + editor.hover_state.info_popovers.len(), + 1, + "Expected exactly one hover but got: {:?}", + editor.hover_state.info_popovers + ); + let rendered = editor + .hover_state + .info_popovers + .first() + .cloned() + .unwrap() + .parsed_content; + assert_eq!( + rendered.text, + code_str.trim(), + "Should not have extra line breaks at end of rendered hover" + ); + }); + } + + #[gpui::test] + async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + // Hover with just diagnostic, pops DiagnosticPopover immediately and then + // info popover once request completes + cx.set_state(indoc! {" + fn teˇst() { println!(); } + "}); + + // Send diagnostic to client + let range = cx.text_anchor_range(indoc! 
{" + fn «test»() { println!(); } + "}); + cx.update_buffer(|buffer, cx| { + let snapshot = buffer.text_snapshot(); + let set = DiagnosticSet::from_sorted_entries( + vec![DiagnosticEntry { + range, + diagnostic: Diagnostic { + message: "A test diagnostic message.".to_string(), + ..Default::default() + }, + }], + &snapshot, + ); + buffer.update_diagnostics(LanguageServerId(0), set, cx); + }); + + // Hover pops diagnostic immediately + cx.update_editor(|editor, cx| hover(editor, &Hover, cx)); + cx.background_executor.run_until_parked(); + + cx.editor(|Editor { hover_state, .. }, _| { + assert!( + hover_state.diagnostic_popover.is_some() && hover_state.info_popovers.is_empty() + ) + }); + + // Info Popover shows after request responded to + let range = cx.lsp_range(indoc! {" + fn «test»() { println!(); } + "}); + cx.handle_request::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some new docs".to_string(), + }), + range: Some(range), + })) + }); + cx.background_executor + .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + + cx.background_executor.run_until_parked(); + cx.editor(|Editor { hover_state, .. }, _| { + hover_state.diagnostic_popover.is_some() && hover_state.info_task.is_some() + }); + } + + #[gpui::test] + fn test_render_blocks(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let languages = Arc::new(LanguageRegistry::test(cx.executor())); + let editor = cx.add_window(|cx| Editor::single_line(cx)); + editor + .update(cx, |editor, _cx| { + let style = editor.style.clone().unwrap(); + + struct Row { + blocks: Vec, + expected_marked_text: String, + expected_styles: Vec, + } + + let rows = &[ + // Strong emphasis + Row { + blocks: vec![HoverBlock { + text: "one **two** three".to_string(), + kind: HoverBlockKind::Markdown, + }], + expected_marked_text: "one «two» three".to_string(), + expected_styles: vec![HighlightStyle { + font_weight: Some(FontWeight::BOLD), + ..Default::default() + }], + }, + // Links + Row { + blocks: vec![HoverBlock { + text: "one [two](https://the-url) three".to_string(), + kind: HoverBlockKind::Markdown, + }], + expected_marked_text: "one «two» three".to_string(), + expected_styles: vec![HighlightStyle { + underline: Some(UnderlineStyle { + thickness: 1.0.into(), + ..Default::default() + }), + ..Default::default() + }], + }, + // Lists + Row { + blocks: vec![HoverBlock { + text: " + lists: + * one + - a + - b + * two + - [c](https://the-url) + - d" + .unindent(), + kind: HoverBlockKind::Markdown, + }], + expected_marked_text: " + lists: + - one + - a + - b + - two + - «c» + - d" + .unindent(), + expected_styles: vec![HighlightStyle { + underline: Some(UnderlineStyle { + thickness: 1.0.into(), + ..Default::default() + }), + ..Default::default() + }], + }, + // Multi-paragraph list items + Row { + blocks: vec![HoverBlock { + text: " + * one two + three + + * four five + * six seven + eight + + nine + * ten + * six" + .unindent(), + kind: HoverBlockKind::Markdown, + }], + expected_marked_text: " + - one two three + - four five + - six seven eight + + nine + - ten + - six" + .unindent(), + expected_styles: vec![HighlightStyle { + underline: Some(UnderlineStyle { + thickness: 1.0.into(), + ..Default::default() + }), + ..Default::default() + }], + }, + ]; + + for Row { + blocks, + expected_marked_text, + expected_styles, + } in &rows[0..] 
+ { + let rendered = smol::block_on(parse_blocks(&blocks, &languages, None)); + + let (expected_text, ranges) = marked_text_ranges(expected_marked_text, false); + let expected_highlights = ranges + .into_iter() + .zip(expected_styles.iter().cloned()) + .collect::>(); + assert_eq!( + rendered.text, expected_text, + "wrong text for input {blocks:?}" + ); + + let rendered_highlights: Vec<_> = rendered + .highlights + .iter() + .filter_map(|(range, highlight)| { + let highlight = highlight.to_highlight_style(&style.syntax)?; + Some((range.clone(), highlight)) + }) + .collect(); + + assert_eq!( + rendered_highlights, expected_highlights, + "wrong highlights for input {blocks:?}" + ); + } + }) + .unwrap(); + } + + #[gpui::test] + async fn test_hover_inlay_label_parts(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Right( + lsp::InlayHintServerCapabilities::Options(lsp::InlayHintOptions { + resolve_provider: Some(true), + ..Default::default() + }), + )), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + struct TestStruct; + + // ================== + + struct TestNewType(T); + + fn main() { + let variableˇ = TestNewType(TestStruct); + } + "}); + + let hint_start_offset = cx.ranges(indoc! {" + struct TestStruct; + + // ================== + + struct TestNewType(T); + + fn main() { + let variableˇ = TestNewType(TestStruct); + } + "})[0] + .start; + let hint_position = cx.to_lsp(hint_start_offset); + let new_type_target_range = cx.lsp_range(indoc! {" + struct TestStruct; + + // ================== + + struct «TestNewType»(T); + + fn main() { + let variable = TestNewType(TestStruct); + } + "}); + let struct_target_range = cx.lsp_range(indoc! {" + struct «TestStruct»; + + // ================== + + struct TestNewType(T); + + fn main() { + let variable = TestNewType(TestStruct); + } + "}); + + let uri = cx.buffer_lsp_url.clone(); + let new_type_label = "TestNewType"; + let struct_label = "TestStruct"; + let entire_hint_label = ": TestNewType"; + let closure_uri = uri.clone(); + cx.lsp + .handle_request::(move |params, _| { + let task_uri = closure_uri.clone(); + async move { + assert_eq!(params.text_document.uri, task_uri); + Ok(Some(vec![lsp::InlayHint { + position: hint_position, + label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart { + value: entire_hint_label.to_string(), + ..Default::default() + }]), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: Some(false), + padding_right: Some(false), + data: None, + }])) + } + }) + .next() + .await; + cx.background_executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let expected_layers = vec![entire_hint_label.to_string()]; + assert_eq!(expected_layers, cached_hint_labels(editor)); + assert_eq!(expected_layers, visible_hint_labels(editor, cx)); + }); + + let inlay_range = cx + .ranges(indoc! 
{" + struct TestStruct; + + // ================== + + struct TestNewType(T); + + fn main() { + let variable« »= TestNewType(TestStruct); + } + "}) + .get(0) + .cloned() + .unwrap(); + let new_type_hint_part_hover_position = cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let previous_valid = inlay_range.start.to_display_point(&snapshot); + let next_valid = inlay_range.end.to_display_point(&snapshot); + assert_eq!(previous_valid.row(), next_valid.row()); + assert!(previous_valid.column() < next_valid.column()); + let exact_unclipped = DisplayPoint::new( + previous_valid.row(), + previous_valid.column() + + (entire_hint_label.find(new_type_label).unwrap() + new_type_label.len() / 2) + as u32, + ); + PointForPosition { + previous_valid, + next_valid, + exact_unclipped, + column_overshoot_after_line_end: 0, + } + }); + cx.update_editor(|editor, cx| { + update_inlay_link_and_hover_points( + &editor.snapshot(cx), + new_type_hint_part_hover_position, + editor, + true, + false, + cx, + ); + }); + + let resolve_closure_uri = uri.clone(); + cx.lsp + .handle_request::( + move |mut hint_to_resolve, _| { + let mut resolved_hint_positions = BTreeSet::new(); + let task_uri = resolve_closure_uri.clone(); + async move { + let inserted = resolved_hint_positions.insert(hint_to_resolve.position); + assert!(inserted, "Hint {hint_to_resolve:?} was resolved twice"); + + // `: TestNewType` + hint_to_resolve.label = lsp::InlayHintLabel::LabelParts(vec![ + lsp::InlayHintLabelPart { + value: ": ".to_string(), + ..Default::default() + }, + lsp::InlayHintLabelPart { + value: new_type_label.to_string(), + location: Some(lsp::Location { + uri: task_uri.clone(), + range: new_type_target_range, + }), + tooltip: Some(lsp::InlayHintLabelPartTooltip::String(format!( + "A tooltip for `{new_type_label}`" + ))), + ..Default::default() + }, + lsp::InlayHintLabelPart { + value: "<".to_string(), + ..Default::default() + }, + lsp::InlayHintLabelPart { + value: struct_label.to_string(), + location: Some(lsp::Location { + uri: task_uri, + range: struct_target_range, + }), + tooltip: Some(lsp::InlayHintLabelPartTooltip::MarkupContent( + lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: format!("A tooltip for `{struct_label}`"), + }, + )), + ..Default::default() + }, + lsp::InlayHintLabelPart { + value: ">".to_string(), + ..Default::default() + }, + ]); + + Ok(hint_to_resolve) + } + }, + ) + .next() + .await; + cx.background_executor.run_until_parked(); + + cx.update_editor(|editor, cx| { + update_inlay_link_and_hover_points( + &editor.snapshot(cx), + new_type_hint_part_hover_position, + editor, + true, + false, + cx, + ); + }); + cx.background_executor + .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + cx.background_executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let hover_state = &editor.hover_state; + assert!( + hover_state.diagnostic_popover.is_none() && hover_state.info_popovers.len() == 1 + ); + let popover = hover_state.info_popovers.first().cloned().unwrap(); + let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx)); + assert_eq!( + popover.symbol_range, + RangeInEditor::Inlay(InlayHighlight { + inlay: InlayId::Hint(0), + inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right), + range: ": ".len()..": ".len() + new_type_label.len(), + }), + "Popover range should match the new type label part" + ); + assert_eq!( + popover.parsed_content.text, + format!("A tooltip for `{new_type_label}`"), + "Rendered text should 
not anyhow alter backticks" + ); + }); + + let struct_hint_part_hover_position = cx.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + let previous_valid = inlay_range.start.to_display_point(&snapshot); + let next_valid = inlay_range.end.to_display_point(&snapshot); + assert_eq!(previous_valid.row(), next_valid.row()); + assert!(previous_valid.column() < next_valid.column()); + let exact_unclipped = DisplayPoint::new( + previous_valid.row(), + previous_valid.column() + + (entire_hint_label.find(struct_label).unwrap() + struct_label.len() / 2) + as u32, + ); + PointForPosition { + previous_valid, + next_valid, + exact_unclipped, + column_overshoot_after_line_end: 0, + } + }); + cx.update_editor(|editor, cx| { + update_inlay_link_and_hover_points( + &editor.snapshot(cx), + struct_hint_part_hover_position, + editor, + true, + false, + cx, + ); + }); + cx.background_executor + .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100)); + cx.background_executor.run_until_parked(); + cx.update_editor(|editor, cx| { + let hover_state = &editor.hover_state; + assert!( + hover_state.diagnostic_popover.is_none() && hover_state.info_popovers.len() == 1 + ); + let popover = hover_state.info_popovers.first().cloned().unwrap(); + let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx)); + assert_eq!( + popover.symbol_range, + RangeInEditor::Inlay(InlayHighlight { + inlay: InlayId::Hint(0), + inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right), + range: ": ".len() + new_type_label.len() + "<".len() + ..": ".len() + new_type_label.len() + "<".len() + struct_label.len(), + }), + "Popover range should match the struct label part" + ); + assert_eq!( + popover.parsed_content.text, + format!("A tooltip for {struct_label}"), + "Rendered markdown element should remove backticks from text" + ); + }); + } +} diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs new file mode 100644 index 0000000..f9db17a --- /dev/null +++ b/crates/editor/src/hunk_diff.rs @@ -0,0 +1,678 @@ +use std::{ + ops::{Range, RangeInclusive}, + sync::Arc, +}; + +use collections::{hash_map, HashMap, HashSet}; +use git::diff::{DiffHunk, DiffHunkStatus}; +use gpui::{AppContext, Hsla, Model, Task, View}; +use language::Buffer; +use multi_buffer::{ + Anchor, ExcerptRange, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, ToPoint, +}; +use settings::{Settings, SettingsStore}; +use text::{BufferId, Point}; +use ui::{ + div, ActiveTheme, Context as _, IntoElement, ParentElement, Styled, ViewContext, VisualContext, +}; +use util::{debug_panic, RangeExt}; + +use crate::{ + editor_settings::CurrentLineHighlight, + git::{diff_hunk_to_display, DisplayDiffHunk}, + hunk_status, hunks_for_selections, BlockDisposition, BlockId, BlockProperties, BlockStyle, + DiffRowHighlight, Editor, EditorSettings, EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, + RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, +}; + +#[derive(Debug, Clone)] +pub(super) struct HunkToExpand { + pub multi_buffer_range: Range, + pub status: DiffHunkStatus, + pub diff_base_byte_range: Range, +} + +#[derive(Debug, Default)] +pub(super) struct ExpandedHunks { + hunks: Vec, + diff_base: HashMap, + hunk_update_tasks: HashMap, Task<()>>, +} + +#[derive(Debug)] +struct DiffBaseBuffer { + buffer: Model, + diff_base_version: usize, +} + +impl ExpandedHunks { + pub fn hunks(&self, include_folded: bool) -> impl Iterator { + self.hunks + .iter() + .filter(move |hunk| include_folded || !hunk.folded) + } 
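+
+    // Illustrative usage only (a sketch; `editor` is assumed to be an `&Editor` that keeps
+    // this state in its `expanded_hunks` field, as in `expand_all_hunk_diffs` below):
+    //
+    //     let expanded_ranges: Vec<_> = editor
+    //         .expanded_hunks
+    //         .hunks(false) // skip hunks that are currently folded
+    //         .map(|hunk| hunk.hunk_range.clone())
+    //         .collect();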
+} + +#[derive(Debug, Clone)] +pub(super) struct ExpandedHunk { + pub block: Option, + pub hunk_range: Range, + pub diff_base_byte_range: Range, + pub status: DiffHunkStatus, + pub folded: bool, +} + +impl Editor { + pub fn toggle_hunk_diff(&mut self, _: &ToggleHunkDiff, cx: &mut ViewContext) { + let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + let selections = self.selections.disjoint_anchors(); + self.toggle_hunks_expanded( + hunks_for_selections(&multi_buffer_snapshot, &selections), + cx, + ); + } + + pub fn expand_all_hunk_diffs(&mut self, _: &ExpandAllHunkDiffs, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx); + let display_rows_with_expanded_hunks = self + .expanded_hunks + .hunks(false) + .map(|hunk| &hunk.hunk_range) + .map(|anchor_range| { + ( + anchor_range + .start + .to_display_point(&snapshot.display_snapshot) + .row(), + anchor_range + .end + .to_display_point(&snapshot.display_snapshot) + .row(), + ) + }) + .collect::>(); + let hunks = snapshot + .display_snapshot + .buffer_snapshot + .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + .filter(|hunk| { + let hunk_display_row_range = Point::new(hunk.associated_range.start.0, 0) + .to_display_point(&snapshot.display_snapshot) + ..Point::new(hunk.associated_range.end.0, 0) + .to_display_point(&snapshot.display_snapshot); + let row_range_end = + display_rows_with_expanded_hunks.get(&hunk_display_row_range.start.row()); + row_range_end.is_none() || row_range_end != Some(&hunk_display_row_range.end.row()) + }); + self.toggle_hunks_expanded(hunks.collect(), cx); + } + + fn toggle_hunks_expanded( + &mut self, + hunks_to_toggle: Vec>, + cx: &mut ViewContext, + ) { + let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); + let new_toggle_task = cx.spawn(move |editor, mut cx| async move { + if let Some(task) = previous_toggle_task { + task.await; + } + + editor + .update(&mut cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let mut hunks_to_toggle = hunks_to_toggle.into_iter().fuse().peekable(); + let mut highlights_to_remove = + Vec::with_capacity(editor.expanded_hunks.hunks.len()); + let mut blocks_to_remove = HashSet::default(); + let mut hunks_to_expand = Vec::new(); + editor.expanded_hunks.hunks.retain(|expanded_hunk| { + if expanded_hunk.folded { + return true; + } + let expanded_hunk_row_range = expanded_hunk + .hunk_range + .start + .to_display_point(&snapshot) + .row() + ..expanded_hunk + .hunk_range + .end + .to_display_point(&snapshot) + .row(); + let mut retain = true; + while let Some(hunk_to_toggle) = hunks_to_toggle.peek() { + match diff_hunk_to_display(hunk_to_toggle, &snapshot) { + DisplayDiffHunk::Folded { .. 
} => { + hunks_to_toggle.next(); + continue; + } + DisplayDiffHunk::Unfolded { + diff_base_byte_range, + display_row_range, + multi_buffer_range, + status, + } => { + let hunk_to_toggle_row_range = display_row_range; + if hunk_to_toggle_row_range.start > expanded_hunk_row_range.end + { + break; + } else if expanded_hunk_row_range == hunk_to_toggle_row_range { + highlights_to_remove.push(expanded_hunk.hunk_range.clone()); + blocks_to_remove.extend(expanded_hunk.block); + hunks_to_toggle.next(); + retain = false; + break; + } else { + hunks_to_expand.push(HunkToExpand { + status, + multi_buffer_range, + diff_base_byte_range, + }); + hunks_to_toggle.next(); + continue; + } + } + } + } + + retain + }); + for remaining_hunk in hunks_to_toggle { + let remaining_hunk_point_range = + Point::new(remaining_hunk.associated_range.start.0, 0) + ..Point::new(remaining_hunk.associated_range.end.0, 0); + hunks_to_expand.push(HunkToExpand { + status: hunk_status(&remaining_hunk), + multi_buffer_range: remaining_hunk_point_range + .to_anchors(&snapshot.buffer_snapshot), + diff_base_byte_range: remaining_hunk.diff_base_byte_range.clone(), + }); + } + + for removed_rows in highlights_to_remove { + editor.highlight_rows::( + to_inclusive_row_range(removed_rows, &snapshot), + None, + false, + cx, + ); + } + editor.remove_blocks(blocks_to_remove, None, cx); + for hunk in hunks_to_expand { + editor.expand_diff_hunk(None, &hunk, cx); + } + cx.notify(); + }) + .ok(); + }); + + self.expanded_hunks + .hunk_update_tasks + .insert(None, cx.background_executor().spawn(new_toggle_task)); + } + + pub(super) fn expand_diff_hunk( + &mut self, + diff_base_buffer: Option>, + hunk: &HunkToExpand, + cx: &mut ViewContext<'_, Editor>, + ) -> Option<()> { + let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + let multi_buffer_row_range = hunk + .multi_buffer_range + .start + .to_point(&multi_buffer_snapshot) + ..hunk.multi_buffer_range.end.to_point(&multi_buffer_snapshot); + let hunk_start = hunk.multi_buffer_range.start; + let hunk_end = hunk.multi_buffer_range.end; + + let buffer = self.buffer().clone(); + let snapshot = self.snapshot(cx); + let (diff_base_buffer, deleted_text_lines) = buffer.update(cx, |buffer, cx| { + let hunk = buffer_diff_hunk(&snapshot.buffer_snapshot, multi_buffer_row_range.clone())?; + let mut buffer_ranges = buffer.range_to_buffer_ranges(multi_buffer_row_range, cx); + if buffer_ranges.len() == 1 { + let (buffer, _, _) = buffer_ranges.pop()?; + let diff_base_buffer = diff_base_buffer + .or_else(|| self.current_diff_base_buffer(&buffer, cx)) + .or_else(|| create_diff_base_buffer(&buffer, cx))?; + let buffer = buffer.read(cx); + let deleted_text_lines = buffer.diff_base().map(|diff_base| { + let diff_start_row = diff_base + .offset_to_point(hunk.diff_base_byte_range.start) + .row; + let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row; + let line_count = diff_end_row - diff_start_row; + line_count as u8 + })?; + Some((diff_base_buffer, deleted_text_lines)) + } else { + None + } + })?; + + let block_insert_index = match self.expanded_hunks.hunks.binary_search_by(|probe| { + probe + .hunk_range + .start + .cmp(&hunk_start, &multi_buffer_snapshot) + }) { + Ok(_already_present) => return None, + Err(ix) => ix, + }; + + let block = match hunk.status { + DiffHunkStatus::Removed => { + self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, &hunk, cx) + } + DiffHunkStatus::Added => { + self.highlight_rows::( + to_inclusive_row_range(hunk_start..hunk_end, 
&snapshot), + Some(added_hunk_color(cx)), + false, + cx, + ); + None + } + DiffHunkStatus::Modified => { + self.highlight_rows::( + to_inclusive_row_range(hunk_start..hunk_end, &snapshot), + Some(added_hunk_color(cx)), + false, + cx, + ); + self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, &hunk, cx) + } + }; + self.expanded_hunks.hunks.insert( + block_insert_index, + ExpandedHunk { + block, + hunk_range: hunk_start..hunk_end, + status: hunk.status, + folded: false, + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + }, + ); + + Some(()) + } + + fn insert_deleted_text_block( + &mut self, + diff_base_buffer: Model, + deleted_text_height: u8, + hunk: &HunkToExpand, + cx: &mut ViewContext<'_, Self>, + ) -> Option { + let deleted_hunk_color = deleted_hunk_color(cx); + let (editor_height, editor_with_deleted_text) = + editor_with_deleted_text(diff_base_buffer, deleted_hunk_color, hunk, cx); + let parent_gutter_offset = self.gutter_dimensions.width + self.gutter_dimensions.margin; + let mut new_block_ids = self.insert_blocks( + Some(BlockProperties { + position: hunk.multi_buffer_range.start, + height: editor_height.max(deleted_text_height), + style: BlockStyle::Flex, + render: Box::new(move |_| { + div() + .bg(deleted_hunk_color) + .size_full() + .pl(parent_gutter_offset) + .child(editor_with_deleted_text.clone()) + .into_any_element() + }), + disposition: BlockDisposition::Above, + }), + None, + cx, + ); + if new_block_ids.len() == 1 { + new_block_ids.pop() + } else { + debug_panic!( + "Inserted one editor block but did not receive exactly one block id: {new_block_ids:?}" + ); + None + } + } + + pub(super) fn clear_expanded_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) { + self.expanded_hunks.hunk_update_tasks.clear(); + let to_remove = self + .expanded_hunks + .hunks + .drain(..) 
+ .filter_map(|expanded_hunk| expanded_hunk.block) + .collect(); + self.clear_row_highlights::(); + self.remove_blocks(to_remove, None, cx); + } + + pub(super) fn sync_expanded_diff_hunks( + &mut self, + buffer: Model, + cx: &mut ViewContext<'_, Self>, + ) { + let buffer_id = buffer.read(cx).remote_id(); + let buffer_diff_base_version = buffer.read(cx).diff_base_version(); + self.expanded_hunks + .hunk_update_tasks + .remove(&Some(buffer_id)); + let diff_base_buffer = self.current_diff_base_buffer(&buffer, cx); + let new_sync_task = cx.spawn(move |editor, mut cx| async move { + let diff_base_buffer_unchanged = diff_base_buffer.is_some(); + let Ok(diff_base_buffer) = + cx.update(|cx| diff_base_buffer.or_else(|| create_diff_base_buffer(&buffer, cx))) + else { + return; + }; + editor + .update(&mut cx, |editor, cx| { + if let Some(diff_base_buffer) = &diff_base_buffer { + editor.expanded_hunks.diff_base.insert( + buffer_id, + DiffBaseBuffer { + buffer: diff_base_buffer.clone(), + diff_base_version: buffer_diff_base_version, + }, + ); + } + + let snapshot = editor.snapshot(cx); + let mut recalculated_hunks = snapshot + .buffer_snapshot + .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + .filter(|hunk| hunk.buffer_id == buffer_id) + .fuse() + .peekable(); + let mut highlights_to_remove = + Vec::with_capacity(editor.expanded_hunks.hunks.len()); + let mut blocks_to_remove = HashSet::default(); + let mut hunks_to_reexpand = + Vec::with_capacity(editor.expanded_hunks.hunks.len()); + editor.expanded_hunks.hunks.retain_mut(|expanded_hunk| { + if expanded_hunk.hunk_range.start.buffer_id != Some(buffer_id) { + return true; + }; + + let mut retain = false; + if diff_base_buffer_unchanged { + let expanded_hunk_display_range = expanded_hunk + .hunk_range + .start + .to_display_point(&snapshot) + .row() + ..expanded_hunk + .hunk_range + .end + .to_display_point(&snapshot) + .row(); + while let Some(buffer_hunk) = recalculated_hunks.peek() { + match diff_hunk_to_display(&buffer_hunk, &snapshot) { + DisplayDiffHunk::Folded { display_row } => { + recalculated_hunks.next(); + if !expanded_hunk.folded + && expanded_hunk_display_range + .to_inclusive() + .contains(&display_row) + { + retain = true; + expanded_hunk.folded = true; + highlights_to_remove + .push(expanded_hunk.hunk_range.clone()); + if let Some(block) = expanded_hunk.block.take() { + blocks_to_remove.insert(block); + } + break; + } else { + continue; + } + } + DisplayDiffHunk::Unfolded { + diff_base_byte_range, + display_row_range, + multi_buffer_range, + status, + } => { + let hunk_display_range = display_row_range; + if expanded_hunk_display_range.start + > hunk_display_range.end + { + recalculated_hunks.next(); + continue; + } else if expanded_hunk_display_range.end + < hunk_display_range.start + { + break; + } else { + if !expanded_hunk.folded + && expanded_hunk_display_range == hunk_display_range + && expanded_hunk.status == hunk_status(buffer_hunk) + && expanded_hunk.diff_base_byte_range + == buffer_hunk.diff_base_byte_range + { + recalculated_hunks.next(); + retain = true; + } else { + hunks_to_reexpand.push(HunkToExpand { + status, + multi_buffer_range, + diff_base_byte_range, + }); + } + break; + } + } + } + } + } + if !retain { + blocks_to_remove.extend(expanded_hunk.block); + highlights_to_remove.push(expanded_hunk.hunk_range.clone()); + } + retain + }); + + for removed_rows in highlights_to_remove { + editor.highlight_rows::( + to_inclusive_row_range(removed_rows, &snapshot), + None, + false, + cx, + ); + } + 
editor.remove_blocks(blocks_to_remove, None, cx); + + if let Some(diff_base_buffer) = &diff_base_buffer { + for hunk in hunks_to_reexpand { + editor.expand_diff_hunk(Some(diff_base_buffer.clone()), &hunk, cx); + } + } + }) + .ok(); + }); + + self.expanded_hunks.hunk_update_tasks.insert( + Some(buffer_id), + cx.background_executor().spawn(new_sync_task), + ); + } + + fn current_diff_base_buffer( + &mut self, + buffer: &Model, + cx: &mut AppContext, + ) -> Option> { + buffer.update(cx, |buffer, _| { + match self.expanded_hunks.diff_base.entry(buffer.remote_id()) { + hash_map::Entry::Occupied(o) => { + if o.get().diff_base_version != buffer.diff_base_version() { + o.remove(); + None + } else { + Some(o.get().buffer.clone()) + } + } + hash_map::Entry::Vacant(_) => None, + } + }) + } +} + +fn create_diff_base_buffer(buffer: &Model, cx: &mut AppContext) -> Option> { + buffer + .update(cx, |buffer, _| { + let language = buffer.language().cloned(); + let diff_base = buffer.diff_base()?.clone(); + Some((buffer.line_ending(), diff_base, language)) + }) + .map(|(line_ending, diff_base, language)| { + cx.new_model(|cx| { + let buffer = Buffer::local_normalized(diff_base, line_ending, cx); + match language { + Some(language) => buffer.with_language(language, cx), + None => buffer, + } + }) + }) +} + +fn added_hunk_color(cx: &AppContext) -> Hsla { + let mut created_color = cx.theme().status().git().created; + created_color.fade_out(0.7); + created_color +} + +fn deleted_hunk_color(cx: &AppContext) -> Hsla { + let mut deleted_color = cx.theme().status().git().deleted; + deleted_color.fade_out(0.7); + deleted_color +} + +fn editor_with_deleted_text( + diff_base_buffer: Model, + deleted_color: Hsla, + hunk: &HunkToExpand, + cx: &mut ViewContext<'_, Editor>, +) -> (u8, View) { + let parent_editor = cx.view().downgrade(); + let editor = cx.new_view(|cx| { + let multi_buffer = + cx.new_model(|_| MultiBuffer::without_headers(0, language::Capability::ReadOnly)); + multi_buffer.update(cx, |multi_buffer, cx| { + multi_buffer.push_excerpts( + diff_base_buffer, + Some(ExcerptRange { + context: hunk.diff_base_byte_range.clone(), + primary: None, + }), + cx, + ); + }); + + let mut editor = Editor::for_multibuffer(multi_buffer, None, cx); + editor.soft_wrap_mode_override = Some(language::language_settings::SoftWrap::None); + editor.show_wrap_guides = Some(false); + editor.show_gutter = false; + editor.scroll_manager.set_forbid_vertical_scroll(true); + editor.set_read_only(true); + + let editor_snapshot = editor.snapshot(cx); + let start = editor_snapshot.buffer_snapshot.anchor_before(0); + let end = editor_snapshot + .buffer_snapshot + .anchor_after(editor.buffer.read(cx).len(cx)); + + editor.highlight_rows::(start..=end, Some(deleted_color), false, cx); + + let subscription_editor = parent_editor.clone(); + editor._subscriptions.extend([ + cx.on_blur(&editor.focus_handle, |editor, cx| { + editor.set_current_line_highlight(CurrentLineHighlight::None); + editor.change_selections(None, cx, |s| { + s.try_cancel(); + }); + cx.notify(); + }), + cx.on_focus(&editor.focus_handle, move |editor, cx| { + let restored_highlight = if let Some(parent_editor) = subscription_editor.upgrade() + { + parent_editor.read(cx).current_line_highlight + } else { + EditorSettings::get_global(cx).current_line_highlight + }; + editor.set_current_line_highlight(restored_highlight); + cx.notify(); + }), + cx.observe_global::(|editor, cx| { + if !editor.is_focused(cx) { + editor.set_current_line_highlight(CurrentLineHighlight::None); + } + }), + 
]); + let original_multi_buffer_range = hunk.multi_buffer_range.clone(); + let diff_base_range = hunk.diff_base_byte_range.clone(); + editor.register_action::(move |_, cx| { + parent_editor + .update(cx, |editor, cx| { + let Some((buffer, original_text)) = editor.buffer().update(cx, |buffer, cx| { + let (_, buffer, _) = + buffer.excerpt_containing(original_multi_buffer_range.start, cx)?; + let original_text = + buffer.read(cx).diff_base()?.slice(diff_base_range.clone()); + Some((buffer, Arc::from(original_text.to_string()))) + }) else { + return; + }; + buffer.update(cx, |buffer, cx| { + buffer.edit( + Some(( + original_multi_buffer_range.start.text_anchor + ..original_multi_buffer_range.end.text_anchor, + original_text, + )), + None, + cx, + ) + }); + }) + .ok(); + }); + editor + }); + + let editor_height = editor.update(cx, |editor, cx| editor.max_point(cx).row().0 as u8); + (editor_height, editor) +} + +fn buffer_diff_hunk( + buffer_snapshot: &MultiBufferSnapshot, + row_range: Range, +) -> Option> { + let mut hunks = buffer_snapshot.git_diff_hunks_in_range( + MultiBufferRow(row_range.start.row)..MultiBufferRow(row_range.end.row), + ); + let hunk = hunks.next()?; + let second_hunk = hunks.next(); + if second_hunk.is_none() { + return Some(hunk); + } + None +} + +fn to_inclusive_row_range( + row_range: Range, + snapshot: &EditorSnapshot, +) -> RangeInclusive { + let mut display_row_range = + row_range.start.to_display_point(snapshot)..row_range.end.to_display_point(snapshot); + if display_row_range.end.row() > display_row_range.start.row() { + *display_row_range.end.row_mut() -= 1; + } + let point_range = display_row_range.start.to_point(&snapshot.display_snapshot) + ..display_row_range.end.to_point(&snapshot.display_snapshot); + let new_range = point_range.to_anchors(&snapshot.buffer_snapshot); + new_range.start..=new_range.end +} diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs new file mode 100644 index 0000000..8a934ec --- /dev/null +++ b/crates/editor/src/inlay_hint_cache.rs @@ -0,0 +1,3446 @@ +/// Stores and updates all data received from LSP textDocument/inlayHint requests. +/// Has nothing to do with other inlays, e.g. copilot suggestions — those are stored elsewhere. +/// On every update, cache may query for more inlay hints and update inlays on the screen. +/// +/// Inlays stored on screen are in [`crate::display_map::inlay_map`] and this cache is the only way to update any inlay hint data in the visible hints in the inlay map. +/// For determining the update to the `inlay_map`, the cache requires a list of visible inlay hints — all other hints are not relevant and their separate updates are not influencing the cache work. +/// +/// Due to the way the data is stored for both visible inlays and the cache, every inlay (and inlay hint) collection is editor-specific, so a single buffer may have multiple sets of inlays of open on different panes. 
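The splice-based update described above can be illustrated with a minimal, self-contained sketch; the toy `HintId`/`Splice` types below stand in for the real `InlayId` and `InlaySplice`, and anchors/excerpts are omitted. The point is only that the cache computes the difference between the hints currently on screen and the hints that should be shown, rather than clearing and re-adding everything.

```rust
use std::collections::{HashMap, HashSet};

// Toy id and splice types; the real cache keys hints per excerpt and anchors
// them in the multibuffer, which is omitted here.
type HintId = usize;

#[derive(Debug, Default)]
struct Splice {
    to_remove: Vec<HintId>,
    to_insert: Vec<(HintId, String)>,
}

/// Compute the minimal change needed to go from the currently visible hints
/// to the desired set, instead of resetting everything (which would make the
/// hints flicker and jump).
fn compute_splice(visible: &HashSet<HintId>, desired: &HashMap<HintId, String>) -> Splice {
    let mut splice = Splice::default();
    for id in visible {
        if !desired.contains_key(id) {
            splice.to_remove.push(*id);
        }
    }
    for (id, label) in desired {
        if !visible.contains(id) {
            splice.to_insert.push((*id, label.clone()));
        }
    }
    splice
}

fn main() {
    let visible: HashSet<HintId> = [1, 2].into_iter().collect();
    let desired: HashMap<HintId, String> =
        [(2, ": i32".to_string()), (3, ": String".to_string())]
            .into_iter()
            .collect();
    let splice = compute_splice(&visible, &desired);
    assert_eq!(splice.to_remove, vec![1]);
    assert_eq!(splice.to_insert.len(), 1);
}
```

The real `InlaySplice` defined below carries full `Inlay` values anchored into the multibuffer, but follows the same remove/insert shape.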
+use std::{ + cmp, + ops::{ControlFlow, Range}, + sync::Arc, + time::Duration, +}; + +use crate::{ + display_map::Inlay, Anchor, Editor, ExcerptId, InlayId, MultiBuffer, MultiBufferSnapshot, +}; +use anyhow::Context; +use clock::Global; +use futures::future; +use gpui::{AsyncWindowContext, Model, ModelContext, Task, ViewContext}; +use language::{language_settings::InlayHintKind, Buffer, BufferSnapshot}; +use parking_lot::RwLock; +use project::{InlayHint, ResolveState}; + +use collections::{hash_map, HashMap, HashSet}; +use language::language_settings::InlayHintSettings; +use smol::lock::Semaphore; +use sum_tree::Bias; +use text::{BufferId, ToOffset, ToPoint}; +use util::{post_inc, ResultExt}; + +pub struct InlayHintCache { + hints: HashMap>>, + allowed_hint_kinds: HashSet>, + version: usize, + pub(super) enabled: bool, + update_tasks: HashMap, + refresh_task: Option>, + invalidate_debounce: Option, + append_debounce: Option, + lsp_request_limiter: Arc, +} + +#[derive(Debug)] +struct TasksForRanges { + tasks: Vec>, + sorted_ranges: Vec>, +} + +#[derive(Debug)] +struct CachedExcerptHints { + version: usize, + buffer_version: Global, + buffer_id: BufferId, + ordered_hints: Vec, + hints_by_id: HashMap, +} + +/// A logic to apply when querying for new inlay hints and deciding what to do with the old entries in the cache in case of conflicts. +#[derive(Debug, Clone, Copy)] +pub(super) enum InvalidationStrategy { + /// Hints reset is requested by the LSP server. + /// Demands to re-query all inlay hints needed and invalidate all cached entries, but does not require instant update with invalidation. + /// + /// Despite nothing forbids language server from sending this request on every edit, it is expected to be sent only when certain internal server state update, invisible for the editor otherwise. + RefreshRequested, + /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited at least on one place. + /// Neither editor nor LSP is able to tell which open file hints' are not affected, so all of them have to be invalidated, re-queried and do that fast enough to avoid being slow, but also debounce to avoid loading hints on every fast keystroke sequence. + BufferEdited, + /// A new file got opened/new excerpt was added to a multibuffer/a [multi]buffer was scrolled to a new position. + /// No invalidation should be done at all, all new hints are added to the cache. + /// + /// A special case is the settings change: in addition to LSP capabilities, Zed allows omitting certain hint kinds (defined by the corresponding LSP part: type/parameter/other). + /// This does not lead to cache invalidation, but would require cache usage for determining which hints are not displayed and issuing an update to inlays on the screen. + None, +} + +/// A splice to send into the `inlay_map` for updating the visible inlays on the screen. +/// "Visible" inlays may not be displayed in the buffer right away, but those are ready to be displayed on further buffer scroll, pane item activations, etc. right away without additional LSP queries or settings changes. +/// The data in the cache is never used directly for displaying inlays on the screen, to avoid races with updates from LSP queries and sync overhead. +/// Splice is picked to help avoid extra hint flickering and "jumps" on the screen. 
+#[derive(Debug, Default)] +pub(super) struct InlaySplice { + pub to_remove: Vec, + pub to_insert: Vec, +} + +#[derive(Debug)] +struct ExcerptHintsUpdate { + excerpt_id: ExcerptId, + remove_from_visible: HashSet, + remove_from_cache: HashSet, + add_to_cache: Vec, +} + +#[derive(Debug, Clone, Copy)] +struct ExcerptQuery { + buffer_id: BufferId, + excerpt_id: ExcerptId, + cache_version: usize, + invalidate: InvalidationStrategy, + reason: &'static str, +} + +impl InvalidationStrategy { + fn should_invalidate(&self) -> bool { + matches!( + self, + InvalidationStrategy::RefreshRequested | InvalidationStrategy::BufferEdited + ) + } +} + +impl TasksForRanges { + fn new(query_ranges: QueryRanges, task: Task<()>) -> Self { + let mut sorted_ranges = Vec::new(); + sorted_ranges.extend(query_ranges.before_visible); + sorted_ranges.extend(query_ranges.visible); + sorted_ranges.extend(query_ranges.after_visible); + Self { + tasks: vec![task], + sorted_ranges, + } + } + + fn update_cached_tasks( + &mut self, + buffer_snapshot: &BufferSnapshot, + query_ranges: QueryRanges, + invalidate: InvalidationStrategy, + spawn_task: impl FnOnce(QueryRanges) -> Task<()>, + ) { + let query_ranges = if invalidate.should_invalidate() { + self.tasks.clear(); + self.sorted_ranges.clear(); + query_ranges + } else { + let mut non_cached_query_ranges = query_ranges; + non_cached_query_ranges.before_visible = non_cached_query_ranges + .before_visible + .into_iter() + .flat_map(|query_range| { + self.remove_cached_ranges_from_query(buffer_snapshot, query_range) + }) + .collect(); + non_cached_query_ranges.visible = non_cached_query_ranges + .visible + .into_iter() + .flat_map(|query_range| { + self.remove_cached_ranges_from_query(buffer_snapshot, query_range) + }) + .collect(); + non_cached_query_ranges.after_visible = non_cached_query_ranges + .after_visible + .into_iter() + .flat_map(|query_range| { + self.remove_cached_ranges_from_query(buffer_snapshot, query_range) + }) + .collect(); + non_cached_query_ranges + }; + + if !query_ranges.is_empty() { + self.tasks.push(spawn_task(query_ranges)); + } + } + + fn remove_cached_ranges_from_query( + &mut self, + buffer_snapshot: &BufferSnapshot, + query_range: Range, + ) -> Vec> { + let mut ranges_to_query = Vec::new(); + let mut latest_cached_range = None::<&mut Range>; + for cached_range in self + .sorted_ranges + .iter_mut() + .skip_while(|cached_range| { + cached_range + .end + .cmp(&query_range.start, buffer_snapshot) + .is_lt() + }) + .take_while(|cached_range| { + cached_range + .start + .cmp(&query_range.end, buffer_snapshot) + .is_le() + }) + { + match latest_cached_range { + Some(latest_cached_range) => { + if latest_cached_range.end.offset.saturating_add(1) < cached_range.start.offset + { + ranges_to_query.push(latest_cached_range.end..cached_range.start); + cached_range.start = latest_cached_range.end; + } + } + None => { + if query_range + .start + .cmp(&cached_range.start, buffer_snapshot) + .is_lt() + { + ranges_to_query.push(query_range.start..cached_range.start); + cached_range.start = query_range.start; + } + } + } + latest_cached_range = Some(cached_range); + } + + match latest_cached_range { + Some(latest_cached_range) => { + if latest_cached_range.end.offset.saturating_add(1) < query_range.end.offset { + ranges_to_query.push(latest_cached_range.end..query_range.end); + latest_cached_range.end = query_range.end; + } + } + None => { + ranges_to_query.push(query_range.clone()); + self.sorted_ranges.push(query_range); + self.sorted_ranges + .sort_by(|range_a, 
range_b| range_a.start.cmp(&range_b.start, buffer_snapshot)); + } + } + + ranges_to_query + } + + fn invalidate_range(&mut self, buffer: &BufferSnapshot, range: &Range) { + self.sorted_ranges = self + .sorted_ranges + .drain(..) + .filter_map(|mut cached_range| { + if cached_range.start.cmp(&range.end, buffer).is_gt() + || cached_range.end.cmp(&range.start, buffer).is_lt() + { + Some(vec![cached_range]) + } else if cached_range.start.cmp(&range.start, buffer).is_ge() + && cached_range.end.cmp(&range.end, buffer).is_le() + { + None + } else if range.start.cmp(&cached_range.start, buffer).is_ge() + && range.end.cmp(&cached_range.end, buffer).is_le() + { + Some(vec![ + cached_range.start..range.start, + range.end..cached_range.end, + ]) + } else if cached_range.start.cmp(&range.start, buffer).is_ge() { + cached_range.start = range.end; + Some(vec![cached_range]) + } else { + cached_range.end = range.start; + Some(vec![cached_range]) + } + }) + .flatten() + .collect(); + } +} + +impl InlayHintCache { + pub(super) fn new(inlay_hint_settings: InlayHintSettings) -> Self { + Self { + allowed_hint_kinds: inlay_hint_settings.enabled_inlay_hint_kinds(), + enabled: inlay_hint_settings.enabled, + hints: HashMap::default(), + update_tasks: HashMap::default(), + refresh_task: None, + invalidate_debounce: debounce_value(inlay_hint_settings.edit_debounce_ms), + append_debounce: debounce_value(inlay_hint_settings.scroll_debounce_ms), + version: 0, + lsp_request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_LSP_REQUESTS)), + } + } + + /// Checks inlay hint settings for enabled hint kinds and general enabled state. + /// Generates corresponding inlay_map splice updates on settings changes. + /// Does not update inlay hint cache state on disabling or inlay hint kinds change: only reenabling forces new LSP queries. + pub(super) fn update_settings( + &mut self, + multi_buffer: &Model, + new_hint_settings: InlayHintSettings, + visible_hints: Vec, + cx: &mut ViewContext, + ) -> ControlFlow> { + self.invalidate_debounce = debounce_value(new_hint_settings.edit_debounce_ms); + self.append_debounce = debounce_value(new_hint_settings.scroll_debounce_ms); + let new_allowed_hint_kinds = new_hint_settings.enabled_inlay_hint_kinds(); + match (self.enabled, new_hint_settings.enabled) { + (false, false) => { + self.allowed_hint_kinds = new_allowed_hint_kinds; + ControlFlow::Break(None) + } + (true, true) => { + if new_allowed_hint_kinds == self.allowed_hint_kinds { + ControlFlow::Break(None) + } else { + let new_splice = self.new_allowed_hint_kinds_splice( + multi_buffer, + &visible_hints, + &new_allowed_hint_kinds, + cx, + ); + if new_splice.is_some() { + self.version += 1; + self.allowed_hint_kinds = new_allowed_hint_kinds; + } + ControlFlow::Break(new_splice) + } + } + (true, false) => { + self.enabled = new_hint_settings.enabled; + self.allowed_hint_kinds = new_allowed_hint_kinds; + if self.hints.is_empty() { + ControlFlow::Break(None) + } else { + self.clear(); + ControlFlow::Break(Some(InlaySplice { + to_remove: visible_hints.iter().map(|inlay| inlay.id).collect(), + to_insert: Vec::new(), + })) + } + } + (false, true) => { + self.enabled = new_hint_settings.enabled; + self.allowed_hint_kinds = new_allowed_hint_kinds; + ControlFlow::Continue(()) + } + } + } + + /// If needed, queries LSP for new inlay hints, using the invalidation strategy given. 
+ /// To reduce inlay hint jumping, attempts to query a visible range of the editor(s) first, + /// followed by the delayed queries of the same range above and below the visible one. + /// This way, concequent refresh invocations are less likely to trigger LSP queries for the invisible ranges. + pub(super) fn spawn_hint_refresh( + &mut self, + reason_description: &'static str, + excerpts_to_query: HashMap, Global, Range)>, + invalidate: InvalidationStrategy, + ignore_debounce: bool, + cx: &mut ViewContext, + ) -> Option { + if !self.enabled { + return None; + } + let mut invalidated_hints = Vec::new(); + if invalidate.should_invalidate() { + self.update_tasks + .retain(|task_excerpt_id, _| excerpts_to_query.contains_key(task_excerpt_id)); + self.hints.retain(|cached_excerpt, cached_hints| { + let retain = excerpts_to_query.contains_key(cached_excerpt); + if !retain { + invalidated_hints.extend(cached_hints.read().ordered_hints.iter().copied()); + } + retain + }); + } + if excerpts_to_query.is_empty() && invalidated_hints.is_empty() { + return None; + } + + let cache_version = self.version + 1; + let debounce_duration = if ignore_debounce { + None + } else if invalidate.should_invalidate() { + self.invalidate_debounce + } else { + self.append_debounce + }; + self.refresh_task = Some(cx.spawn(|editor, mut cx| async move { + if let Some(debounce_duration) = debounce_duration { + cx.background_executor().timer(debounce_duration).await; + } + + editor + .update(&mut cx, |editor, cx| { + spawn_new_update_tasks( + editor, + reason_description, + excerpts_to_query, + invalidate, + cache_version, + cx, + ) + }) + .ok(); + })); + + if invalidated_hints.is_empty() { + None + } else { + Some(InlaySplice { + to_remove: invalidated_hints, + to_insert: Vec::new(), + }) + } + } + + fn new_allowed_hint_kinds_splice( + &self, + multi_buffer: &Model, + visible_hints: &[Inlay], + new_kinds: &HashSet>, + cx: &mut ViewContext, + ) -> Option { + let old_kinds = &self.allowed_hint_kinds; + if new_kinds == old_kinds { + return None; + } + + let mut to_remove = Vec::new(); + let mut to_insert = Vec::new(); + let mut shown_hints_to_remove = visible_hints.iter().fold( + HashMap::>::default(), + |mut current_hints, inlay| { + current_hints + .entry(inlay.position.excerpt_id) + .or_default() + .push((inlay.position, inlay.id)); + current_hints + }, + ); + + let multi_buffer = multi_buffer.read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + + for (excerpt_id, excerpt_cached_hints) in &self.hints { + let shown_excerpt_hints_to_remove = + shown_hints_to_remove.entry(*excerpt_id).or_default(); + let excerpt_cached_hints = excerpt_cached_hints.read(); + let mut excerpt_cache = excerpt_cached_hints.ordered_hints.iter().fuse().peekable(); + shown_excerpt_hints_to_remove.retain(|(shown_anchor, shown_hint_id)| { + let Some(buffer) = shown_anchor + .buffer_id + .and_then(|buffer_id| multi_buffer.buffer(buffer_id)) + else { + return false; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + loop { + match excerpt_cache.peek() { + Some(&cached_hint_id) => { + let cached_hint = &excerpt_cached_hints.hints_by_id[cached_hint_id]; + if cached_hint_id == shown_hint_id { + excerpt_cache.next(); + return !new_kinds.contains(&cached_hint.kind); + } + + match cached_hint + .position + .cmp(&shown_anchor.text_anchor, &buffer_snapshot) + { + cmp::Ordering::Less | cmp::Ordering::Equal => { + if !old_kinds.contains(&cached_hint.kind) + && new_kinds.contains(&cached_hint.kind) + { + if let Some(anchor) = 
multi_buffer_snapshot + .anchor_in_excerpt(*excerpt_id, cached_hint.position) + { + to_insert.push(Inlay::hint( + cached_hint_id.id(), + anchor, + &cached_hint, + )); + } + } + excerpt_cache.next(); + } + cmp::Ordering::Greater => return true, + } + } + None => return true, + } + } + }); + + for cached_hint_id in excerpt_cache { + let maybe_missed_cached_hint = &excerpt_cached_hints.hints_by_id[cached_hint_id]; + let cached_hint_kind = maybe_missed_cached_hint.kind; + if !old_kinds.contains(&cached_hint_kind) && new_kinds.contains(&cached_hint_kind) { + if let Some(anchor) = multi_buffer_snapshot + .anchor_in_excerpt(*excerpt_id, maybe_missed_cached_hint.position) + { + to_insert.push(Inlay::hint( + cached_hint_id.id(), + anchor, + &maybe_missed_cached_hint, + )); + } + } + } + } + + to_remove.extend( + shown_hints_to_remove + .into_values() + .flatten() + .map(|(_, hint_id)| hint_id), + ); + if to_remove.is_empty() && to_insert.is_empty() { + None + } else { + Some(InlaySplice { + to_remove, + to_insert, + }) + } + } + + /// Completely forget of certain excerpts that were removed from the multibuffer. + pub(super) fn remove_excerpts( + &mut self, + excerpts_removed: Vec, + ) -> Option { + let mut to_remove = Vec::new(); + for excerpt_to_remove in excerpts_removed { + self.update_tasks.remove(&excerpt_to_remove); + if let Some(cached_hints) = self.hints.remove(&excerpt_to_remove) { + let cached_hints = cached_hints.read(); + to_remove.extend(cached_hints.ordered_hints.iter().copied()); + } + } + if to_remove.is_empty() { + None + } else { + self.version += 1; + Some(InlaySplice { + to_remove, + to_insert: Vec::new(), + }) + } + } + + pub(super) fn clear(&mut self) { + if !self.update_tasks.is_empty() || !self.hints.is_empty() { + self.version += 1; + } + self.update_tasks.clear(); + self.hints.clear(); + } + + pub(super) fn hint_by_id(&self, excerpt_id: ExcerptId, hint_id: InlayId) -> Option { + self.hints + .get(&excerpt_id)? + .read() + .hints_by_id + .get(&hint_id) + .cloned() + } + + pub fn hints(&self) -> Vec { + let mut hints = Vec::new(); + for excerpt_hints in self.hints.values() { + let excerpt_hints = excerpt_hints.read(); + hints.extend( + excerpt_hints + .ordered_hints + .iter() + .map(|id| &excerpt_hints.hints_by_id[id]) + .cloned(), + ); + } + hints + } + + pub fn version(&self) -> usize { + self.version + } + + /// Queries a certain hint from the cache for extra data via the LSP resolve request. 
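The resolve path below keeps a hint's `resolve_state` consistent across the async round-trip. As a rough sketch with toy, synchronous types (not the real `ResolveState`/`InlayHint` from the `project` crate): a hint is only resolved once, and the response is discarded if the cache entry changed while the request was in flight.

```rust
#[derive(Clone, Debug, PartialEq)]
enum ResolveState {
    CanResolve,
    Resolving,
    Resolved,
}

#[derive(Clone, Debug)]
struct Hint {
    label: String,
    tooltip: Option<String>,
    resolve_state: ResolveState,
}

fn start_resolve(hint: &mut Hint) -> bool {
    if hint.resolve_state == ResolveState::CanResolve {
        hint.resolve_state = ResolveState::Resolving;
        true // caller should now issue the LSP `inlayHint/resolve` request
    } else {
        false
    }
}

fn finish_resolve(cached: &mut Hint, mut resolved: Hint) {
    // Ignore the response if the cached entry changed while the request was running.
    if cached.resolve_state == ResolveState::Resolving {
        resolved.resolve_state = ResolveState::Resolved;
        *cached = resolved;
    }
}

fn main() {
    let mut cached = Hint {
        label: ": i32".into(),
        tooltip: None,
        resolve_state: ResolveState::CanResolve,
    };
    assert!(start_resolve(&mut cached));
    let mut resolved = cached.clone();
    resolved.tooltip = Some("32-bit signed integer".into());
    finish_resolve(&mut cached, resolved);
    assert_eq!(cached.resolve_state, ResolveState::Resolved);
    assert!(cached.tooltip.is_some());
}
```

In the actual implementation the same check happens inside the excerpt's `RwLock`-guarded cache, and the resolve request itself is forwarded to the originating language server via the project.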
+ pub(super) fn spawn_hint_resolve( + &self, + buffer_id: BufferId, + excerpt_id: ExcerptId, + id: InlayId, + cx: &mut ViewContext<'_, Editor>, + ) { + if let Some(excerpt_hints) = self.hints.get(&excerpt_id) { + let mut guard = excerpt_hints.write(); + if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) { + if let ResolveState::CanResolve(server_id, _) = &cached_hint.resolve_state { + let hint_to_resolve = cached_hint.clone(); + let server_id = *server_id; + cached_hint.resolve_state = ResolveState::Resolving; + drop(guard); + cx.spawn(|editor, mut cx| async move { + let resolved_hint_task = editor.update(&mut cx, |editor, cx| { + editor + .buffer() + .read(cx) + .buffer(buffer_id) + .and_then(|buffer| { + let project = editor.project.as_ref()?; + Some(project.update(cx, |project, cx| { + project.resolve_inlay_hint( + hint_to_resolve, + buffer, + server_id, + cx, + ) + })) + }) + })?; + if let Some(resolved_hint_task) = resolved_hint_task { + let mut resolved_hint = + resolved_hint_task.await.context("hint resolve task")?; + editor.update(&mut cx, |editor, _| { + if let Some(excerpt_hints) = + editor.inlay_hint_cache.hints.get(&excerpt_id) + { + let mut guard = excerpt_hints.write(); + if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) { + if cached_hint.resolve_state == ResolveState::Resolving { + resolved_hint.resolve_state = ResolveState::Resolved; + *cached_hint = resolved_hint; + } + } + } + })?; + } + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + } + } + } +} + +fn debounce_value(debounce_ms: u64) -> Option { + if debounce_ms > 0 { + Some(Duration::from_millis(debounce_ms)) + } else { + None + } +} + +fn spawn_new_update_tasks( + editor: &mut Editor, + reason: &'static str, + excerpts_to_query: HashMap, Global, Range)>, + invalidate: InvalidationStrategy, + update_cache_version: usize, + cx: &mut ViewContext<'_, Editor>, +) { + for (excerpt_id, (excerpt_buffer, new_task_buffer_version, excerpt_visible_range)) in + excerpts_to_query + { + if excerpt_visible_range.is_empty() { + continue; + } + let buffer = excerpt_buffer.read(cx); + let buffer_id = buffer.remote_id(); + let buffer_snapshot = buffer.snapshot(); + if buffer_snapshot + .version() + .changed_since(&new_task_buffer_version) + { + continue; + } + + if let Some(cached_excerpt_hints) = editor.inlay_hint_cache.hints.get(&excerpt_id) { + let cached_excerpt_hints = cached_excerpt_hints.read(); + let cached_buffer_version = &cached_excerpt_hints.buffer_version; + if cached_excerpt_hints.version > update_cache_version + || cached_buffer_version.changed_since(&new_task_buffer_version) + { + continue; + } + }; + + let Some(query_ranges) = editor.buffer.update(cx, |multi_buffer, cx| { + determine_query_ranges( + multi_buffer, + excerpt_id, + &excerpt_buffer, + excerpt_visible_range, + cx, + ) + }) else { + return; + }; + let query = ExcerptQuery { + buffer_id, + excerpt_id, + cache_version: update_cache_version, + invalidate, + reason, + }; + + let mut new_update_task = + |query_ranges| new_update_task(query, query_ranges, excerpt_buffer.clone(), cx); + + match editor.inlay_hint_cache.update_tasks.entry(excerpt_id) { + hash_map::Entry::Occupied(mut o) => { + o.get_mut().update_cached_tasks( + &buffer_snapshot, + query_ranges, + invalidate, + new_update_task, + ); + } + hash_map::Entry::Vacant(v) => { + v.insert(TasksForRanges::new( + query_ranges.clone(), + new_update_task(query_ranges), + )); + } + } + } +} + +#[derive(Debug, Clone)] +struct QueryRanges { + before_visible: Vec>, + visible: Vec>, + 
after_visible: Vec>, +} + +impl QueryRanges { + fn is_empty(&self) -> bool { + self.before_visible.is_empty() && self.visible.is_empty() && self.after_visible.is_empty() + } +} + +fn determine_query_ranges( + multi_buffer: &mut MultiBuffer, + excerpt_id: ExcerptId, + excerpt_buffer: &Model, + excerpt_visible_range: Range, + cx: &mut ModelContext<'_, MultiBuffer>, +) -> Option { + let full_excerpt_range = multi_buffer + .excerpts_for_buffer(excerpt_buffer, cx) + .into_iter() + .find(|(id, _)| id == &excerpt_id) + .map(|(_, range)| range.context)?; + let buffer = excerpt_buffer.read(cx); + let snapshot = buffer.snapshot(); + let excerpt_visible_len = excerpt_visible_range.end - excerpt_visible_range.start; + + let visible_range = if excerpt_visible_range.start == excerpt_visible_range.end { + return None; + } else { + vec![ + buffer.anchor_before(snapshot.clip_offset(excerpt_visible_range.start, Bias::Left)) + ..buffer.anchor_after(snapshot.clip_offset(excerpt_visible_range.end, Bias::Right)), + ] + }; + + let full_excerpt_range_end_offset = full_excerpt_range.end.to_offset(&snapshot); + let after_visible_range_start = excerpt_visible_range + .end + .saturating_add(1) + .min(full_excerpt_range_end_offset) + .min(buffer.len()); + let after_visible_range = if after_visible_range_start == full_excerpt_range_end_offset { + Vec::new() + } else { + let after_range_end_offset = after_visible_range_start + .saturating_add(excerpt_visible_len) + .min(full_excerpt_range_end_offset) + .min(buffer.len()); + vec![ + buffer.anchor_before(snapshot.clip_offset(after_visible_range_start, Bias::Left)) + ..buffer.anchor_after(snapshot.clip_offset(after_range_end_offset, Bias::Right)), + ] + }; + + let full_excerpt_range_start_offset = full_excerpt_range.start.to_offset(&snapshot); + let before_visible_range_end = excerpt_visible_range + .start + .saturating_sub(1) + .max(full_excerpt_range_start_offset); + let before_visible_range = if before_visible_range_end == full_excerpt_range_start_offset { + Vec::new() + } else { + let before_range_start_offset = before_visible_range_end + .saturating_sub(excerpt_visible_len) + .max(full_excerpt_range_start_offset); + vec![ + buffer.anchor_before(snapshot.clip_offset(before_range_start_offset, Bias::Left)) + ..buffer.anchor_after(snapshot.clip_offset(before_visible_range_end, Bias::Right)), + ] + }; + + Some(QueryRanges { + before_visible: before_visible_range, + visible: visible_range, + after_visible: after_visible_range, + }) +} + +const MAX_CONCURRENT_LSP_REQUESTS: usize = 5; +const INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS: u64 = 400; + +fn new_update_task( + query: ExcerptQuery, + query_ranges: QueryRanges, + excerpt_buffer: Model, + cx: &mut ViewContext<'_, Editor>, +) -> Task<()> { + cx.spawn(move |editor, mut cx| async move { + let visible_range_update_results = future::join_all( + query_ranges + .visible + .into_iter() + .filter_map(|visible_range| { + let fetch_task = editor + .update(&mut cx, |_, cx| { + fetch_and_update_hints( + excerpt_buffer.clone(), + query, + visible_range.clone(), + query.invalidate.should_invalidate(), + cx, + ) + }) + .log_err()?; + Some(async move { (visible_range, fetch_task.await) }) + }), + ) + .await; + + let hint_delay = cx.background_executor().timer(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS, + )); + + let query_range_failed = + |range: &Range, e: anyhow::Error, cx: &mut AsyncWindowContext| { + log::error!("inlay hint update task for range {range:?} failed: {e:#}"); + editor + .update(cx, |editor, 
cx| { + if let Some(task_ranges) = editor + .inlay_hint_cache + .update_tasks + .get_mut(&query.excerpt_id) + { + let buffer_snapshot = excerpt_buffer.read(cx).snapshot(); + task_ranges.invalidate_range(&buffer_snapshot, &range); + } + }) + .ok() + }; + + for (range, result) in visible_range_update_results { + if let Err(e) = result { + query_range_failed(&range, e, &mut cx); + } + } + + hint_delay.await; + let invisible_range_update_results = future::join_all( + query_ranges + .before_visible + .into_iter() + .chain(query_ranges.after_visible.into_iter()) + .filter_map(|invisible_range| { + let fetch_task = editor + .update(&mut cx, |_, cx| { + fetch_and_update_hints( + excerpt_buffer.clone(), + query, + invisible_range.clone(), + false, // visible screen request already invalidated the entries + cx, + ) + }) + .log_err()?; + Some(async move { (invisible_range, fetch_task.await) }) + }), + ) + .await; + for (range, result) in invisible_range_update_results { + if let Err(e) = result { + query_range_failed(&range, e, &mut cx); + } + } + }) +} + +fn fetch_and_update_hints( + excerpt_buffer: Model, + query: ExcerptQuery, + fetch_range: Range, + invalidate: bool, + cx: &mut ViewContext, +) -> Task> { + cx.spawn(|editor, mut cx| async move { + let buffer_snapshot = excerpt_buffer.update(&mut cx, |buffer, _| buffer.snapshot())?; + let (lsp_request_limiter, multi_buffer_snapshot) = editor.update(&mut cx, |editor, cx| { + let multi_buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx)); + let lsp_request_limiter = Arc::clone(&editor.inlay_hint_cache.lsp_request_limiter); + (lsp_request_limiter, multi_buffer_snapshot) + })?; + + let (lsp_request_guard, got_throttled) = if query.invalidate.should_invalidate() { + (None, false) + } else { + match lsp_request_limiter.try_acquire() { + Some(guard) => (Some(guard), false), + None => (Some(lsp_request_limiter.acquire().await), true), + } + }; + let fetch_range_to_log = + fetch_range.start.to_point(&buffer_snapshot)..fetch_range.end.to_point(&buffer_snapshot); + let inlay_hints_fetch_task = editor + .update(&mut cx, |editor, cx| { + if got_throttled { + let query_not_around_visible_range = match editor.excerpts_for_inlay_hints_query(None, cx).remove(&query.excerpt_id) { + Some((_, _, current_visible_range)) => { + let visible_offset_length = current_visible_range.len(); + let double_visible_range = current_visible_range + .start + .saturating_sub(visible_offset_length) + ..current_visible_range + .end + .saturating_add(visible_offset_length) + .min(buffer_snapshot.len()); + !double_visible_range + .contains(&fetch_range.start.to_offset(&buffer_snapshot)) + && !double_visible_range + .contains(&fetch_range.end.to_offset(&buffer_snapshot)) + }, + None => true, + }; + if query_not_around_visible_range { + log::trace!("Fetching inlay hints for range {fetch_range_to_log:?} got throttled and fell off the current visible range, skipping."); + if let Some(task_ranges) = editor + .inlay_hint_cache + .update_tasks + .get_mut(&query.excerpt_id) + { + task_ranges.invalidate_range(&buffer_snapshot, &fetch_range); + } + return None; + } + } + editor + .buffer() + .read(cx) + .buffer(query.buffer_id) + .and_then(|buffer| { + let project = editor.project.as_ref()?; + Some(project.update(cx, |project, cx| { + project.inlay_hints(buffer, fetch_range.clone(), cx) + })) + }) + }) + .ok() + .flatten(); + + let cached_excerpt_hints = editor.update(&mut cx, |editor, _| { + editor + .inlay_hint_cache + .hints + .get(&query.excerpt_id) + .cloned() + })?; 
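+        // What follows awaits the LSP fetch started above (it may have been throttled by
+        // `lsp_request_limiter`), diffs the returned hints against this excerpt's cached
+        // hints and the inlays currently on screen on the background executor, and then
+        // applies the resulting splice back on the editor.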
+ + let visible_hints = editor.update(&mut cx, |editor, cx| editor.visible_inlay_hints(cx))?; + let new_hints = match inlay_hints_fetch_task { + Some(fetch_task) => { + log::debug!( + "Fetching inlay hints for range {fetch_range_to_log:?}, reason: {query_reason}, invalidate: {invalidate}", + query_reason = query.reason, + ); + log::trace!( + "Currently visible hints: {visible_hints:?}, cached hints present: {}", + cached_excerpt_hints.is_some(), + ); + fetch_task.await.context("inlay hint fetch task")? + } + None => return Ok(()), + }; + drop(lsp_request_guard); + log::debug!( + "Fetched {} hints for range {fetch_range_to_log:?}", + new_hints.len() + ); + log::trace!("Fetched hints: {new_hints:?}"); + + let background_task_buffer_snapshot = buffer_snapshot.clone(); + let background_fetch_range = fetch_range.clone(); + let new_update = cx + .background_executor() + .spawn(async move { + calculate_hint_updates( + query.excerpt_id, + invalidate, + background_fetch_range, + new_hints, + &background_task_buffer_snapshot, + cached_excerpt_hints, + &visible_hints, + ) + }) + .await; + if let Some(new_update) = new_update { + log::debug!( + "Applying update for range {fetch_range_to_log:?}: remove from editor: {}, remove from cache: {}, add to cache: {}", + new_update.remove_from_visible.len(), + new_update.remove_from_cache.len(), + new_update.add_to_cache.len() + ); + log::trace!("New update: {new_update:?}"); + editor + .update(&mut cx, |editor, cx| { + apply_hint_update( + editor, + new_update, + query, + invalidate, + buffer_snapshot, + multi_buffer_snapshot, + cx, + ); + }) + .ok(); + } + anyhow::Ok(()) + }) +} + +fn calculate_hint_updates( + excerpt_id: ExcerptId, + invalidate: bool, + fetch_range: Range, + new_excerpt_hints: Vec, + buffer_snapshot: &BufferSnapshot, + cached_excerpt_hints: Option>>, + visible_hints: &[Inlay], +) -> Option { + let mut add_to_cache = Vec::::new(); + let mut excerpt_hints_to_persist = HashMap::default(); + for new_hint in new_excerpt_hints { + if !contains_position(&fetch_range, new_hint.position, buffer_snapshot) { + continue; + } + let missing_from_cache = match &cached_excerpt_hints { + Some(cached_excerpt_hints) => { + let cached_excerpt_hints = cached_excerpt_hints.read(); + match cached_excerpt_hints + .ordered_hints + .binary_search_by(|probe| { + cached_excerpt_hints.hints_by_id[probe] + .position + .cmp(&new_hint.position, buffer_snapshot) + }) { + Ok(ix) => { + let mut missing_from_cache = true; + for id in &cached_excerpt_hints.ordered_hints[ix..] 
{ + let cached_hint = &cached_excerpt_hints.hints_by_id[id]; + if new_hint + .position + .cmp(&cached_hint.position, buffer_snapshot) + .is_gt() + { + break; + } + if cached_hint == &new_hint { + excerpt_hints_to_persist.insert(*id, cached_hint.kind); + missing_from_cache = false; + } + } + missing_from_cache + } + Err(_) => true, + } + } + None => true, + }; + if missing_from_cache { + add_to_cache.push(new_hint); + } + } + + let mut remove_from_visible = HashSet::default(); + let mut remove_from_cache = HashSet::default(); + if invalidate { + remove_from_visible.extend( + visible_hints + .iter() + .filter(|hint| hint.position.excerpt_id == excerpt_id) + .map(|inlay_hint| inlay_hint.id) + .filter(|hint_id| !excerpt_hints_to_persist.contains_key(hint_id)), + ); + + if let Some(cached_excerpt_hints) = &cached_excerpt_hints { + let cached_excerpt_hints = cached_excerpt_hints.read(); + remove_from_cache.extend( + cached_excerpt_hints + .ordered_hints + .iter() + .filter(|cached_inlay_id| { + !excerpt_hints_to_persist.contains_key(cached_inlay_id) + }) + .copied(), + ); + remove_from_visible.extend(remove_from_cache.iter().cloned()); + } + } + + if remove_from_visible.is_empty() && remove_from_cache.is_empty() && add_to_cache.is_empty() { + None + } else { + Some(ExcerptHintsUpdate { + excerpt_id, + remove_from_visible, + remove_from_cache, + add_to_cache, + }) + } +} + +fn contains_position( + range: &Range, + position: language::Anchor, + buffer_snapshot: &BufferSnapshot, +) -> bool { + range.start.cmp(&position, buffer_snapshot).is_le() + && range.end.cmp(&position, buffer_snapshot).is_ge() +} + +fn apply_hint_update( + editor: &mut Editor, + new_update: ExcerptHintsUpdate, + query: ExcerptQuery, + invalidate: bool, + buffer_snapshot: BufferSnapshot, + multi_buffer_snapshot: MultiBufferSnapshot, + cx: &mut ViewContext<'_, Editor>, +) { + let cached_excerpt_hints = editor + .inlay_hint_cache + .hints + .entry(new_update.excerpt_id) + .or_insert_with(|| { + Arc::new(RwLock::new(CachedExcerptHints { + version: query.cache_version, + buffer_version: buffer_snapshot.version().clone(), + buffer_id: query.buffer_id, + ordered_hints: Vec::new(), + hints_by_id: HashMap::default(), + })) + }); + let mut cached_excerpt_hints = cached_excerpt_hints.write(); + match query.cache_version.cmp(&cached_excerpt_hints.version) { + cmp::Ordering::Less => return, + cmp::Ordering::Greater | cmp::Ordering::Equal => { + cached_excerpt_hints.version = query.cache_version; + } + } + + let mut cached_inlays_changed = !new_update.remove_from_cache.is_empty(); + cached_excerpt_hints + .ordered_hints + .retain(|hint_id| !new_update.remove_from_cache.contains(hint_id)); + cached_excerpt_hints + .hints_by_id + .retain(|hint_id, _| !new_update.remove_from_cache.contains(hint_id)); + let mut splice = InlaySplice::default(); + splice.to_remove.extend(new_update.remove_from_visible); + for new_hint in new_update.add_to_cache { + let insert_position = match cached_excerpt_hints + .ordered_hints + .binary_search_by(|probe| { + cached_excerpt_hints.hints_by_id[probe] + .position + .cmp(&new_hint.position, &buffer_snapshot) + }) { + Ok(i) => { + let mut insert_position = Some(i); + for id in &cached_excerpt_hints.ordered_hints[i..] 
{ + let cached_hint = &cached_excerpt_hints.hints_by_id[id]; + if new_hint + .position + .cmp(&cached_hint.position, &buffer_snapshot) + .is_gt() + { + break; + } + if cached_hint.text() == new_hint.text() { + insert_position = None; + break; + } + } + insert_position + } + Err(i) => Some(i), + }; + + if let Some(insert_position) = insert_position { + let new_inlay_id = post_inc(&mut editor.next_inlay_id); + if editor + .inlay_hint_cache + .allowed_hint_kinds + .contains(&new_hint.kind) + { + if let Some(new_hint_position) = + multi_buffer_snapshot.anchor_in_excerpt(query.excerpt_id, new_hint.position) + { + splice + .to_insert + .push(Inlay::hint(new_inlay_id, new_hint_position, &new_hint)); + } + } + let new_id = InlayId::Hint(new_inlay_id); + cached_excerpt_hints.hints_by_id.insert(new_id, new_hint); + cached_excerpt_hints + .ordered_hints + .insert(insert_position, new_id); + cached_inlays_changed = true; + } + } + cached_excerpt_hints.buffer_version = buffer_snapshot.version().clone(); + drop(cached_excerpt_hints); + + if invalidate { + let mut outdated_excerpt_caches = HashSet::default(); + for (excerpt_id, excerpt_hints) in &editor.inlay_hint_cache().hints { + let excerpt_hints = excerpt_hints.read(); + if excerpt_hints.buffer_id == query.buffer_id + && excerpt_id != &query.excerpt_id + && buffer_snapshot + .version() + .changed_since(&excerpt_hints.buffer_version) + { + outdated_excerpt_caches.insert(*excerpt_id); + splice + .to_remove + .extend(excerpt_hints.ordered_hints.iter().copied()); + } + } + cached_inlays_changed |= !outdated_excerpt_caches.is_empty(); + editor + .inlay_hint_cache + .hints + .retain(|excerpt_id, _| !outdated_excerpt_caches.contains(excerpt_id)); + } + + let InlaySplice { + to_remove, + to_insert, + } = splice; + let displayed_inlays_changed = !to_remove.is_empty() || !to_insert.is_empty(); + if cached_inlays_changed || displayed_inlays_changed { + editor.inlay_hint_cache.version += 1; + } + if displayed_inlays_changed { + editor.splice_inlays(to_remove, to_insert, cx) + } +} + +#[cfg(test)] +pub mod tests { + use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering}; + + use crate::{ + scroll::{scroll_amount::ScrollAmount, Autoscroll}, + ExcerptRange, + }; + use futures::StreamExt; + use gpui::{Context, TestAppContext, WindowHandle}; + use itertools::Itertools; + use language::{ + language_settings::AllLanguageSettingsContent, Capability, FakeLspAdapter, Language, + LanguageConfig, LanguageMatcher, + }; + use lsp::FakeLanguageServer; + use parking_lot::Mutex; + use project::{FakeFs, Project}; + use serde_json::json; + use settings::SettingsStore; + use text::{Point, ToPoint}; + + use crate::editor_tests::update_test_language_settings; + + use super::*; + + #[gpui::test] + async fn test_basic_cache_update_with_duplicate_hints(cx: &mut gpui::TestAppContext) { + let allowed_hint_kinds = HashSet::from_iter([None, Some(InlayHintKind::Type)]); + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), + show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)), + show_other_hints: allowed_hint_kinds.contains(&None), + }) + }); + + let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = 
Arc::clone(&lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + let current_call_id = + Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + let mut new_hints = Vec::with_capacity(2 * current_call_id as usize); + for _ in 0..2 { + let mut i = current_call_id; + loop { + new_hints.push(lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }); + if i == 0 { + break; + } + i -= 1; + } + } + + Ok(Some(new_hints)) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + + let mut edits_made = 1; + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get its first hints when opening the editor" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds" + ); + assert_eq!( + inlay_cache.version, edits_made, + "The editor update the cache version after every cache/view change" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input("some change", cx); + edits_made += 1; + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string(), "1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get new hints after an edit" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds" + ); + assert_eq!( + inlay_cache.version, edits_made, + "The editor update the cache version after every cache/view change" + ); + }) + .unwrap(); + + fake_server + .request::(()) + .await + .expect("inlay refresh request failed"); + edits_made += 1; + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string(), "1".to_string(), "2".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get new hints after hint refresh/ request" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds" + ); + assert_eq!( + inlay_cache.version, edits_made, + "The editor update the cache version after every cache/view change" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_cache_update_on_lsp_completion_tasks(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = 
Arc::clone(&lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + let current_call_id = + Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, current_call_id), + label: lsp::InlayHintLabel::String(current_call_id.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + + let mut edits_made = 1; + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get its first hints when opening the editor" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + edits_made, + "The editor update the cache version after every cache/view change" + ); + }) + .unwrap(); + + let progress_token = "test_progress_token"; + fake_server + .request::(lsp::WorkDoneProgressCreateParams { + token: lsp::ProgressToken::String(progress_token.to_string()), + }) + .await + .expect("work done progress create request failed"); + cx.executor().run_until_parked(); + fake_server.notify::(lsp::ProgressParams { + token: lsp::ProgressToken::String(progress_token.to_string()), + value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin( + lsp::WorkDoneProgressBegin::default(), + )), + }); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should not update hints while the work task is running" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + edits_made, + "Should not update the cache while the work task is running" + ); + }) + .unwrap(); + + fake_server.notify::(lsp::ProgressParams { + token: lsp::ProgressToken::String(progress_token.to_string()), + value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End( + lsp::WorkDoneProgressEnd::default(), + )), + }); + cx.executor().run_until_parked(); + + edits_made += 1; + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "New hints should be queried after the work task is done" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + edits_made, + "Cache version should update once after the work task is done" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_no_hint_updates_for_unrelated_language_files(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out", + "other.md": "Test md file with some text", + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let mut rs_fake_servers = None; + let mut 
md_fake_servers = None; + for (name, path_suffix) in [("Rust", "rs"), ("Markdown", "md")] { + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: name.into(), + matcher: LanguageMatcher { + path_suffixes: vec![path_suffix.to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + let fake_servers = language_registry.register_fake_lsp_adapter( + name, + FakeLspAdapter { + name, + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + match name { + "Rust" => rs_fake_servers = Some(fake_servers), + "Markdown" => md_fake_servers = Some(fake_servers), + _ => unreachable!(), + } + } + + let rs_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap(); + let rs_editor = + cx.add_window(|cx| Editor::for_buffer(rs_buffer, Some(project.clone()), cx)); + let rs_lsp_request_count = Arc::new(AtomicU32::new(0)); + rs_fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = Arc::clone(&rs_lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + rs_editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get its first hints when opening the editor" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + 1, + "Rust editor update the cache version after every cache/view change" + ); + }) + .unwrap(); + + cx.executor().run_until_parked(); + let md_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/other.md", cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let md_fake_server = md_fake_servers.unwrap().next().await.unwrap(); + let md_editor = cx.add_window(|cx| Editor::for_buffer(md_buffer, Some(project), cx)); + let md_lsp_request_count = Arc::new(AtomicU32::new(0)); + md_fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = Arc::clone(&md_lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/other.md").unwrap(), + ); + let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + md_editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Markdown editor should have a separate version, repeating Rust editor rules" + ); + 
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 1); + }) + .unwrap(); + + rs_editor + .update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input("some rs change", cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + rs_editor + .update(cx, |editor, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Rust inlay cache should change after the edit" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + 2, + "Every time hint cache changes, cache version should be incremented" + ); + }) + .unwrap(); + md_editor + .update(cx, |editor, cx| { + let expected_hints = vec!["0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Markdown editor should not be affected by Rust editor changes" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 1); + }) + .unwrap(); + + md_editor + .update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input("some md change", cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + md_editor + .update(cx, |editor, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Rust editor should not be affected by Markdown editor changes" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 2); + }) + .unwrap(); + rs_editor + .update(cx, |editor, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Markdown editor should also change independently" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 2); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_hint_setting_changes(cx: &mut gpui::TestAppContext) { + let allowed_hint_kinds = HashSet::from_iter([None, Some(InlayHintKind::Type)]); + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), + show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)), + show_other_hints: allowed_hint_kinds.contains(&None), + }) + }); + + let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; + let lsp_request_count = Arc::new(AtomicU32::new(0)); + let another_lsp_request_count = Arc::clone(&lsp_request_count); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = Arc::clone(&another_lsp_request_count); + async move { + Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + Ok(Some(vec![ + lsp::InlayHint { + position: lsp::Position::new(0, 1), + label: lsp::InlayHintLabel::String("type hint".to_string()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(0, 2), + label: lsp::InlayHintLabel::String("parameter hint".to_string()), + kind: Some(lsp::InlayHintKind::PARAMETER), + text_edits: None, + 
tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(0, 3), + label: lsp::InlayHintLabel::String("other hint".to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + ])) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + + let mut edits_made = 1; + editor + .update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 1, + "Should query new hints once" + ); + assert_eq!( + vec![ + "other hint".to_string(), + "parameter hint".to_string(), + "type hint".to_string(), + ], + cached_hint_labels(editor), + "Should get its first hints when opening the editor" + ); + assert_eq!( + vec!["other hint".to_string(), "type hint".to_string()], + visible_hint_labels(editor, cx) + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds" + ); + assert_eq!( + inlay_cache.version, edits_made, + "The editor update the cache version after every cache/view change" + ); + }) + .unwrap(); + + fake_server + .request::(()) + .await + .expect("inlay refresh request failed"); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 2, + "Should load new hints twice" + ); + assert_eq!( + vec![ + "other hint".to_string(), + "parameter hint".to_string(), + "type hint".to_string(), + ], + cached_hint_labels(editor), + "Cached hints should not change due to allowed hint kinds settings update" + ); + assert_eq!( + vec!["other hint".to_string(), "type hint".to_string()], + visible_hint_labels(editor, cx) + ); + assert_eq!( + editor.inlay_hint_cache().version, + edits_made, + "Should not update cache version due to new loaded hints being the same" + ); + }) + .unwrap(); + + for (new_allowed_hint_kinds, expected_visible_hints) in [ + (HashSet::from_iter([None]), vec!["other hint".to_string()]), + ( + HashSet::from_iter([Some(InlayHintKind::Type)]), + vec!["type hint".to_string()], + ), + ( + HashSet::from_iter([Some(InlayHintKind::Parameter)]), + vec!["parameter hint".to_string()], + ), + ( + HashSet::from_iter([None, Some(InlayHintKind::Type)]), + vec!["other hint".to_string(), "type hint".to_string()], + ), + ( + HashSet::from_iter([None, Some(InlayHintKind::Parameter)]), + vec!["other hint".to_string(), "parameter hint".to_string()], + ), + ( + HashSet::from_iter([Some(InlayHintKind::Type), Some(InlayHintKind::Parameter)]), + vec!["parameter hint".to_string(), "type hint".to_string()], + ), + ( + HashSet::from_iter([ + None, + Some(InlayHintKind::Type), + Some(InlayHintKind::Parameter), + ]), + vec![ + "other hint".to_string(), + "parameter hint".to_string(), + "type hint".to_string(), + ], + ), + ] { + edits_made += 1; + update_test_language_settings(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: new_allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), + show_parameter_hints: new_allowed_hint_kinds + .contains(&Some(InlayHintKind::Parameter)), + show_other_hints: new_allowed_hint_kinds.contains(&None), + }) + }); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 2, + "Should not load new hints on allowed hint kinds change for hint kinds 
{new_allowed_hint_kinds:?}" + ); + assert_eq!( + vec![ + "other hint".to_string(), + "parameter hint".to_string(), + "type hint".to_string(), + ], + cached_hint_labels(editor), + "Should get its cached hints unchanged after the settings change for hint kinds {new_allowed_hint_kinds:?}" + ); + assert_eq!( + expected_visible_hints, + visible_hint_labels(editor, cx), + "Should get its visible hints filtered after the settings change for hint kinds {new_allowed_hint_kinds:?}" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, new_allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds for hint kinds {new_allowed_hint_kinds:?}" + ); + assert_eq!( + inlay_cache.version, edits_made, + "The editor should update the cache version after every cache/view change for hint kinds {new_allowed_hint_kinds:?} due to visible hints change" + ); + }).unwrap(); + } + + edits_made += 1; + let another_allowed_hint_kinds = HashSet::from_iter([Some(InlayHintKind::Type)]); + update_test_language_settings(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: false, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: another_allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), + show_parameter_hints: another_allowed_hint_kinds + .contains(&Some(InlayHintKind::Parameter)), + show_other_hints: another_allowed_hint_kinds.contains(&None), + }) + }); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 2, + "Should not load new hints when hints got disabled" + ); + assert!( + cached_hint_labels(editor).is_empty(), + "Should clear the cache when hints got disabled" + ); + assert!( + visible_hint_labels(editor, cx).is_empty(), + "Should clear visible hints when hints got disabled" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, another_allowed_hint_kinds, + "Should update its allowed hint kinds even when hints got disabled" + ); + assert_eq!( + inlay_cache.version, edits_made, + "The editor should update the cache version after hints got disabled" + ); + }) + .unwrap(); + + fake_server + .request::(()) + .await + .expect("inlay refresh request failed"); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 2, + "Should not load new hints when they got disabled" + ); + assert!(cached_hint_labels(editor).is_empty()); + assert!(visible_hint_labels(editor, cx).is_empty()); + assert_eq!( + editor.inlay_hint_cache().version, edits_made, + "The editor should not update the cache version after /refresh query without updates" + ); + }).unwrap(); + + let final_allowed_hint_kinds = HashSet::from_iter([Some(InlayHintKind::Parameter)]); + edits_made += 1; + update_test_language_settings(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: final_allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), + show_parameter_hints: final_allowed_hint_kinds + .contains(&Some(InlayHintKind::Parameter)), + show_other_hints: final_allowed_hint_kinds.contains(&None), + }) + }); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 3, + "Should query for new hints when they got re-enabled" + ); + assert_eq!( + vec![ + "other 
hint".to_string(), + "parameter hint".to_string(), + "type hint".to_string(), + ], + cached_hint_labels(editor), + "Should get its cached hints fully repopulated after the hints got re-enabled" + ); + assert_eq!( + vec!["parameter hint".to_string()], + visible_hint_labels(editor, cx), + "Should get its visible hints repopulated and filtered after the h" + ); + let inlay_cache = editor.inlay_hint_cache(); + assert_eq!( + inlay_cache.allowed_hint_kinds, final_allowed_hint_kinds, + "Cache should update editor settings when hints got re-enabled" + ); + assert_eq!( + inlay_cache.version, edits_made, + "Cache should update its version after hints got re-enabled" + ); + }) + .unwrap(); + + fake_server + .request::(()) + .await + .expect("inlay refresh request failed"); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 4, + "Should query for new hints again" + ); + assert_eq!( + vec![ + "other hint".to_string(), + "parameter hint".to_string(), + "type hint".to_string(), + ], + cached_hint_labels(editor), + ); + assert_eq!( + vec!["parameter hint".to_string()], + visible_hint_labels(editor, cx), + ); + assert_eq!(editor.inlay_hint_cache().version, edits_made); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_hint_request_cancellation(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; + let fake_server = Arc::new(fake_server); + let lsp_request_count = Arc::new(AtomicU32::new(0)); + let another_lsp_request_count = Arc::clone(&lsp_request_count); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = Arc::clone(&another_lsp_request_count); + async move { + let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst) + 1; + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + + let mut expected_changes = Vec::new(); + for change_after_opening in [ + "initial change #1", + "initial change #2", + "initial change #3", + ] { + editor + .update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input(change_after_opening, cx); + }) + .unwrap(); + expected_changes.push(change_after_opening); + } + + cx.executor().run_until_parked(); + + editor.update(cx, |editor, cx| { + let current_text = editor.text(cx); + for change in &expected_changes { + assert!( + current_text.contains(change), + "Should apply all changes made" + ); + } + assert_eq!( + lsp_request_count.load(Ordering::Relaxed), + 2, + "Should query new hints twice: for editor init and for the last edit that interrupted all others" + ); + let expected_hints = vec!["2".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get hints from the last edit landed only" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, 1, + "Only one update should be registered 
in the cache after all cancellations" + ); + }).unwrap(); + + let mut edits = Vec::new(); + for async_later_change in [ + "another change #1", + "another change #2", + "another change #3", + ] { + expected_changes.push(async_later_change); + let task_editor = editor; + edits.push(cx.spawn(|mut cx| async move { + task_editor + .update(&mut cx, |editor, cx| { + editor.change_selections(None, cx, |s| s.select_ranges([13..13])); + editor.handle_input(async_later_change, cx); + }) + .unwrap(); + })); + } + let _ = future::join_all(edits).await; + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, cx| { + let current_text = editor.text(cx); + for change in &expected_changes { + assert!( + current_text.contains(change), + "Should apply all changes made" + ); + } + assert_eq!( + lsp_request_count.load(Ordering::SeqCst), + 3, + "Should query new hints one more time, for the last edit only" + ); + let expected_hints = vec!["3".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should get hints from the last edit landed only" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + 2, + "Should update the cache version once more, for the new change" + ); + }) + .unwrap(); + } + + #[gpui::test(iterations = 10)] + async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/a", + json!({ + "main.rs": format!("fn main() {{\n{}\n}}", "let i = 5;\n".repeat(500)), + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(crate::editor_tests::rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); + let lsp_request_ranges = Arc::new(Mutex::new(Vec::new())); + let lsp_request_count = Arc::new(AtomicUsize::new(0)); + let closure_lsp_request_ranges = Arc::clone(&lsp_request_ranges); + let closure_lsp_request_count = Arc::clone(&lsp_request_count); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_ranges = Arc::clone(&closure_lsp_request_ranges); + let task_lsp_request_count = Arc::clone(&closure_lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + + task_lsp_request_ranges.lock().push(params.range); + let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::Release) + 1; + Ok(Some(vec![lsp::InlayHint { + position: params.range.end, + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + 
padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + + fn editor_visible_range( + editor: &WindowHandle, + cx: &mut gpui::TestAppContext, + ) -> Range { + let ranges = editor + .update(cx, |editor, cx| { + editor.excerpts_for_inlay_hints_query(None, cx) + }) + .unwrap(); + assert_eq!( + ranges.len(), + 1, + "Single buffer should produce a single excerpt with visible range" + ); + let (_, (excerpt_buffer, _, excerpt_visible_range)) = + ranges.into_iter().next().unwrap(); + excerpt_buffer.update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + let start = buffer + .anchor_before(excerpt_visible_range.start) + .to_point(&snapshot); + let end = buffer + .anchor_after(excerpt_visible_range.end) + .to_point(&snapshot); + start..end + }) + } + + // in large buffers, requests are made for more than visible range of a buffer. + // invisible parts are queried later, to avoid excessive requests on quick typing. + // wait the timeout needed to get all requests. + cx.executor().advance_clock(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + )); + cx.executor().run_until_parked(); + let initial_visible_range = editor_visible_range(&editor, cx); + let lsp_initial_visible_range = lsp::Range::new( + lsp::Position::new( + initial_visible_range.start.row, + initial_visible_range.start.column, + ), + lsp::Position::new( + initial_visible_range.end.row, + initial_visible_range.end.column, + ), + ); + let expected_initial_query_range_end = + lsp::Position::new(initial_visible_range.end.row * 2, 2); + let mut expected_invisible_query_start = lsp_initial_visible_range.end; + expected_invisible_query_start.character += 1; + editor.update(cx, |editor, cx| { + let ranges = lsp_request_ranges.lock().drain(..).collect::>(); + assert_eq!(ranges.len(), 2, + "When scroll is at the edge of a big document, its visible part and the same range further should be queried in order, but got: {ranges:?}"); + let visible_query_range = &ranges[0]; + assert_eq!(visible_query_range.start, lsp_initial_visible_range.start); + assert_eq!(visible_query_range.end, lsp_initial_visible_range.end); + let invisible_query_range = &ranges[1]; + + assert_eq!(invisible_query_range.start, expected_invisible_query_start, "Should initially query visible edge of the document"); + assert_eq!(invisible_query_range.end, expected_initial_query_range_end, "Should initially query visible edge of the document"); + + let requests_count = lsp_request_count.load(Ordering::Acquire); + assert_eq!(requests_count, 2, "Visible + invisible request"); + let expected_hints = vec!["1".to_string(), "2".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should have hints from both LSP requests made for a big file" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx), "Should display only hints from the visible range"); + assert_eq!( + editor.inlay_hint_cache().version, requests_count, + "LSP queries should've bumped the cache version" + ); + }).unwrap(); + + editor + .update(cx, |editor, cx| { + editor.scroll_screen(&ScrollAmount::Page(1.0), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + editor.scroll_screen(&ScrollAmount::Page(1.0), cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + )); + cx.executor().run_until_parked(); + let visible_range_after_scrolls = editor_visible_range(&editor, cx); + let visible_line_count = editor 
+ .update(cx, |editor, _| editor.visible_line_count().unwrap()) + .unwrap(); + let selection_in_cached_range = editor + .update(cx, |editor, cx| { + let ranges = lsp_request_ranges + .lock() + .drain(..) + .sorted_by_key(|r| r.start) + .collect::>(); + assert_eq!( + ranges.len(), + 2, + "Should query 2 ranges after both scrolls, but got: {ranges:?}" + ); + let first_scroll = &ranges[0]; + let second_scroll = &ranges[1]; + assert_eq!( + first_scroll.end, second_scroll.start, + "Should query 2 adjacent ranges after the scrolls, but got: {ranges:?}" + ); + assert_eq!( + first_scroll.start, expected_initial_query_range_end, + "First scroll should start the query right after the end of the original scroll", + ); + assert_eq!( + second_scroll.end, + lsp::Position::new( + visible_range_after_scrolls.end.row + + visible_line_count.ceil() as u32, + 1, + ), + "Second scroll should query one more screen down after the end of the visible range" + ); + + let lsp_requests = lsp_request_count.load(Ordering::Acquire); + assert_eq!(lsp_requests, 4, "Should query for hints after every scroll"); + let expected_hints = vec![ + "1".to_string(), + "2".to_string(), + "3".to_string(), + "4".to_string(), + ]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should have hints from the new LSP response after the edit" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + lsp_requests, + "Should update the cache for every LSP response with hints added" + ); + + let mut selection_in_cached_range = visible_range_after_scrolls.end; + selection_in_cached_range.row -= visible_line_count.ceil() as u32; + selection_in_cached_range + }) + .unwrap(); + + editor + .update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::center()), cx, |s| { + s.select_ranges([selection_in_cached_range..selection_in_cached_range]) + }); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + )); + cx.executor().run_until_parked(); + editor.update(cx, |_, _| { + let ranges = lsp_request_ranges + .lock() + .drain(..) + .sorted_by_key(|r| r.start) + .collect::>(); + assert!(ranges.is_empty(), "No new ranges or LSP queries should be made after returning to the selection with cached hints"); + assert_eq!(lsp_request_count.load(Ordering::Acquire), 4); + }).unwrap(); + + editor + .update(cx, |editor, cx| { + editor.handle_input("++++more text++++", cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + )); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + let mut ranges = lsp_request_ranges.lock().drain(..).collect::>(); + ranges.sort_by_key(|r| r.start); + + assert_eq!(ranges.len(), 3, + "On edit, should scroll to selection and query a range around it: visible + same range above and below. 
Instead, got query ranges {ranges:?}"); + let above_query_range = &ranges[0]; + let visible_query_range = &ranges[1]; + let below_query_range = &ranges[2]; + assert!(above_query_range.end.character < visible_query_range.start.character || above_query_range.end.line + 1 == visible_query_range.start.line, + "Above range {above_query_range:?} should be before visible range {visible_query_range:?}"); + assert!(visible_query_range.end.character < below_query_range.start.character || visible_query_range.end.line + 1 == below_query_range.start.line, + "Visible range {visible_query_range:?} should be before below range {below_query_range:?}"); + assert!(above_query_range.start.line < selection_in_cached_range.row, + "Hints should be queried with the selected range after the query range start"); + assert!(below_query_range.end.line > selection_in_cached_range.row, + "Hints should be queried with the selected range before the query range end"); + assert!(above_query_range.start.line <= selection_in_cached_range.row - (visible_line_count * 3.0 / 2.0) as u32, + "Hints query range should contain one more screen before"); + assert!(below_query_range.end.line >= selection_in_cached_range.row + (visible_line_count * 3.0 / 2.0) as u32, + "Hints query range should contain one more screen after"); + + let lsp_requests = lsp_request_count.load(Ordering::Acquire); + assert_eq!(lsp_requests, 7, "There should be a visible range and two ranges above and below it queried"); + let expected_hints = vec!["5".to_string(), "6".to_string(), "7".to_string()]; + assert_eq!(expected_hints, cached_hint_labels(editor), + "Should have hints from the new LSP response after the edit"); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, lsp_requests, "Should update the cache for every LSP response with hints added"); + }).unwrap(); + } + + #[gpui::test(iterations = 30)] + async fn test_multiple_excerpts_large_multibuffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/a", + json!({ + "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::>().join("")), + "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::>().join("")), + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let language = crate::editor_tests::rust_lang(); + language_registry.add(language); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let worktree_id = project.update(cx, |project, cx| { + project.worktrees().next().unwrap().read(cx).id() + }); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "main.rs"), cx) + }) + .await + .unwrap(); + let buffer_2 = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "other.rs"), cx) + }) + .await + .unwrap(); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = 
MultiBuffer::new(0, Capability::ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(4, 0)..Point::new(11, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(22, 0)..Point::new(33, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(44, 0)..Point::new(55, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(56, 0)..Point::new(66, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(67, 0)..Point::new(77, 0), + primary: None, + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + ExcerptRange { + context: Point::new(0, 1)..Point::new(2, 1), + primary: None, + }, + ExcerptRange { + context: Point::new(4, 1)..Point::new(11, 1), + primary: None, + }, + ExcerptRange { + context: Point::new(22, 1)..Point::new(33, 1), + primary: None, + }, + ExcerptRange { + context: Point::new(44, 1)..Point::new(55, 1), + primary: None, + }, + ExcerptRange { + context: Point::new(56, 1)..Point::new(66, 1), + primary: None, + }, + ExcerptRange { + context: Point::new(67, 1)..Point::new(77, 1), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + + cx.executor().run_until_parked(); + let editor = + cx.add_window(|cx| Editor::for_multibuffer(multibuffer, Some(project.clone()), cx)); + + let editor_edited = Arc::new(AtomicBool::new(false)); + let fake_server = fake_servers.next().await.unwrap(); + let closure_editor_edited = Arc::clone(&editor_edited); + fake_server + .handle_request::(move |params, _| { + let task_editor_edited = Arc::clone(&closure_editor_edited); + async move { + let hint_text = if params.text_document.uri + == lsp::Url::from_file_path("/a/main.rs").unwrap() + { + "main hint" + } else if params.text_document.uri + == lsp::Url::from_file_path("/a/other.rs").unwrap() + { + "other hint" + } else { + panic!("unexpected uri: {:?}", params.text_document.uri); + }; + + // one hint per excerpt + let positions = [ + lsp::Position::new(0, 2), + lsp::Position::new(4, 2), + lsp::Position::new(22, 2), + lsp::Position::new(44, 2), + lsp::Position::new(56, 2), + lsp::Position::new(67, 2), + ]; + let out_of_range_hint = lsp::InlayHint { + position: lsp::Position::new( + params.range.start.line + 99, + params.range.start.character + 99, + ), + label: lsp::InlayHintLabel::String( + "out of excerpt range, should be ignored".to_string(), + ), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }; + + let edited = task_editor_edited.load(Ordering::Acquire); + Ok(Some( + std::iter::once(out_of_range_hint) + .chain(positions.into_iter().enumerate().map(|(i, position)| { + lsp::InlayHint { + position, + label: lsp::InlayHintLabel::String(format!( + "{hint_text}{} #{i}", + if edited { "(edited)" } else { "" }, + )), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + } + })) + .collect(), + )) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + + editor.update(cx, |editor, cx| { + let expected_hints = vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + "main hint #4".to_string(), + "main hint #5".to_string(), + ]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints" + ); + 
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt hints should bump the version"); + }).unwrap(); + + editor + .update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges([Point::new(4, 0)..Point::new(4, 0)]) + }); + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges([Point::new(22, 0)..Point::new(22, 0)]) + }); + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges([Point::new(50, 0)..Point::new(50, 0)]) + }); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + let expected_hints = vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + "main hint #4".to_string(), + "main hint #5".to_string(), + "other hint #0".to_string(), + "other hint #1".to_string(), + "other hint #2".to_string(), + ]; + assert_eq!(expected_hints, cached_hint_labels(editor), + "With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits"); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), + "Due to every excerpt having one hint, we update cache per new excerpt scrolled"); + }).unwrap(); + + editor + .update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges([Point::new(100, 0)..Point::new(100, 0)]) + }); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + )); + cx.executor().run_until_parked(); + let last_scroll_update_version = editor.update(cx, |editor, cx| { + let expected_hints = vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + "main hint #4".to_string(), + "main hint #5".to_string(), + "other hint #0".to_string(), + "other hint #1".to_string(), + "other hint #2".to_string(), + "other hint #3".to_string(), + "other hint #4".to_string(), + "other hint #5".to_string(), + ]; + assert_eq!(expected_hints, cached_hint_labels(editor), + "After multibuffer was scrolled to the end, all hints for all excerpts should be fetched"); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, expected_hints.len()); + expected_hints.len() + }).unwrap(); + + editor + .update(cx, |editor, cx| { + editor.change_selections(Some(Autoscroll::Next), cx, |s| { + s.select_ranges([Point::new(4, 0)..Point::new(4, 0)]) + }); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis( + INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, + )); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + let expected_hints = vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + "main hint #4".to_string(), + "main hint #5".to_string(), + "other hint #0".to_string(), + "other hint #1".to_string(), + "other hint #2".to_string(), + "other hint #3".to_string(), + "other hint #4".to_string(), + "other hint #5".to_string(), + ]; + assert_eq!(expected_hints, cached_hint_labels(editor), + "After multibuffer was scrolled to the end, further scrolls up should not bring more hints"); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + 
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scrolled buffer"); + }).unwrap(); + + editor_edited.store(true, Ordering::Release); + editor + .update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(57, 0)..Point::new(57, 0)]) + }); + editor.handle_input("++++more text++++", cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + let expected_hints = vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + "main hint #4".to_string(), + "main hint #5".to_string(), + "other hint(edited) #0".to_string(), + "other hint(edited) #1".to_string(), + ]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "After multibuffer edit, editor gets scrolled back to the last selection; \ + all hints should be invalidated and required for all of its visible excerpts" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + + let current_cache_version = editor.inlay_hint_cache().version; + // We expect two new hints for the excerpts from `other.rs`: + let expected_version = last_scroll_update_version + 2; + assert_eq!( + current_cache_version, + expected_version, + "We should have updated cache N times == N of new hints arrived (separately from each edited excerpt)" + ); + }).unwrap(); + } + + #[gpui::test] + async fn test_excerpts_removed(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: false, + show_parameter_hints: false, + show_other_hints: false, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/a", + json!({ + "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::>().join("")), + "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::>().join("")), + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(crate::editor_tests::rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let worktree_id = project.update(cx, |project, cx| { + project.worktrees().next().unwrap().read(cx).id() + }); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "main.rs"), cx) + }) + .await + .unwrap(); + let buffer_2 = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "other.rs"), cx) + }) + .await + .unwrap(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { + let buffer_1_excerpts = multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange { + context: Point::new(0, 0)..Point::new(2, 0), + primary: None, + }], + cx, + ); + let buffer_2_excerpts = multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange { + context: Point::new(0, 1)..Point::new(2, 1), + primary: None, + }], + cx, + ); + (buffer_1_excerpts, buffer_2_excerpts) + }); + + 
assert!(!buffer_1_excerpts.is_empty()); + assert!(!buffer_2_excerpts.is_empty()); + + cx.executor().run_until_parked(); + let editor = + cx.add_window(|cx| Editor::for_multibuffer(multibuffer, Some(project.clone()), cx)); + let editor_edited = Arc::new(AtomicBool::new(false)); + let fake_server = fake_servers.next().await.unwrap(); + let closure_editor_edited = Arc::clone(&editor_edited); + fake_server + .handle_request::(move |params, _| { + let task_editor_edited = Arc::clone(&closure_editor_edited); + async move { + let hint_text = if params.text_document.uri + == lsp::Url::from_file_path("/a/main.rs").unwrap() + { + "main hint" + } else if params.text_document.uri + == lsp::Url::from_file_path("/a/other.rs").unwrap() + { + "other hint" + } else { + panic!("unexpected uri: {:?}", params.text_document.uri); + }; + + let positions = [ + lsp::Position::new(0, 2), + lsp::Position::new(4, 2), + lsp::Position::new(22, 2), + lsp::Position::new(44, 2), + lsp::Position::new(56, 2), + lsp::Position::new(67, 2), + ]; + let out_of_range_hint = lsp::InlayHint { + position: lsp::Position::new( + params.range.start.line + 99, + params.range.start.character + 99, + ), + label: lsp::InlayHintLabel::String( + "out of excerpt range, should be ignored".to_string(), + ), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }; + + let edited = task_editor_edited.load(Ordering::Acquire); + Ok(Some( + std::iter::once(out_of_range_hint) + .chain(positions.into_iter().enumerate().map(|(i, position)| { + lsp::InlayHint { + position, + label: lsp::InlayHintLabel::String(format!( + "{hint_text}{} #{i}", + if edited { "(edited)" } else { "" }, + )), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + } + })) + .collect(), + )) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, cx| { + assert_eq!( + vec!["main hint #0".to_string(), "other hint #0".to_string()], + cached_hint_labels(editor), + "Cache should update for both excerpts despite hints display was disabled" + ); + assert!( + visible_hint_labels(editor, cx).is_empty(), + "All hints are disabled and should not be shown despite being present in the cache" + ); + assert_eq!( + editor.inlay_hint_cache().version, + 2, + "Cache should update once per excerpt query" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, cx| { + editor.buffer().update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts(buffer_2_excerpts, cx) + }) + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert_eq!( + vec!["main hint #0".to_string()], + cached_hint_labels(editor), + "For the removed excerpt, should clean corresponding cached hints" + ); + assert!( + visible_hint_labels(editor, cx).is_empty(), + "All hints are disabled and should not be shown despite being present in the cache" + ); + assert_eq!( + editor.inlay_hint_cache().version, + 3, + "Excerpt removal should trigger a cache update" + ); + }) + .unwrap(); + + update_test_language_settings(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["main hint #0".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + 
"Hint display settings change should not change the cache" + ); + assert_eq!( + expected_hints, + visible_hint_labels(editor, cx), + "Settings change should make cached hints visible" + ); + assert_eq!( + editor.inlay_hint_cache().version, + 4, + "Settings change should trigger a cache update" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_inside_char_boundary_range_hints(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/a", + json!({ + "main.rs": format!(r#"fn main() {{\n{}\n}}"#, format!("let i = {};\n", "√".repeat(10)).repeat(500)), + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(crate::editor_tests::rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); + let lsp_request_count = Arc::new(AtomicU32::new(0)); + let closure_lsp_request_count = Arc::clone(&lsp_request_count); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = Arc::clone(&closure_lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + let query_start = params.range.start; + let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::Release) + 1; + Ok(Some(vec![lsp::InlayHint { + position: query_start, + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + editor.change_selections(None, cx, |s| { + s.select_ranges([Point::new(10, 0)..Point::new(10, 0)]) + }) + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!(expected_hints, cached_hint_labels(editor)); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 1); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_toggle_inlay_hints(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: false, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + + let (file_with_hints, editor, fake_server) = prepare_test_objects(cx).await; + + editor + .update(cx, |editor, cx| { + editor.toggle_inlay_hints(&crate::ToggleInlayHints, cx) + }) + .unwrap(); + cx.executor().start_waiting(); + let 
lsp_request_count = Arc::new(AtomicU32::new(0)); + let closure_lsp_request_count = Arc::clone(&lsp_request_count); + fake_server + .handle_request::(move |params, _| { + let task_lsp_request_count = Arc::clone(&closure_lsp_request_count); + async move { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path(file_with_hints).unwrap(), + ); + + let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst) + 1; + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }) + .next() + .await; + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should display inlays after toggle despite them disabled in settings" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + editor.inlay_hint_cache().version, + 1, + "First toggle should be cache's first update" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, cx| { + editor.toggle_inlay_hints(&crate::ToggleInlayHints, cx) + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert!( + cached_hint_labels(editor).is_empty(), + "Should clear hints after 2nd toggle" + ); + assert!(visible_hint_labels(editor, cx).is_empty()); + assert_eq!(editor.inlay_hint_cache().version, 2); + }) + .unwrap(); + + update_test_language_settings(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettings { + enabled: true, + edit_debounce_ms: 0, + scroll_debounce_ms: 0, + show_type_hints: true, + show_parameter_hints: true, + show_other_hints: true, + }) + }); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + let expected_hints = vec!["2".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should query LSP hints for the 2nd time after enabling hints in settings" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 3); + }) + .unwrap(); + + editor + .update(cx, |editor, cx| { + editor.toggle_inlay_hints(&crate::ToggleInlayHints, cx) + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, cx| { + assert!( + cached_hint_labels(editor).is_empty(), + "Should clear hints after enabling in settings and a 3rd toggle" + ); + assert!(visible_hint_labels(editor, cx).is_empty()); + assert_eq!(editor.inlay_hint_cache().version, 4); + }) + .unwrap(); + + editor + .update(cx, |editor, cx| { + editor.toggle_inlay_hints(&crate::ToggleInlayHints, cx) + }) + .unwrap(); + cx.executor().run_until_parked(); + editor.update(cx, |editor, cx| { + let expected_hints = vec!["3".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor), + "Should query LSP hints for the 3rd time after enabling hints in settings and toggling them back on" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(editor.inlay_hint_cache().version, 5); + }).unwrap(); + } + + pub(crate) fn init_test(cx: &mut TestAppContext, f: impl Fn(&mut AllLanguageSettingsContent)) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + release_channel::init("0.0.0", cx); + client::init_settings(cx); + language::init(cx); + 
Project::init_settings(cx); + workspace::init_settings(cx); + crate::init(cx); + }); + + update_test_language_settings(cx, f); + } + + async fn prepare_test_objects( + cx: &mut TestAppContext, + ) -> (&'static str, WindowHandle<Editor>, FakeLanguageServer) { + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/a", + json!({ + "main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out", + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, ["/a".as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(crate::editor_tests::rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/a/main.rs", cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + cx.executor().start_waiting(); + let fake_server = fake_servers.next().await.unwrap(); + let editor = cx.add_window(|cx| Editor::for_buffer(buffer, Some(project), cx)); + + editor + .update(cx, |editor, cx| { + assert!(cached_hint_labels(editor).is_empty()); + assert!(visible_hint_labels(editor, cx).is_empty()); + assert_eq!(editor.inlay_hint_cache().version, 0); + }) + .unwrap(); + + ("/a/main.rs", editor, fake_server) + } + + pub fn cached_hint_labels(editor: &Editor) -> Vec<String> { + let mut labels = Vec::new(); + for (_, excerpt_hints) in &editor.inlay_hint_cache().hints { + let excerpt_hints = excerpt_hints.read(); + for id in &excerpt_hints.ordered_hints { + labels.push(excerpt_hints.hints_by_id[id].text()); + } + } + + labels.sort(); + labels + } + + pub fn visible_hint_labels(editor: &Editor, cx: &ViewContext<'_, Editor>) -> Vec<String> { + let mut hints = editor + .visible_inlay_hints(cx) + .into_iter() + .map(|hint| hint.text.to_string()) + .collect::<Vec<_>>(); + hints.sort(); + hints + } +} diff --git a/crates/editor/src/inline_completion_provider.rs b/crates/editor/src/inline_completion_provider.rs new file mode 100644 index 0000000..3fd9135 --- /dev/null +++ b/crates/editor/src/inline_completion_provider.rs @@ -0,0 +1,124 @@ +use crate::Direction; +use gpui::{AppContext, Model, ModelContext}; +use language::Buffer; + +pub trait InlineCompletionProvider: 'static + Sized { + fn name() -> &'static str; + fn is_enabled( + &self, + buffer: &Model<Buffer>, + cursor_position: language::Anchor, + cx: &AppContext, + ) -> bool; + fn refresh( + &mut self, + buffer: Model<Buffer>, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut ModelContext<Self>, + ); + fn cycle( + &mut self, + buffer: Model<Buffer>, + cursor_position: language::Anchor, + direction: Direction, + cx: &mut ModelContext<Self>, + ); + fn accept(&mut self, cx: &mut ModelContext<Self>); + fn discard(&mut self, should_report_inline_completion_event: bool, cx: &mut ModelContext<Self>); + fn active_completion_text<'a>( + &'a self, + buffer: &Model<Buffer>, + cursor_position: language::Anchor, + cx: &'a AppContext, + ) -> Option<&'a str>; +} + +pub trait InlineCompletionProviderHandle { + fn is_enabled( + &self, + buffer: &Model<Buffer>, + cursor_position: language::Anchor, + cx: &AppContext, + ) -> bool; + fn refresh( + &self, + buffer: Model<Buffer>, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut AppContext, + ); + fn cycle( + &self, + buffer: Model<Buffer>, + cursor_position:
language::Anchor, + direction: Direction, + cx: &mut AppContext, + ); + fn accept(&self, cx: &mut AppContext); + fn discard(&self, should_report_inline_completion_event: bool, cx: &mut AppContext); + fn active_completion_text<'a>( + &'a self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &'a AppContext, + ) -> Option<&'a str>; +} + +impl InlineCompletionProviderHandle for Model +where + T: InlineCompletionProvider, +{ + fn is_enabled( + &self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &AppContext, + ) -> bool { + self.read(cx).is_enabled(buffer, cursor_position, cx) + } + + fn refresh( + &self, + buffer: Model, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut AppContext, + ) { + self.update(cx, |this, cx| { + this.refresh(buffer, cursor_position, debounce, cx) + }) + } + + fn cycle( + &self, + buffer: Model, + cursor_position: language::Anchor, + direction: Direction, + cx: &mut AppContext, + ) { + self.update(cx, |this, cx| { + this.cycle(buffer, cursor_position, direction, cx) + }) + } + + fn accept(&self, cx: &mut AppContext) { + self.update(cx, |this, cx| this.accept(cx)) + } + + fn discard(&self, should_report_inline_completion_event: bool, cx: &mut AppContext) { + self.update(cx, |this, cx| { + this.discard(should_report_inline_completion_event, cx) + }) + } + + fn active_completion_text<'a>( + &'a self, + buffer: &Model, + cursor_position: language::Anchor, + cx: &'a AppContext, + ) -> Option<&'a str> { + self.read(cx) + .active_completion_text(buffer, cursor_position, cx) + } +} diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs new file mode 100644 index 0000000..2c10212 --- /dev/null +++ b/crates/editor/src/items.rs @@ -0,0 +1,1315 @@ +use crate::{ + editor_settings::SeedQuerySetting, persistence::DB, scroll::ScrollAnchor, Anchor, Autoscroll, + Editor, EditorEvent, EditorSettings, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, + NavigationData, SearchWithinRange, ToPoint as _, +}; +use anyhow::{anyhow, Context as _, Result}; +use collections::HashSet; +use futures::future::try_join_all; +use git::repository::GitFileStatus; +use gpui::{ + point, AnyElement, AppContext, AsyncWindowContext, Context, Entity, EntityId, EventEmitter, + IntoElement, Model, ParentElement, Pixels, SharedString, Styled, Task, View, ViewContext, + VisualContext, WeakView, WindowContext, +}; +use language::{ + proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, CharKind, OffsetRangeExt, + Point, SelectionGoal, +}; +use multi_buffer::AnchorRangeExt; +use project::{search::SearchQuery, FormatTrigger, Item as _, Project, ProjectPath}; +use rpc::proto::{self, update_view, PeerId}; +use settings::Settings; +use workspace::item::{ItemSettings, TabContentParams}; + +use std::{ + any::TypeId, + borrow::Cow, + cmp::{self, Ordering}, + iter, + ops::Range, + path::Path, + sync::Arc, +}; +use text::{BufferId, Selection}; +use theme::{Theme, ThemeSettings}; +use ui::{h_flex, prelude::*, Label}; +use util::{paths::PathExt, ResultExt, TryFutureExt}; +use workspace::item::{BreadcrumbText, FollowEvent, FollowableItemHandle}; +use workspace::{ + item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem}, + searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle}, + ItemId, ItemNavHistory, Pane, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, +}; + +pub const MAX_TAB_TITLE_LEN: usize = 24; + +impl FollowableItem for Editor { + fn remote_id(&self) -> Option { + self.remote_id + } + + fn from_state_proto( + 
pane: View, + workspace: View, + remote_id: ViewId, + state: &mut Option, + cx: &mut WindowContext, + ) -> Option>>> { + let project = workspace.read(cx).project().to_owned(); + let Some(proto::view::Variant::Editor(_)) = state else { + return None; + }; + let Some(proto::view::Variant::Editor(state)) = state.take() else { + unreachable!() + }; + + let client = project.read(cx).client(); + let replica_id = project.read(cx).replica_id(); + let buffer_ids = state + .excerpts + .iter() + .map(|excerpt| excerpt.buffer_id) + .collect::>(); + let buffers = project.update(cx, |project, cx| { + buffer_ids + .iter() + .map(|id| BufferId::new(*id).map(|id| project.open_buffer_by_id(id, cx))) + .collect::>>() + }); + + let pane = pane.downgrade(); + Some(cx.spawn(|mut cx| async move { + let mut buffers = futures::future::try_join_all(buffers?) + .await + .debug_assert_ok("leaders don't share views for unshared buffers")?; + + let editor = pane.update(&mut cx, |pane, cx| { + let mut editors = pane.items_of_type::(); + editors.find(|editor| { + let ids_match = editor.remote_id(&client, cx) == Some(remote_id); + let singleton_buffer_matches = state.singleton + && buffers.first() + == editor.read(cx).buffer.read(cx).as_singleton().as_ref(); + ids_match || singleton_buffer_matches + }) + })?; + + let editor = if let Some(editor) = editor { + editor + } else { + pane.update(&mut cx, |_, cx| { + let multibuffer = cx.new_model(|cx| { + let mut multibuffer; + if state.singleton && buffers.len() == 1 { + multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx) + } else { + multibuffer = + MultiBuffer::new(replica_id, project.read(cx).capability()); + let mut excerpts = state.excerpts.into_iter().peekable(); + while let Some(excerpt) = excerpts.peek() { + let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { + continue; + }; + let buffer_excerpts = iter::from_fn(|| { + let excerpt = excerpts.peek()?; + (excerpt.buffer_id == u64::from(buffer_id)) + .then(|| excerpts.next().unwrap()) + }); + let buffer = + buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id); + if let Some(buffer) = buffer { + multibuffer.push_excerpts( + buffer.clone(), + buffer_excerpts.filter_map(deserialize_excerpt_range), + cx, + ); + } + } + }; + + if let Some(title) = &state.title { + multibuffer = multibuffer.with_title(title.clone()) + } + + multibuffer + }); + + cx.new_view(|cx| { + let mut editor = + Editor::for_multibuffer(multibuffer, Some(project.clone()), cx); + editor.remote_id = Some(remote_id); + editor + }) + })? 
+ }; + + update_editor_from_message( + editor.downgrade(), + project, + proto::update_view::Editor { + selections: state.selections, + pending_selection: state.pending_selection, + scroll_top_anchor: state.scroll_top_anchor, + scroll_x: state.scroll_x, + scroll_y: state.scroll_y, + ..Default::default() + }, + &mut cx, + ) + .await?; + + Ok(editor) + })) + } + + fn set_leader_peer_id(&mut self, leader_peer_id: Option, cx: &mut ViewContext) { + self.leader_peer_id = leader_peer_id; + if self.leader_peer_id.is_some() { + self.buffer.update(cx, |buffer, cx| { + buffer.remove_active_selections(cx); + }); + } else if self.focus_handle.is_focused(cx) { + self.buffer.update(cx, |buffer, cx| { + buffer.set_active_selections( + &self.selections.disjoint_anchors(), + self.selections.line_mode, + self.cursor_shape, + cx, + ); + }); + } + cx.notify(); + } + + fn to_state_proto(&self, cx: &WindowContext) -> Option { + let buffer = self.buffer.read(cx); + if buffer + .as_singleton() + .and_then(|buffer| buffer.read(cx).file()) + .map_or(false, |file| file.is_private()) + { + return None; + } + + let scroll_anchor = self.scroll_manager.anchor(); + let excerpts = buffer + .read(cx) + .excerpts() + .map(|(id, buffer, range)| proto::Excerpt { + id: id.to_proto(), + buffer_id: buffer.remote_id().into(), + context_start: Some(serialize_text_anchor(&range.context.start)), + context_end: Some(serialize_text_anchor(&range.context.end)), + primary_start: range + .primary + .as_ref() + .map(|range| serialize_text_anchor(&range.start)), + primary_end: range + .primary + .as_ref() + .map(|range| serialize_text_anchor(&range.end)), + }) + .collect(); + + Some(proto::view::Variant::Editor(proto::view::Editor { + singleton: buffer.is_singleton(), + title: (!buffer.is_singleton()).then(|| buffer.title(cx).into()), + excerpts, + scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor)), + scroll_x: scroll_anchor.offset.x, + scroll_y: scroll_anchor.offset.y, + selections: self + .selections + .disjoint_anchors() + .iter() + .map(serialize_selection) + .collect(), + pending_selection: self + .selections + .pending_anchor() + .as_ref() + .map(serialize_selection), + })) + } + + fn to_follow_event(event: &EditorEvent) -> Option { + match event { + EditorEvent::Edited => Some(FollowEvent::Unfollow), + EditorEvent::SelectionsChanged { local } + | EditorEvent::ScrollPositionChanged { local, .. 
} => { + if *local { + Some(FollowEvent::Unfollow) + } else { + None + } + } + _ => None, + } + } + + fn add_event_to_update_proto( + &self, + event: &EditorEvent, + update: &mut Option, + cx: &WindowContext, + ) -> bool { + let update = + update.get_or_insert_with(|| proto::update_view::Variant::Editor(Default::default())); + + match update { + proto::update_view::Variant::Editor(update) => match event { + EditorEvent::ExcerptsAdded { + buffer, + predecessor, + excerpts, + } => { + let buffer_id = buffer.read(cx).remote_id(); + let mut excerpts = excerpts.iter(); + if let Some((id, range)) = excerpts.next() { + update.inserted_excerpts.push(proto::ExcerptInsertion { + previous_excerpt_id: Some(predecessor.to_proto()), + excerpt: serialize_excerpt(buffer_id, id, range), + }); + update.inserted_excerpts.extend(excerpts.map(|(id, range)| { + proto::ExcerptInsertion { + previous_excerpt_id: None, + excerpt: serialize_excerpt(buffer_id, id, range), + } + })) + } + true + } + EditorEvent::ExcerptsRemoved { ids } => { + update + .deleted_excerpts + .extend(ids.iter().map(ExcerptId::to_proto)); + true + } + EditorEvent::ScrollPositionChanged { autoscroll, .. } if !autoscroll => { + let scroll_anchor = self.scroll_manager.anchor(); + update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.anchor)); + update.scroll_x = scroll_anchor.offset.x; + update.scroll_y = scroll_anchor.offset.y; + true + } + EditorEvent::SelectionsChanged { .. } => { + update.selections = self + .selections + .disjoint_anchors() + .iter() + .map(serialize_selection) + .collect(); + update.pending_selection = self + .selections + .pending_anchor() + .as_ref() + .map(serialize_selection); + true + } + _ => false, + }, + } + } + + fn apply_update_proto( + &mut self, + project: &Model, + message: update_view::Variant, + cx: &mut ViewContext, + ) -> Task> { + let update_view::Variant::Editor(message) = message; + let project = project.clone(); + cx.spawn(|this, mut cx| async move { + update_editor_from_message(this, project, message, &mut cx).await + }) + } + + fn is_project_item(&self, _cx: &WindowContext) -> bool { + true + } +} + +async fn update_editor_from_message( + this: WeakView, + project: Model, + message: proto::update_view::Editor, + cx: &mut AsyncWindowContext, +) -> Result<()> { + // Open all of the buffers of which excerpts were added to the editor. + let inserted_excerpt_buffer_ids = message + .inserted_excerpts + .iter() + .filter_map(|insertion| Some(insertion.excerpt.as_ref()?.buffer_id)) + .collect::>(); + let inserted_excerpt_buffers = project.update(cx, |project, cx| { + inserted_excerpt_buffer_ids + .into_iter() + .map(|id| BufferId::new(id).map(|id| project.open_buffer_by_id(id, cx))) + .collect::>>() + })??; + let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?; + + // Update the editor's excerpts. 
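+    // The leader's excerpt insertions are applied first; the deleted excerpt ids are
+    // sorted into multibuffer order and removed in a single batch afterwards.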
+ this.update(cx, |editor, cx| { + editor.buffer.update(cx, |multibuffer, cx| { + let mut removed_excerpt_ids = message + .deleted_excerpts + .into_iter() + .map(ExcerptId::from_proto) + .collect::>(); + removed_excerpt_ids.sort_by({ + let multibuffer = multibuffer.read(cx); + move |a, b| a.cmp(&b, &multibuffer) + }); + + let mut insertions = message.inserted_excerpts.into_iter().peekable(); + while let Some(insertion) = insertions.next() { + let Some(excerpt) = insertion.excerpt else { + continue; + }; + let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { + continue; + }; + let buffer_id = BufferId::new(excerpt.buffer_id)?; + let Some(buffer) = project.read(cx).buffer_for_id(buffer_id) else { + continue; + }; + + let adjacent_excerpts = iter::from_fn(|| { + let insertion = insertions.peek()?; + if insertion.previous_excerpt_id.is_none() + && insertion.excerpt.as_ref()?.buffer_id == u64::from(buffer_id) + { + insertions.next()?.excerpt + } else { + None + } + }); + + multibuffer.insert_excerpts_with_ids_after( + ExcerptId::from_proto(previous_excerpt_id), + buffer, + [excerpt] + .into_iter() + .chain(adjacent_excerpts) + .filter_map(|excerpt| { + Some(( + ExcerptId::from_proto(excerpt.id), + deserialize_excerpt_range(excerpt)?, + )) + }), + cx, + ); + } + + multibuffer.remove_excerpts(removed_excerpt_ids, cx); + Result::<(), anyhow::Error>::Ok(()) + }) + })??; + + // Deserialize the editor state. + let (selections, pending_selection, scroll_top_anchor) = this.update(cx, |editor, cx| { + let buffer = editor.buffer.read(cx).read(cx); + let selections = message + .selections + .into_iter() + .filter_map(|selection| deserialize_selection(&buffer, selection)) + .collect::>(); + let pending_selection = message + .pending_selection + .and_then(|selection| deserialize_selection(&buffer, selection)); + let scroll_top_anchor = message + .scroll_top_anchor + .and_then(|anchor| deserialize_anchor(&buffer, anchor)); + anyhow::Ok((selections, pending_selection, scroll_top_anchor)) + })??; + + // Wait until the buffer has received all of the operations referenced by + // the editor's new state. + this.update(cx, |editor, cx| { + editor.buffer.update(cx, |buffer, cx| { + buffer.wait_for_anchors( + selections + .iter() + .chain(pending_selection.as_ref()) + .flat_map(|selection| [selection.start, selection.end]) + .chain(scroll_top_anchor), + cx, + ) + }) + })? + .await?; + + // Update the editor's state. 
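+    // If the message carries selections, they take precedence and the view autoscrolls
+    // to the newest one; otherwise only the leader's scroll position is mirrored.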
+ this.update(cx, |editor, cx| { + if !selections.is_empty() || pending_selection.is_some() { + editor.set_selections_from_remote(selections, pending_selection, cx); + editor.request_autoscroll_remotely(Autoscroll::newest(), cx); + } else if let Some(scroll_top_anchor) = scroll_top_anchor { + editor.set_scroll_anchor_remote( + ScrollAnchor { + anchor: scroll_top_anchor, + offset: point(message.scroll_x, message.scroll_y), + }, + cx, + ); + } + })?; + Ok(()) +} + +fn serialize_excerpt( + buffer_id: BufferId, + id: &ExcerptId, + range: &ExcerptRange, +) -> Option { + Some(proto::Excerpt { + id: id.to_proto(), + buffer_id: buffer_id.into(), + context_start: Some(serialize_text_anchor(&range.context.start)), + context_end: Some(serialize_text_anchor(&range.context.end)), + primary_start: range + .primary + .as_ref() + .map(|r| serialize_text_anchor(&r.start)), + primary_end: range + .primary + .as_ref() + .map(|r| serialize_text_anchor(&r.end)), + }) +} + +fn serialize_selection(selection: &Selection) -> proto::Selection { + proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, + } +} + +fn serialize_anchor(anchor: &Anchor) -> proto::EditorAnchor { + proto::EditorAnchor { + excerpt_id: anchor.excerpt_id.to_proto(), + anchor: Some(serialize_text_anchor(&anchor.text_anchor)), + } +} + +fn deserialize_excerpt_range(excerpt: proto::Excerpt) -> Option> { + let context = { + let start = language::proto::deserialize_anchor(excerpt.context_start?)?; + let end = language::proto::deserialize_anchor(excerpt.context_end?)?; + start..end + }; + let primary = excerpt + .primary_start + .zip(excerpt.primary_end) + .and_then(|(start, end)| { + let start = language::proto::deserialize_anchor(start)?; + let end = language::proto::deserialize_anchor(end)?; + Some(start..end) + }); + Some(ExcerptRange { context, primary }) +} + +fn deserialize_selection( + buffer: &MultiBufferSnapshot, + selection: proto::Selection, +) -> Option> { + Some(Selection { + id: selection.id as usize, + start: deserialize_anchor(buffer, selection.start?)?, + end: deserialize_anchor(buffer, selection.end?)?, + reversed: selection.reversed, + goal: SelectionGoal::None, + }) +} + +fn deserialize_anchor(buffer: &MultiBufferSnapshot, anchor: proto::EditorAnchor) -> Option { + let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id); + Some(Anchor { + excerpt_id, + text_anchor: language::proto::deserialize_anchor(anchor.anchor?)?, + buffer_id: buffer.buffer_id_for_excerpt(excerpt_id), + }) +} + +impl Item for Editor { + type Event = EditorEvent; + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { + if let Ok(data) = data.downcast::() { + let newest_selection = self.selections.newest::(cx); + let buffer = self.buffer.read(cx).read(cx); + let offset = if buffer.can_resolve(&data.cursor_anchor) { + data.cursor_anchor.to_point(&buffer) + } else { + buffer.clip_point(data.cursor_position, Bias::Left) + }; + + let mut scroll_anchor = data.scroll_anchor; + if !buffer.can_resolve(&scroll_anchor.anchor) { + scroll_anchor.anchor = buffer.anchor_before( + buffer.clip_point(Point::new(data.scroll_top_row, 0), Bias::Left), + ); + } + + drop(buffer); + + if newest_selection.head() == offset { + false + } else { + let nav_history = self.nav_history.take(); + self.set_scroll_anchor(scroll_anchor, cx); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges([offset..offset]) + }); + 
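+                // Restore the navigation history that was taken above, so this jump does
+                // not record itself as a new navigation entry.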
self.nav_history = nav_history; + true + } + } else { + false + } + } + + fn tab_tooltip_text(&self, cx: &AppContext) -> Option { + let file_path = self + .buffer() + .read(cx) + .as_singleton()? + .read(cx) + .file() + .and_then(|f| f.as_local())? + .abs_path(cx); + + let file_path = file_path.compact().to_string_lossy().to_string(); + + Some(file_path.into()) + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + None + } + + fn tab_description(&self, detail: usize, cx: &AppContext) -> Option { + let path = path_for_buffer(&self.buffer, detail, true, cx)?; + Some(path.to_string_lossy().to_string().into()) + } + + fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { + let label_color = if ItemSettings::get_global(cx).git_status { + self.buffer() + .read(cx) + .as_singleton() + .and_then(|buffer| buffer.read(cx).project_path(cx)) + .and_then(|path| self.project.as_ref()?.read(cx).entry_for_path(&path, cx)) + .map(|entry| { + entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected) + }) + .unwrap_or_else(|| entry_label_color(params.selected)) + } else { + entry_label_color(params.selected) + }; + + let description = params.detail.and_then(|detail| { + let path = path_for_buffer(&self.buffer, detail, false, cx)?; + let description = path.to_string_lossy(); + let description = description.trim(); + + if description.is_empty() { + return None; + } + + Some(util::truncate_and_trailoff(&description, MAX_TAB_TITLE_LEN)) + }); + + h_flex() + .gap_2() + .child( + Label::new(self.title(cx).to_string()) + .color(label_color) + .italic(params.preview), + ) + .when_some(description, |this, description| { + this.child( + Label::new(description) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + }) + .into_any_element() + } + + fn for_each_project_item( + &self, + cx: &AppContext, + f: &mut dyn FnMut(EntityId, &dyn project::Item), + ) { + self.buffer + .read(cx) + .for_each_buffer(|buffer| f(buffer.entity_id(), buffer.read(cx))); + } + + fn is_singleton(&self, cx: &AppContext) -> bool { + self.buffer.read(cx).is_singleton() + } + + fn clone_on_split( + &self, + _workspace_id: WorkspaceId, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(|cx| self.clone(cx))) + } + + fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext) { + self.nav_history = Some(history); + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + let selection = self.selections.newest_anchor(); + self.push_to_nav_history(selection.head(), None, cx); + } + + fn workspace_deactivated(&mut self, cx: &mut ViewContext) { + self.hide_hovered_link(cx); + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.buffer().read(cx).read(cx).is_dirty() + } + + fn has_conflict(&self, cx: &AppContext) -> bool { + self.buffer().read(cx).read(cx).has_conflict() + } + + fn can_save(&self, cx: &AppContext) -> bool { + let buffer = &self.buffer().read(cx); + if let Some(buffer) = buffer.as_singleton() { + buffer.read(cx).project_path(cx).is_some() + } else { + true + } + } + + fn save( + &mut self, + format: bool, + project: Model, + cx: &mut ViewContext, + ) -> Task> { + self.report_editor_event("save", None, cx); + let buffers = self.buffer().clone().read(cx).all_buffers(); + cx.spawn(|this, mut cx| async move { + if format { + this.update(&mut cx, |editor, cx| { + editor.perform_format(project.clone(), FormatTrigger::Save, cx) + })? 
+ .await?; + } + + if buffers.len() == 1 { + // Apply full save routine for singleton buffers, to allow to `touch` the file via the editor. + project + .update(&mut cx, |project, cx| project.save_buffers(buffers, cx))? + .await?; + } else { + // For multi-buffers, only format and save the buffers with changes. + // For clean buffers, we simulate saving by calling `Buffer::did_save`, + // so that language servers or other downstream listeners of save events get notified. + let (dirty_buffers, clean_buffers) = buffers.into_iter().partition(|buffer| { + buffer + .update(&mut cx, |buffer, _| { + buffer.is_dirty() || buffer.has_conflict() + }) + .unwrap_or(false) + }); + + project + .update(&mut cx, |project, cx| { + project.save_buffers(dirty_buffers, cx) + })? + .await?; + for buffer in clean_buffers { + buffer + .update(&mut cx, |buffer, cx| { + let version = buffer.saved_version().clone(); + let mtime = buffer.saved_mtime(); + buffer.did_save(version, mtime, cx); + }) + .ok(); + } + } + + Ok(()) + }) + } + + fn save_as( + &mut self, + project: Model, + path: ProjectPath, + cx: &mut ViewContext, + ) -> Task> { + let buffer = self + .buffer() + .read(cx) + .as_singleton() + .expect("cannot call save_as on an excerpt list"); + + let file_extension = path + .path + .extension() + .map(|a| a.to_string_lossy().to_string()); + self.report_editor_event("save", file_extension, cx); + + project.update(cx, |project, cx| project.save_buffer_as(buffer, path, cx)) + } + + fn reload(&mut self, project: Model, cx: &mut ViewContext) -> Task> { + let buffer = self.buffer().clone(); + let buffers = self.buffer.read(cx).all_buffers(); + let reload_buffers = + project.update(cx, |project, cx| project.reload_buffers(buffers, true, cx)); + cx.spawn(|this, mut cx| async move { + let transaction = reload_buffers.log_err().await; + this.update(&mut cx, |editor, cx| { + editor.request_autoscroll(Autoscroll::fit(), cx) + })?; + buffer + .update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !buffer.is_singleton() { + buffer.push_transaction(&transaction.0, cx); + } + } + }) + .ok(); + Ok(()) + }) + } + + fn as_searchable(&self, handle: &View) -> Option> { + Some(Box::new(handle.clone())) + } + + fn pixel_position_of_cursor(&self, _: &AppContext) -> Option> { + self.pixel_position_of_newest_cursor + } + + fn breadcrumb_location(&self) -> ToolbarItemLocation { + if self.show_breadcrumbs { + ToolbarItemLocation::PrimaryLeft + } else { + ToolbarItemLocation::Hidden + } + } + + fn breadcrumbs(&self, variant: &Theme, cx: &AppContext) -> Option> { + let cursor = self.selections.newest_anchor().head(); + let multibuffer = &self.buffer().read(cx); + let (buffer_id, symbols) = + multibuffer.symbols_containing(cursor, Some(&variant.syntax()), cx)?; + let buffer = multibuffer.buffer(buffer_id)?; + + let buffer = buffer.read(cx); + let filename = buffer + .snapshot() + .resolve_file_path( + cx, + self.project + .as_ref() + .map(|project| project.read(cx).visible_worktrees(cx).count() > 1) + .unwrap_or_default(), + ) + .map(|path| path.to_string_lossy().to_string()) + .unwrap_or_else(|| "untitled".to_string()); + + let settings = ThemeSettings::get_global(cx); + + let mut breadcrumbs = vec![BreadcrumbText { + text: filename, + highlights: None, + font: Some(settings.buffer_font.clone()), + }]; + + breadcrumbs.extend(symbols.into_iter().map(|symbol| BreadcrumbText { + text: symbol.text, + highlights: Some(symbol.highlight_ranges), + font: Some(settings.buffer_font.clone()), + })); + Some(breadcrumbs) + } + + 
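+    // Persist the buffer's path to the workspace database when this editor is added to a
+    // workspace (and again whenever the file handle changes), so that `deserialize` below
+    // can reopen the same file when the workspace is restored.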
fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + let workspace_id = workspace.database_id(); + let item_id = cx.view().item_id().as_u64() as ItemId; + self.workspace = Some((workspace.weak_handle(), workspace.database_id())); + + fn serialize( + buffer: Model, + workspace_id: WorkspaceId, + item_id: ItemId, + cx: &mut AppContext, + ) { + if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) { + let path = file.abs_path(cx); + + cx.background_executor() + .spawn(async move { + DB.save_path(item_id, workspace_id, path.clone()) + .await + .log_err() + }) + .detach(); + } + } + + if let Some(buffer) = self.buffer().read(cx).as_singleton() { + serialize(buffer.clone(), workspace_id, item_id, cx); + + cx.subscribe(&buffer, |this, buffer, event, cx| { + if let Some((_, workspace_id)) = this.workspace.as_ref() { + if let language::Event::FileHandleChanged = event { + serialize( + buffer, + *workspace_id, + cx.view().item_id().as_u64() as ItemId, + cx, + ); + } + } + }) + .detach(); + } + } + + fn serialized_item_kind() -> Option<&'static str> { + Some("Editor") + } + + fn to_item_events(event: &EditorEvent, mut f: impl FnMut(ItemEvent)) { + match event { + EditorEvent::Closed => f(ItemEvent::CloseItem), + + EditorEvent::Saved | EditorEvent::TitleChanged => { + f(ItemEvent::UpdateTab); + f(ItemEvent::UpdateBreadcrumbs); + } + + EditorEvent::Reparsed => { + f(ItemEvent::UpdateBreadcrumbs); + } + + EditorEvent::SelectionsChanged { local } if *local => { + f(ItemEvent::UpdateBreadcrumbs); + } + + EditorEvent::DirtyChanged => { + f(ItemEvent::UpdateTab); + } + + EditorEvent::BufferEdited => { + f(ItemEvent::Edit); + f(ItemEvent::UpdateBreadcrumbs); + } + + EditorEvent::ExcerptsAdded { .. } | EditorEvent::ExcerptsRemoved { .. } => { + f(ItemEvent::Edit); + } + + _ => {} + } + } + + fn deserialize( + project: Model, + _workspace: WeakView, + workspace_id: workspace::WorkspaceId, + item_id: ItemId, + cx: &mut ViewContext, + ) -> Task>> { + let project_item: Result<_> = project.update(cx, |project, cx| { + // Look up the path with this key associated, create a self with that path + let path = DB + .get_path(item_id, workspace_id)? 
+ .context("No path stored for this editor")?; + + let (worktree, path) = project + .find_local_worktree(&path, cx) + .with_context(|| format!("No worktree for path: {path:?}"))?; + let project_path = ProjectPath { + worktree_id: worktree.read(cx).id(), + path: path.into(), + }; + + Ok(project.open_path(project_path, cx)) + }); + + project_item + .map(|project_item| { + cx.spawn(|pane, mut cx| async move { + let (_, project_item) = project_item.await?; + let buffer = project_item + .downcast::() + .map_err(|_| anyhow!("Project item at stored path was not a buffer"))?; + pane.update(&mut cx, |_, cx| { + cx.new_view(|cx| { + let mut editor = Editor::for_buffer(buffer, Some(project), cx); + + editor.read_scroll_position_from_db(item_id, workspace_id, cx); + editor + }) + }) + }) + }) + .unwrap_or_else(|error| Task::ready(Err(error))) + } +} + +impl ProjectItem for Editor { + type Item = Buffer; + + fn for_project_item( + project: Model, + buffer: Model, + cx: &mut ViewContext, + ) -> Self { + Self::for_buffer(buffer, Some(project), cx) + } +} + +impl EventEmitter for Editor {} + +pub(crate) enum BufferSearchHighlights {} +impl SearchableItem for Editor { + type Match = Range; + + fn clear_matches(&mut self, cx: &mut ViewContext) { + self.clear_background_highlights::(cx); + } + + fn update_matches(&mut self, matches: &[Range], cx: &mut ViewContext) { + self.highlight_background::( + matches, + |theme| theme.search_match_background, + cx, + ); + } + + fn has_filtered_search_ranges(&mut self) -> bool { + self.has_background_highlights::() + } + + fn query_suggestion(&mut self, cx: &mut ViewContext) -> String { + let setting = EditorSettings::get_global(cx).seed_search_query_from_cursor; + let snapshot = &self.snapshot(cx).buffer_snapshot; + let selection = self.selections.newest::(cx); + + match setting { + SeedQuerySetting::Never => String::new(), + SeedQuerySetting::Selection | SeedQuerySetting::Always if !selection.is_empty() => { + snapshot + .text_for_range(selection.start..selection.end) + .collect() + } + SeedQuerySetting::Selection => String::new(), + SeedQuerySetting::Always => { + let (range, kind) = snapshot.surrounding_word(selection.start); + if kind == Some(CharKind::Word) { + let text: String = snapshot.text_for_range(range).collect(); + if !text.trim().is_empty() { + return text; + } + } + String::new() + } + } + } + + fn activate_match( + &mut self, + index: usize, + matches: &[Range], + cx: &mut ViewContext, + ) { + self.unfold_ranges([matches[index].clone()], false, true, cx); + let range = self.range_for_match(&matches[index]); + self.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges([range]); + }) + } + + fn select_matches(&mut self, matches: &[Self::Match], cx: &mut ViewContext) { + self.unfold_ranges(matches.to_vec(), false, false, cx); + let mut ranges = Vec::new(); + for m in matches { + ranges.push(self.range_for_match(&m)) + } + self.change_selections(None, cx, |s| s.select_ranges(ranges)); + } + fn replace( + &mut self, + identifier: &Self::Match, + query: &SearchQuery, + cx: &mut ViewContext, + ) { + let text = self.buffer.read(cx); + let text = text.snapshot(cx); + let text = text.text_for_range(identifier.clone()).collect::>(); + let text: Cow<_> = if text.len() == 1 { + text.first().cloned().unwrap().into() + } else { + let joined_chunks = text.join(""); + joined_chunks.into() + }; + + if let Some(replacement) = query.replacement_for(&text) { + self.transact(cx, |this, cx| { + this.edit([(identifier.clone(), Arc::from(&*replacement))], cx); + 
}); + } + } + fn match_index_for_direction( + &mut self, + matches: &[Range], + current_index: usize, + direction: Direction, + count: usize, + cx: &mut ViewContext, + ) -> usize { + let buffer = self.buffer().read(cx).snapshot(cx); + let current_index_position = if self.selections.disjoint_anchors().len() == 1 { + self.selections.newest_anchor().head() + } else { + matches[current_index].start + }; + + let mut count = count % matches.len(); + if count == 0 { + return current_index; + } + match direction { + Direction::Next => { + if matches[current_index] + .start + .cmp(¤t_index_position, &buffer) + .is_gt() + { + count = count - 1 + } + + (current_index + count) % matches.len() + } + Direction::Prev => { + if matches[current_index] + .end + .cmp(¤t_index_position, &buffer) + .is_lt() + { + count = count - 1; + } + + if current_index >= count { + current_index - count + } else { + matches.len() - (count - current_index) + } + } + } + } + + fn find_matches( + &mut self, + query: Arc, + cx: &mut ViewContext, + ) -> Task>> { + let buffer = self.buffer().read(cx).snapshot(cx); + let search_within_ranges = self + .background_highlights + .get(&TypeId::of::()) + .map(|(_color, ranges)| { + ranges + .iter() + .map(|range| range.to_offset(&buffer)) + .collect::>() + }); + cx.background_executor().spawn(async move { + let mut ranges = Vec::new(); + if let Some((_, _, excerpt_buffer)) = buffer.as_singleton() { + if let Some(search_within_ranges) = search_within_ranges { + for range in search_within_ranges { + let offset = range.start; + ranges.extend( + query + .search(excerpt_buffer, Some(range)) + .await + .into_iter() + .map(|range| { + buffer.anchor_after(range.start + offset) + ..buffer.anchor_before(range.end + offset) + }), + ); + } + } else { + ranges.extend(query.search(excerpt_buffer, None).await.into_iter().map( + |range| buffer.anchor_after(range.start)..buffer.anchor_before(range.end), + )); + } + } else { + for excerpt in buffer.excerpt_boundaries_in_range(0..buffer.len()) { + let excerpt_range = excerpt.range.context.to_offset(&excerpt.buffer); + ranges.extend( + query + .search(&excerpt.buffer, Some(excerpt_range.clone())) + .await + .into_iter() + .map(|range| { + let start = excerpt + .buffer + .anchor_after(excerpt_range.start + range.start); + let end = excerpt + .buffer + .anchor_before(excerpt_range.start + range.end); + buffer.anchor_in_excerpt(excerpt.id, start).unwrap() + ..buffer.anchor_in_excerpt(excerpt.id, end).unwrap() + }), + ); + } + } + ranges + }) + } + + fn active_match_index( + &mut self, + matches: &[Range], + cx: &mut ViewContext, + ) -> Option { + active_match_index( + matches, + &self.selections.newest_anchor().head(), + &self.buffer().read(cx).snapshot(cx), + ) + } + + fn search_bar_visibility_changed(&mut self, _visible: bool, _cx: &mut ViewContext) { + self.expect_bounds_change = self.last_bounds; + } +} + +pub fn active_match_index( + ranges: &[Range], + cursor: &Anchor, + buffer: &MultiBufferSnapshot, +) -> Option { + if ranges.is_empty() { + None + } else { + match ranges.binary_search_by(|probe| { + if probe.end.cmp(cursor, buffer).is_lt() { + Ordering::Less + } else if probe.start.cmp(cursor, buffer).is_gt() { + Ordering::Greater + } else { + Ordering::Equal + } + }) { + Ok(i) | Err(i) => Some(cmp::min(i, ranges.len() - 1)), + } + } +} + +pub fn entry_label_color(selected: bool) -> Color { + if selected { + Color::Default + } else { + Color::Muted + } +} + +pub fn entry_git_aware_label_color( + git_status: Option, + ignored: bool, + selected: bool, 
+) -> Color { + if ignored { + Color::Ignored + } else { + match git_status { + Some(GitFileStatus::Added) => Color::Created, + Some(GitFileStatus::Modified) => Color::Modified, + Some(GitFileStatus::Conflict) => Color::Conflict, + None => entry_label_color(selected), + } + } +} + +fn path_for_buffer<'a>( + buffer: &Model, + height: usize, + include_filename: bool, + cx: &'a AppContext, +) -> Option> { + let file = buffer.read(cx).as_singleton()?.read(cx).file()?; + path_for_file(file.as_ref(), height, include_filename, cx) +} + +fn path_for_file<'a>( + file: &'a dyn language::File, + mut height: usize, + include_filename: bool, + cx: &'a AppContext, +) -> Option> { + // Ensure we always render at least the filename. + height += 1; + + let mut prefix = file.path().as_ref(); + while height > 0 { + if let Some(parent) = prefix.parent() { + prefix = parent; + height -= 1; + } else { + break; + } + } + + // Here we could have just always used `full_path`, but that is very + // allocation-heavy and so we try to use a `Cow` if we haven't + // traversed all the way up to the worktree's root. + if height > 0 { + let full_path = file.full_path(cx); + if include_filename { + Some(full_path.into()) + } else { + Some(full_path.parent()?.to_path_buf().into()) + } + } else { + let mut path = file.path().strip_prefix(prefix).ok()?; + if !include_filename { + path = path.parent()?; + } + Some(path.into()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::AppContext; + use language::TestFile; + use std::path::Path; + + #[gpui::test] + fn test_path_for_file(cx: &mut AppContext) { + let file = TestFile { + path: Path::new("").into(), + root_name: String::new(), + }; + assert_eq!(path_for_file(&file, 0, false, cx), None); + } +} diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs new file mode 100644 index 0000000..09cefc0 --- /dev/null +++ b/crates/editor/src/mouse_context_menu.rs @@ -0,0 +1,145 @@ +use crate::{ + Copy, Cut, DisplayPoint, Editor, EditorMode, FindAllReferences, GoToDefinition, + GoToImplementation, GoToTypeDefinition, Paste, Rename, RevealInFinder, SelectMode, + ToggleCodeActions, +}; +use gpui::{DismissEvent, Pixels, Point, Subscription, View, ViewContext}; +use workspace::OpenInTerminal; + +pub struct MouseContextMenu { + pub(crate) position: Point, + pub(crate) context_menu: View, + _subscription: Subscription, +} + +impl MouseContextMenu { + pub(crate) fn new( + position: Point, + context_menu: View, + cx: &mut ViewContext, + ) -> Self { + let context_menu_focus = context_menu.focus_handle(cx); + cx.focus(&context_menu_focus); + + let _subscription = + cx.subscribe(&context_menu, move |this, _, _event: &DismissEvent, cx| { + this.mouse_context_menu.take(); + if context_menu_focus.contains_focused(cx) { + this.focus(cx); + } + }); + + Self { + position, + context_menu, + _subscription, + } + } +} + +pub fn deploy_context_menu( + editor: &mut Editor, + position: Point, + point: DisplayPoint, + cx: &mut ViewContext, +) { + if !editor.is_focused(cx) { + editor.focus(cx); + } + + // Don't show context menu for inline editors + if editor.mode() != EditorMode::Full { + return; + } + + let context_menu = if let Some(custom) = editor.custom_context_menu.take() { + let menu = custom(editor, point, cx); + editor.custom_context_menu = Some(custom); + if menu.is_none() { + return; + } + menu.unwrap() + } else { + // Don't show the context menu if there isn't a project associated with this editor + if editor.project.is_none() { + return; + } + + 
// Move the cursor to the clicked location so that dispatched actions make sense + editor.change_selections(None, cx, |s| { + s.clear_disjoint(); + s.set_pending_display_range(point..point, SelectMode::Character); + }); + + let focus = cx.focused(); + ui::ContextMenu::build(cx, |menu, _cx| { + let builder = menu + .action("Rename Symbol", Box::new(Rename)) + .action("Go to Definition", Box::new(GoToDefinition)) + .action("Go to Type Definition", Box::new(GoToTypeDefinition)) + .action("Go to Implementation", Box::new(GoToImplementation)) + .action("Find All References", Box::new(FindAllReferences)) + .action( + "Code Actions", + Box::new(ToggleCodeActions { + deployed_from_indicator: None, + }), + ) + .separator() + .action("Cut", Box::new(Cut)) + .action("Copy", Box::new(Copy)) + .action("Paste", Box::new(Paste)) + .separator() + .action("Reveal in Finder", Box::new(RevealInFinder)) + .action("Open in Terminal", Box::new(OpenInTerminal)); + match focus { + Some(focus) => builder.context(focus), + None => builder, + } + }) + }; + let mouse_context_menu = MouseContextMenu::new(position, context_menu, cx); + editor.mouse_context_menu = Some(mouse_context_menu); + cx.notify(); +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext}; + use indoc::indoc; + + #[gpui::test] + async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn teˇst() { + do_work(); + } + "}); + let point = cx.display_point(indoc! {" + fn test() { + do_wˇork(); + } + "}); + cx.editor(|editor, _app| assert!(editor.mouse_context_menu.is_none())); + cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx)); + + cx.assert_editor_state(indoc! {" + fn test() { + do_wˇork(); + } + "}); + cx.editor(|editor, _app| assert!(editor.mouse_context_menu.is_some())); + } +} diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs new file mode 100644 index 0000000..d750b70 --- /dev/null +++ b/crates/editor/src/movement.rs @@ -0,0 +1,1044 @@ +//! Movement module contains helper functions for calculating intended position +//! in editor given a given motion (e.g. it handles converting a "move left" command into coordinates in editor). It is exposed mostly for use by vim crate. + +use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint}; +use crate::{ + char_kind, scroll::ScrollAnchor, CharKind, DisplayRow, EditorStyle, RowExt, ToOffset, ToPoint, +}; +use gpui::{px, Pixels, WindowTextSystem}; +use language::Point; +use multi_buffer::{MultiBufferRow, MultiBufferSnapshot}; +use serde::Deserialize; + +use std::{ops::Range, sync::Arc}; + +/// Defines search strategy for items in `movement` module. +/// `FindRange::SingeLine` only looks for a match on a single line at a time, whereas +/// `FindRange::MultiLine` keeps going until the end of a string. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize)] +pub enum FindRange { + SingleLine, + MultiLine, +} + +/// TextLayoutDetails encompasses everything we need to move vertically +/// taking into account variable width characters. 
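+/// Plain column arithmetic is not enough for that: with variable-width glyphs and inlays,
+/// the same column can land at different x positions on different rows, so `up`/`down`
+/// convert between columns and pixel x coordinates using the text system and style below.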
+pub struct TextLayoutDetails {
+    pub(crate) text_system: Arc<WindowTextSystem>,
+    pub(crate) editor_style: EditorStyle,
+    pub(crate) rem_size: Pixels,
+    pub scroll_anchor: ScrollAnchor,
+    pub visible_rows: Option<f32>,
+    pub vertical_scroll_margin: f32,
+}
+
+/// Returns a column to the left of the current point, wrapping
+/// to the previous line if that point is at the start of line.
+pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
+    if point.column() > 0 {
+        *point.column_mut() -= 1;
+    } else if point.row().0 > 0 {
+        *point.row_mut() -= 1;
+        *point.column_mut() = map.line_len(point.row());
+    }
+    map.clip_point(point, Bias::Left)
+}
+
+/// Returns a column to the left of the current point, doing nothing if
+/// that point is already at the start of line.
+pub fn saturating_left(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
+    if point.column() > 0 {
+        *point.column_mut() -= 1;
+    } else if point.column() == 0 {
+        // When soft wrap is in effect, a display column of 0 does not necessarily
+        // mean that the point is at the actual beginning of the line.
+        if map.display_point_to_fold_point(point, Bias::Left).column() > 0 {
+            return left(map, point);
+        }
+    }
+    map.clip_point(point, Bias::Left)
+}
+
+/// Returns a column to the right of the current point, wrapping
+/// to the next line if that point is at the end of the line.
+pub fn right(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
+    if point.column() < map.line_len(point.row()) {
+        *point.column_mut() += 1;
+    } else if point.row() < map.max_point().row() {
+        *point.row_mut() += 1;
+        *point.column_mut() = 0;
+    }
+    map.clip_point(point, Bias::Right)
+}
+
+/// Returns a column to the right of the current point, not performing any wrapping
+/// if that point is already at the end of line.
+pub fn saturating_right(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
+    *point.column_mut() += 1;
+    map.clip_point(point, Bias::Right)
+}
+
+/// Returns a display point for the preceding displayed line (which might be a soft-wrapped line).
+pub fn up(
+    map: &DisplaySnapshot,
+    start: DisplayPoint,
+    goal: SelectionGoal,
+    preserve_column_at_start: bool,
+    text_layout_details: &TextLayoutDetails,
+) -> (DisplayPoint, SelectionGoal) {
+    up_by_rows(
+        map,
+        start,
+        1,
+        goal,
+        preserve_column_at_start,
+        text_layout_details,
+    )
+}
+
+/// Returns a display point for the next displayed line (which might be a soft-wrapped line).
+pub fn down(
+    map: &DisplaySnapshot,
+    start: DisplayPoint,
+    goal: SelectionGoal,
+    preserve_column_at_end: bool,
+    text_layout_details: &TextLayoutDetails,
+) -> (DisplayPoint, SelectionGoal) {
+    down_by_rows(
+        map,
+        start,
+        1,
+        goal,
+        preserve_column_at_end,
+        text_layout_details,
+    )
+}
+
+pub(crate) fn up_by_rows(
+    map: &DisplaySnapshot,
+    start: DisplayPoint,
+    row_count: u32,
+    goal: SelectionGoal,
+    preserve_column_at_start: bool,
+    text_layout_details: &TextLayoutDetails,
+) -> (DisplayPoint, SelectionGoal) {
+    let mut goal_x = match goal {
+        SelectionGoal::HorizontalPosition(x) => x.into(),
+        SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(),
+        SelectionGoal::HorizontalRange { end, ..
} => end.into(), + _ => map.x_for_display_point(start, text_layout_details), + }; + + let prev_row = DisplayRow(start.row().0.saturating_sub(row_count)); + let mut point = map.clip_point( + DisplayPoint::new(prev_row, map.line_len(prev_row)), + Bias::Left, + ); + if point.row() < start.row() { + *point.column_mut() = map.display_column_for_x(point.row(), goal_x, text_layout_details) + } else if preserve_column_at_start { + return (start, goal); + } else { + point = DisplayPoint::new(DisplayRow(0), 0); + goal_x = px(0.); + } + + let mut clipped_point = map.clip_point(point, Bias::Left); + if clipped_point.row() < point.row() { + clipped_point = map.clip_point(point, Bias::Right); + } + ( + clipped_point, + SelectionGoal::HorizontalPosition(goal_x.into()), + ) +} + +pub(crate) fn down_by_rows( + map: &DisplaySnapshot, + start: DisplayPoint, + row_count: u32, + goal: SelectionGoal, + preserve_column_at_end: bool, + text_layout_details: &TextLayoutDetails, +) -> (DisplayPoint, SelectionGoal) { + let mut goal_x = match goal { + SelectionGoal::HorizontalPosition(x) => x.into(), + SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(), + SelectionGoal::HorizontalRange { end, .. } => end.into(), + _ => map.x_for_display_point(start, text_layout_details), + }; + + let new_row = DisplayRow(start.row().0 + row_count); + let mut point = map.clip_point(DisplayPoint::new(new_row, 0), Bias::Right); + if point.row() > start.row() { + *point.column_mut() = map.display_column_for_x(point.row(), goal_x, text_layout_details) + } else if preserve_column_at_end { + return (start, goal); + } else { + point = map.max_point(); + goal_x = map.x_for_display_point(point, text_layout_details) + } + + let mut clipped_point = map.clip_point(point, Bias::Right); + if clipped_point.row() > point.row() { + clipped_point = map.clip_point(point, Bias::Left); + } + ( + clipped_point, + SelectionGoal::HorizontalPosition(goal_x.into()), + ) +} + +/// Returns a position of the start of line. +/// If `stop_at_soft_boundaries` is true, the returned position is that of the +/// displayed line (e.g. it could actually be in the middle of a text line if that line is soft-wrapped). +/// Otherwise it's always going to be the start of a logical line. +pub fn line_beginning( + map: &DisplaySnapshot, + display_point: DisplayPoint, + stop_at_soft_boundaries: bool, +) -> DisplayPoint { + let point = display_point.to_point(map); + let soft_line_start = map.clip_point(DisplayPoint::new(display_point.row(), 0), Bias::Right); + let line_start = map.prev_line_boundary(point).1; + + if stop_at_soft_boundaries && display_point != soft_line_start { + soft_line_start + } else { + line_start + } +} + +/// Returns the last indented position on a given line. +/// If `stop_at_soft_boundaries` is true, the returned [`DisplayPoint`] is that of a +/// displayed line (e.g. if there's soft wrap it's gonna be returned), +/// otherwise it's always going to be a start of a logical line. 
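+///
+/// A rough sketch of the resulting "smart home" behaviour, assuming a hypothetical
+/// `map: &DisplaySnapshot` over the single line `"    foo"`:
+///
+/// ```ignore
+/// // With the hypothetical text above, this jumps from column 7 to column 4 (just past
+/// // the indentation); calling it again from column 4 goes to the start of the line.
+/// let point = indented_line_beginning(map, DisplayPoint::new(DisplayRow(0), 7), true);
+/// ```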
+pub fn indented_line_beginning( + map: &DisplaySnapshot, + display_point: DisplayPoint, + stop_at_soft_boundaries: bool, +) -> DisplayPoint { + let point = display_point.to_point(map); + let soft_line_start = map.clip_point(DisplayPoint::new(display_point.row(), 0), Bias::Right); + let indent_start = Point::new( + point.row, + map.buffer_snapshot + .indent_size_for_line(MultiBufferRow(point.row)) + .len, + ) + .to_display_point(map); + let line_start = map.prev_line_boundary(point).1; + + if stop_at_soft_boundaries && soft_line_start > indent_start && display_point != soft_line_start + { + soft_line_start + } else if stop_at_soft_boundaries && display_point != indent_start { + indent_start + } else { + line_start + } +} + +/// Returns a position of the end of line. + +/// If `stop_at_soft_boundaries` is true, the returned position is that of the +/// displayed line (e.g. it could actually be in the middle of a text line if that line is soft-wrapped). +/// Otherwise it's always going to be the end of a logical line. +pub fn line_end( + map: &DisplaySnapshot, + display_point: DisplayPoint, + stop_at_soft_boundaries: bool, +) -> DisplayPoint { + let soft_line_end = map.clip_point( + DisplayPoint::new(display_point.row(), map.line_len(display_point.row())), + Bias::Left, + ); + if stop_at_soft_boundaries && display_point != soft_line_end { + soft_line_end + } else { + map.next_line_boundary(display_point.to_point(map)).1 + } +} + +/// Returns a position of the previous word boundary, where a word character is defined as either +/// uppercase letter, lowercase letter, '_' character or language-specific word character (like '-' in CSS). +pub fn previous_word_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint { + let raw_point = point.to_point(map); + let scope = map.buffer_snapshot.language_scope_at(raw_point); + + find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| { + (char_kind(&scope, left) != char_kind(&scope, right) && !right.is_whitespace()) + || left == '\n' + }) +} + +/// Returns a position of the previous subword boundary, where a subword is defined as a run of +/// word characters of the same "subkind" - where subcharacter kinds are '_' character, +/// lowerspace characters and uppercase characters. +pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint { + let raw_point = point.to_point(map); + let scope = map.buffer_snapshot.language_scope_at(raw_point); + + find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| { + let is_word_start = + char_kind(&scope, left) != char_kind(&scope, right) && !right.is_whitespace(); + let is_subword_start = + left == '_' && right != '_' || left.is_lowercase() && right.is_uppercase(); + is_word_start || is_subword_start || left == '\n' + }) +} + +/// Returns a position of the next word boundary, where a word character is defined as either +/// uppercase letter, lowercase letter, '_' character or language-specific word character (like '-' in CSS). 
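+///
+/// A small illustration, assuming a hypothetical `map: &DisplaySnapshot` over the single
+/// line `"lorem ipsum"`:
+///
+/// ```ignore
+/// // With the hypothetical text above, the motion stops just past the final 'm'.
+/// let end = next_word_end(map, DisplayPoint::new(DisplayRow(0), 1));
+/// assert_eq!(end, DisplayPoint::new(DisplayRow(0), 5));
+/// ```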
+pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint { + let raw_point = point.to_point(map); + let scope = map.buffer_snapshot.language_scope_at(raw_point); + + find_boundary(map, point, FindRange::MultiLine, |left, right| { + (char_kind(&scope, left) != char_kind(&scope, right) && !left.is_whitespace()) + || right == '\n' + }) +} + +/// Returns a position of the next subword boundary, where a subword is defined as a run of +/// word characters of the same "subkind" - where subcharacter kinds are '_' character, +/// lowerspace characters and uppercase characters. +pub fn next_subword_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint { + let raw_point = point.to_point(map); + let scope = map.buffer_snapshot.language_scope_at(raw_point); + + find_boundary(map, point, FindRange::MultiLine, |left, right| { + let is_word_end = + (char_kind(&scope, left) != char_kind(&scope, right)) && !left.is_whitespace(); + let is_subword_end = + left != '_' && right == '_' || left.is_lowercase() && right.is_uppercase(); + is_word_end || is_subword_end || right == '\n' + }) +} + +/// Returns a position of the start of the current paragraph, where a paragraph +/// is defined as a run of non-blank lines. +pub fn start_of_paragraph( + map: &DisplaySnapshot, + display_point: DisplayPoint, + mut count: usize, +) -> DisplayPoint { + let point = display_point.to_point(map); + if point.row == 0 { + return DisplayPoint::zero(); + } + + let mut found_non_blank_line = false; + for row in (0..point.row + 1).rev() { + let blank = map.buffer_snapshot.is_line_blank(MultiBufferRow(row)); + if found_non_blank_line && blank { + if count <= 1 { + return Point::new(row, 0).to_display_point(map); + } + count -= 1; + found_non_blank_line = false; + } + + found_non_blank_line |= !blank; + } + + DisplayPoint::zero() +} + +/// Returns a position of the end of the current paragraph, where a paragraph +/// is defined as a run of non-blank lines. +pub fn end_of_paragraph( + map: &DisplaySnapshot, + display_point: DisplayPoint, + mut count: usize, +) -> DisplayPoint { + let point = display_point.to_point(map); + if point.row == map.max_buffer_row().0 { + return map.max_point(); + } + + let mut found_non_blank_line = false; + for row in point.row..map.max_buffer_row().next_row().0 { + let blank = map.buffer_snapshot.is_line_blank(MultiBufferRow(row)); + if found_non_blank_line && blank { + if count <= 1 { + return Point::new(row, 0).to_display_point(map); + } + count -= 1; + found_non_blank_line = false; + } + + found_non_blank_line |= !blank; + } + + map.max_point() +} + +/// Scans for a boundary preceding the given start point `from` until a boundary is found, +/// indicated by the given predicate returning true. +/// The predicate is called with the character to the left and right of the candidate boundary location. +/// If FindRange::SingleLine is specified and no boundary is found before the start of the current line, the start of the current line will be returned. 
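+///
+/// `previous_word_start` above ultimately drives this scan through
+/// `find_preceding_boundary_display_point`; a minimal sketch with a custom predicate,
+/// assuming hypothetical inputs `snapshot: &MultiBufferSnapshot` and `from: Point`:
+///
+/// ```ignore
+/// // Walk backwards (from the hypothetical `from`) and stop where whitespace sits just to
+/// // the left of a non-whitespace character, i.e. at the start of the current run of text.
+/// let start = find_preceding_boundary_point(snapshot, from, FindRange::MultiLine, |left, right| {
+///     left.is_whitespace() && !right.is_whitespace()
+/// });
+/// ```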
+pub fn find_preceding_boundary_point( + buffer_snapshot: &MultiBufferSnapshot, + from: Point, + find_range: FindRange, + mut is_boundary: impl FnMut(char, char) -> bool, +) -> Point { + let mut prev_ch = None; + let mut offset = from.to_offset(&buffer_snapshot); + + for ch in buffer_snapshot.reversed_chars_at(offset) { + if find_range == FindRange::SingleLine && ch == '\n' { + break; + } + if let Some(prev_ch) = prev_ch { + if is_boundary(ch, prev_ch) { + break; + } + } + + offset -= ch.len_utf8(); + prev_ch = Some(ch); + } + + offset.to_point(&buffer_snapshot) +} + +/// Scans for a boundary preceding the given start point `from` until a boundary is found, +/// indicated by the given predicate returning true. +/// The predicate is called with the character to the left and right of the candidate boundary location. +/// If FindRange::SingleLine is specified and no boundary is found before the start of the current line, the start of the current line will be returned. +pub fn find_preceding_boundary_display_point( + map: &DisplaySnapshot, + from: DisplayPoint, + find_range: FindRange, + is_boundary: impl FnMut(char, char) -> bool, +) -> DisplayPoint { + let result = find_preceding_boundary_point( + &map.buffer_snapshot, + from.to_point(map), + find_range, + is_boundary, + ); + map.clip_point(result.to_display_point(map), Bias::Left) +} + +/// Scans for a boundary following the given start point until a boundary is found, indicated by the +/// given predicate returning true. The predicate is called with the character to the left and right +/// of the candidate boundary location, and will be called with `\n` characters indicating the start +/// or end of a line. The function supports optionally returning the point just before the boundary +/// is found via return_point_before_boundary. +pub fn find_boundary_point( + map: &DisplaySnapshot, + from: DisplayPoint, + find_range: FindRange, + mut is_boundary: impl FnMut(char, char) -> bool, + return_point_before_boundary: bool, +) -> DisplayPoint { + let mut offset = from.to_offset(&map, Bias::Right); + let mut prev_offset = offset; + let mut prev_ch = None; + + for ch in map.buffer_snapshot.chars_at(offset) { + if find_range == FindRange::SingleLine && ch == '\n' { + break; + } + if let Some(prev_ch) = prev_ch { + if is_boundary(prev_ch, ch) { + if return_point_before_boundary { + return map.clip_point(prev_offset.to_display_point(map), Bias::Right); + } else { + break; + } + } + } + prev_offset = offset; + offset += ch.len_utf8(); + prev_ch = Some(ch); + } + map.clip_point(offset.to_display_point(map), Bias::Right) +} + +pub fn find_boundary( + map: &DisplaySnapshot, + from: DisplayPoint, + find_range: FindRange, + is_boundary: impl FnMut(char, char) -> bool, +) -> DisplayPoint { + return find_boundary_point(map, from, find_range, is_boundary, false); +} + +pub fn find_boundary_exclusive( + map: &DisplaySnapshot, + from: DisplayPoint, + find_range: FindRange, + is_boundary: impl FnMut(char, char) -> bool, +) -> DisplayPoint { + return find_boundary_point(map, from, find_range, is_boundary, true); +} + +/// Returns an iterator over the characters following a given offset in the [`DisplaySnapshot`]. +/// The returned value also contains a range of the start/end of a returned character in +/// the [`DisplaySnapshot`]. The offsets are relative to the start of a buffer. 
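+///
+/// For example, over a hypothetical `map: &DisplaySnapshot` whose buffer contains just `"ab"`:
+///
+/// ```ignore
+/// // With the hypothetical two-character buffer above:
+/// let mut chars = chars_after(map, 0);
+/// assert_eq!(chars.next(), Some(('a', 0..1)));
+/// assert_eq!(chars.next(), Some(('b', 1..2)));
+/// ```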
+pub fn chars_after(
+    map: &DisplaySnapshot,
+    mut offset: usize,
+) -> impl Iterator<Item = (char, Range<usize>)> + '_ {
+    map.buffer_snapshot.chars_at(offset).map(move |ch| {
+        let before = offset;
+        offset = offset + ch.len_utf8();
+        (ch, before..offset)
+    })
+}
+
+/// Returns a reverse iterator over the characters preceding a given offset in the [`DisplaySnapshot`].
+/// The returned value also contains a range of the start/end of a returned character in
+/// the [`DisplaySnapshot`]. The offsets are relative to the start of a buffer.
+pub fn chars_before(
+    map: &DisplaySnapshot,
+    mut offset: usize,
+) -> impl Iterator<Item = (char, Range<usize>)> + '_ {
+    map.buffer_snapshot
+        .reversed_chars_at(offset)
+        .map(move |ch| {
+            let after = offset;
+            offset = offset - ch.len_utf8();
+            (ch, offset..after)
+        })
+}
+
+pub(crate) fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
+    let raw_point = point.to_point(map);
+    let scope = map.buffer_snapshot.language_scope_at(raw_point);
+    let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
+    let text = &map.buffer_snapshot;
+    let next_char_kind = text.chars_at(ix).next().map(|c| char_kind(&scope, c));
+    let prev_char_kind = text
+        .reversed_chars_at(ix)
+        .next()
+        .map(|c| char_kind(&scope, c));
+    prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
+}
+
+pub(crate) fn surrounding_word(
+    map: &DisplaySnapshot,
+    position: DisplayPoint,
+) -> Range<DisplayPoint> {
+    let position = map
+        .clip_point(position, Bias::Left)
+        .to_offset(map, Bias::Left);
+    let (range, _) = map.buffer_snapshot.surrounding_word(position);
+    let start = range
+        .start
+        .to_point(&map.buffer_snapshot)
+        .to_display_point(map);
+    let end = range
+        .end
+        .to_point(&map.buffer_snapshot)
+        .to_display_point(map);
+    start..end
+}
+
+/// Returns a list of lines (represented as a [`DisplayPoint`] range) contained
+/// within a passed range.
+///
+/// The line ranges are **always** going to be in bounds of a requested range, which means that
+/// the first and the last lines might not necessarily represent the
+/// full range of a logical line (as their `.start`/`.end` values are clipped to those of a passed in range).
+pub fn split_display_range_by_lines(
+    map: &DisplaySnapshot,
+    range: Range<DisplayPoint>,
+) -> Vec<Range<DisplayPoint>> {
+    let mut result = Vec::new();
+
+    let mut start = range.start;
+    // Loop over all the covered rows until the one containing the range end
+    for row in range.start.row().0..range.end.row().0 {
+        let row_end_column = map.line_len(DisplayRow(row));
+        let end = map.clip_point(
+            DisplayPoint::new(DisplayRow(row), row_end_column),
+            Bias::Left,
+        );
+        if start != end {
+            result.push(start..end);
+        }
+        start = map.clip_point(DisplayPoint::new(DisplayRow(row + 1), 0), Bias::Left);
+    }
+
+    // Add the final range, from the last line's start to the original range end.
+ result.push(start..range.end); + + result +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + display_map::Inlay, + test::{editor_test_context::EditorTestContext, marked_display_snapshot}, + Buffer, DisplayMap, DisplayRow, ExcerptRange, InlayId, MultiBuffer, + }; + use gpui::{font, Context as _}; + use language::Capability; + use project::Project; + use settings::SettingsStore; + use util::post_inc; + + #[gpui::test] + fn test_previous_word_start(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert(marked_text: &str, cx: &mut gpui::AppContext) { + let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); + assert_eq!( + previous_word_start(&snapshot, display_points[1]), + display_points[0] + ); + } + + assert("\nˇ ˇlorem", cx); + assert("ˇ\nˇ lorem", cx); + assert(" ˇloremˇ", cx); + assert("ˇ ˇlorem", cx); + assert(" ˇlorˇem", cx); + assert("\nlorem\nˇ ˇipsum", cx); + assert("\n\nˇ\nˇ", cx); + assert(" ˇlorem ˇipsum", cx); + assert("loremˇ-ˇipsum", cx); + assert("loremˇ-#$@ˇipsum", cx); + assert("ˇlorem_ˇipsum", cx); + assert(" ˇdefγˇ", cx); + assert(" ˇbcΔˇ", cx); + assert(" abˇ——ˇcd", cx); + } + + #[gpui::test] + fn test_previous_subword_start(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert(marked_text: &str, cx: &mut gpui::AppContext) { + let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); + assert_eq!( + previous_subword_start(&snapshot, display_points[1]), + display_points[0] + ); + } + + // Subword boundaries are respected + assert("lorem_ˇipˇsum", cx); + assert("lorem_ˇipsumˇ", cx); + assert("ˇlorem_ˇipsum", cx); + assert("lorem_ˇipsum_ˇdolor", cx); + assert("loremˇIpˇsum", cx); + assert("loremˇIpsumˇ", cx); + + // Word boundaries are still respected + assert("\nˇ ˇlorem", cx); + assert(" ˇloremˇ", cx); + assert(" ˇlorˇem", cx); + assert("\nlorem\nˇ ˇipsum", cx); + assert("\n\nˇ\nˇ", cx); + assert(" ˇlorem ˇipsum", cx); + assert("loremˇ-ˇipsum", cx); + assert("loremˇ-#$@ˇipsum", cx); + assert(" ˇdefγˇ", cx); + assert(" bcˇΔˇ", cx); + assert(" ˇbcδˇ", cx); + assert(" abˇ——ˇcd", cx); + } + + #[gpui::test] + fn test_find_preceding_boundary(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert( + marked_text: &str, + cx: &mut gpui::AppContext, + is_boundary: impl FnMut(char, char) -> bool, + ) { + let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); + assert_eq!( + find_preceding_boundary_display_point( + &snapshot, + display_points[1], + FindRange::MultiLine, + is_boundary + ), + display_points[0] + ); + } + + assert("abcˇdef\ngh\nijˇk", cx, |left, right| { + left == 'c' && right == 'd' + }); + assert("abcdef\nˇgh\nijˇk", cx, |left, right| { + left == '\n' && right == 'g' + }); + let mut line_count = 0; + assert("abcdef\nˇgh\nijˇk", cx, |left, _| { + if left == '\n' { + line_count += 1; + line_count == 2 + } else { + false + } + }); + } + + #[gpui::test] + fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) { + init_test(cx); + + let input_text = "abcdefghijklmnopqrstuvwxys"; + let font = font("Helvetica"); + let font_size = px(14.0); + let buffer = MultiBuffer::build_simple(input_text, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let display_map = + cx.new_model(|cx| DisplayMap::new(buffer, font, font_size, None, 1, 1, cx)); + + // add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary + let mut id = 0; + let inlays = (0..buffer_snapshot.len()) + .flat_map(|offset| { + [ + Inlay { + 
id: InlayId::Suggestion(post_inc(&mut id)), + position: buffer_snapshot.anchor_at(offset, Bias::Left), + text: "test".into(), + }, + Inlay { + id: InlayId::Suggestion(post_inc(&mut id)), + position: buffer_snapshot.anchor_at(offset, Bias::Right), + text: "test".into(), + }, + Inlay { + id: InlayId::Hint(post_inc(&mut id)), + position: buffer_snapshot.anchor_at(offset, Bias::Left), + text: "test".into(), + }, + Inlay { + id: InlayId::Hint(post_inc(&mut id)), + position: buffer_snapshot.anchor_at(offset, Bias::Right), + text: "test".into(), + }, + ] + }) + .collect(); + let snapshot = display_map.update(cx, |map, cx| { + map.splice_inlays(Vec::new(), inlays, cx); + map.snapshot(cx) + }); + + assert_eq!( + find_preceding_boundary_display_point( + &snapshot, + buffer_snapshot.len().to_display_point(&snapshot), + FindRange::MultiLine, + |left, _| left == 'e', + ), + snapshot + .buffer_snapshot + .offset_to_point(5) + .to_display_point(&snapshot), + "Should not stop at inlays when looking for boundaries" + ); + } + + #[gpui::test] + fn test_next_word_end(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert(marked_text: &str, cx: &mut gpui::AppContext) { + let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); + assert_eq!( + next_word_end(&snapshot, display_points[0]), + display_points[1] + ); + } + + assert("\nˇ loremˇ", cx); + assert(" ˇloremˇ", cx); + assert(" lorˇemˇ", cx); + assert(" loremˇ ˇ\nipsum\n", cx); + assert("\nˇ\nˇ\n\n", cx); + assert("loremˇ ipsumˇ ", cx); + assert("loremˇ-ˇipsum", cx); + assert("loremˇ#$@-ˇipsum", cx); + assert("loremˇ_ipsumˇ", cx); + assert(" ˇbcΔˇ", cx); + assert(" abˇ——ˇcd", cx); + } + + #[gpui::test] + fn test_next_subword_end(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert(marked_text: &str, cx: &mut gpui::AppContext) { + let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); + assert_eq!( + next_subword_end(&snapshot, display_points[0]), + display_points[1] + ); + } + + // Subword boundaries are respected + assert("loˇremˇ_ipsum", cx); + assert("ˇloremˇ_ipsum", cx); + assert("loremˇ_ipsumˇ", cx); + assert("loremˇ_ipsumˇ_dolor", cx); + assert("loˇremˇIpsum", cx); + assert("loremˇIpsumˇDolor", cx); + + // Word boundaries are still respected + assert("\nˇ loremˇ", cx); + assert(" ˇloremˇ", cx); + assert(" lorˇemˇ", cx); + assert(" loremˇ ˇ\nipsum\n", cx); + assert("\nˇ\nˇ\n\n", cx); + assert("loremˇ ipsumˇ ", cx); + assert("loremˇ-ˇipsum", cx); + assert("loremˇ#$@-ˇipsum", cx); + assert("loremˇ_ipsumˇ", cx); + assert(" ˇbcˇΔ", cx); + assert(" abˇ——ˇcd", cx); + } + + #[gpui::test] + fn test_find_boundary(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert( + marked_text: &str, + cx: &mut gpui::AppContext, + is_boundary: impl FnMut(char, char) -> bool, + ) { + let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); + assert_eq!( + find_boundary( + &snapshot, + display_points[0], + FindRange::MultiLine, + is_boundary, + ), + display_points[1] + ); + } + + assert("abcˇdef\ngh\nijˇk", cx, |left, right| { + left == 'j' && right == 'k' + }); + assert("abˇcdef\ngh\nˇijk", cx, |left, right| { + left == '\n' && right == 'i' + }); + let mut line_count = 0; + assert("abcˇdef\ngh\nˇijk", cx, |left, _| { + if left == '\n' { + line_count += 1; + line_count == 2 + } else { + false + } + }); + } + + #[gpui::test] + fn test_surrounding_word(cx: &mut gpui::AppContext) { + init_test(cx); + + fn assert(marked_text: &str, cx: &mut gpui::AppContext) { + let (snapshot, display_points) = 
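// A rough sketch of the subword rule the assertions above encode: besides
// ordinary word boundaries, a caret also stops at '_' and at a lower-to-upper
// case transition (camelCase). This illustrates the rule only; it is not the
// movement module's actual predicate.
fn is_subword_boundary(left: char, right: char) -> bool {
    let is_word = |c: char| c.is_alphanumeric() || c == '_';
    // lowerUpper transition, e.g. "loremIpsum" splits before 'I'.
    let camel = left.is_lowercase() && right.is_uppercase();
    // snake_case: stop after the underscore, before the next word character.
    let snake = left == '_' && right != '_';
    // Plain word boundary: transition from a non-word to a word character.
    let word_start = !is_word(left) && is_word(right);
    camel || snake || word_start
}

fn main() {
    assert!(is_subword_boundary('m', 'I')); // lorem|Ipsum
    assert!(is_subword_boundary('_', 'i')); // lorem_|ipsum
    assert!(!is_subword_boundary('o', 'r')); // no boundary inside "lorem"
}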
marked_display_snapshot(marked_text, cx); + assert_eq!( + surrounding_word(&snapshot, display_points[1]), + display_points[0]..display_points[2], + "{}", + marked_text + ); + } + + assert("ˇˇloremˇ ipsum", cx); + assert("ˇloˇremˇ ipsum", cx); + assert("ˇloremˇˇ ipsum", cx); + assert("loremˇ ˇ ˇipsum", cx); + assert("lorem\nˇˇˇ\nipsum", cx); + assert("lorem\nˇˇipsumˇ", cx); + assert("loremˇ,ˇˇ ipsum", cx); + assert("ˇloremˇˇ, ipsum", cx); + } + + #[gpui::test] + async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + init_test(cx); + }); + + let mut cx = EditorTestContext::new(cx).await; + let editor = cx.editor.clone(); + let window = cx.window; + _ = cx.update_window(window, |_, cx| { + let text_layout_details = + editor.update(cx, |editor, cx| editor.text_layout_details(cx)); + + let font = font("Helvetica"); + + let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx)); + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + multibuffer.push_excerpts( + buffer.clone(), + [ + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 4), + primary: None, + }, + ExcerptRange { + context: Point::new(2, 0)..Point::new(3, 2), + primary: None, + }, + ], + cx, + ); + multibuffer + }); + let display_map = + cx.new_model(|cx| DisplayMap::new(multibuffer, font, px(14.0), None, 2, 2, cx)); + let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); + + assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn"); + + let col_2_x = snapshot + .x_for_display_point(DisplayPoint::new(DisplayRow(2), 2), &text_layout_details); + + // Can't move up into the first excerpt's header + assert_eq!( + up( + &snapshot, + DisplayPoint::new(DisplayRow(2), 2), + SelectionGoal::HorizontalPosition(col_2_x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(2), 0), + SelectionGoal::HorizontalPosition(0.0) + ), + ); + assert_eq!( + up( + &snapshot, + DisplayPoint::new(DisplayRow(2), 0), + SelectionGoal::None, + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(2), 0), + SelectionGoal::HorizontalPosition(0.0) + ), + ); + + let col_4_x = snapshot + .x_for_display_point(DisplayPoint::new(DisplayRow(3), 4), &text_layout_details); + + // Move up and down within first excerpt + assert_eq!( + up( + &snapshot, + DisplayPoint::new(DisplayRow(3), 4), + SelectionGoal::HorizontalPosition(col_4_x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(2), 3), + SelectionGoal::HorizontalPosition(col_4_x.0) + ), + ); + assert_eq!( + down( + &snapshot, + DisplayPoint::new(DisplayRow(2), 3), + SelectionGoal::HorizontalPosition(col_4_x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(3), 4), + SelectionGoal::HorizontalPosition(col_4_x.0) + ), + ); + + let col_5_x = snapshot + .x_for_display_point(DisplayPoint::new(DisplayRow(6), 5), &text_layout_details); + + // Move up and down across second excerpt's header + assert_eq!( + up( + &snapshot, + DisplayPoint::new(DisplayRow(6), 5), + SelectionGoal::HorizontalPosition(col_5_x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(3), 4), + SelectionGoal::HorizontalPosition(col_5_x.0) + ), + ); + assert_eq!( + down( + &snapshot, + DisplayPoint::new(DisplayRow(3), 4), + SelectionGoal::HorizontalPosition(col_5_x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(6), 5), + SelectionGoal::HorizontalPosition(col_5_x.0) + ), + ); + + let 
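// The up/down assertions above all preserve a horizontal "goal" while the real
// column is clamped to the shorter line and restored on longer lines. A very
// simplified row/column model of that behaviour (the real code tracks the goal
// in pixels against the display map and also skips excerpt header rows):
fn move_up(lines: &[&str], row: usize, col: usize, goal: &mut Option<usize>) -> (usize, usize) {
    let goal_col = *goal.get_or_insert(col);
    if row == 0 {
        // Can't move above the first row; pin to its start, like the test does.
        return (0, 0);
    }
    let new_row = row - 1;
    let new_col = goal_col.min(lines[new_row].len());
    (new_row, new_col)
}

fn main() {
    let lines = ["abc", "defg", "hijkl"];
    let mut goal = None;
    // Starting at "hijkl" column 4, moving up lands on "defg" column 4 and
    // remembers 4 as the goal for subsequent moves.
    assert_eq!(move_up(&lines, 2, 4, &mut goal), (1, 4));
    // Moving up again clamps to the length of "abc" without losing the goal.
    assert_eq!(move_up(&lines, 1, 4, &mut goal), (0, 3));
}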
max_point_x = snapshot + .x_for_display_point(DisplayPoint::new(DisplayRow(7), 2), &text_layout_details); + + // Can't move down off the end + assert_eq!( + down( + &snapshot, + DisplayPoint::new(DisplayRow(7), 0), + SelectionGoal::HorizontalPosition(0.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(7), 2), + SelectionGoal::HorizontalPosition(max_point_x.0) + ), + ); + assert_eq!( + down( + &snapshot, + DisplayPoint::new(DisplayRow(7), 2), + SelectionGoal::HorizontalPosition(max_point_x.0), + false, + &text_layout_details + ), + ( + DisplayPoint::new(DisplayRow(7), 2), + SelectionGoal::HorizontalPosition(max_point_x.0) + ), + ); + }); + } + + fn init_test(cx: &mut gpui::AppContext) { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + language::init(cx); + crate::init(cx); + Project::init_settings(cx); + } +} diff --git a/crates/editor/src/persistence.rs b/crates/editor/src/persistence.rs new file mode 100644 index 0000000..6e37735 --- /dev/null +++ b/crates/editor/src/persistence.rs @@ -0,0 +1,83 @@ +use std::path::PathBuf; + +use db::sqlez_macros::sql; +use db::{define_connection, query}; + +use workspace::{ItemId, WorkspaceDb, WorkspaceId}; + +define_connection!( + // Current schema shape using pseudo-rust syntax: + // editors( + // item_id: usize, + // workspace_id: usize, + // path: PathBuf, + // scroll_top_row: usize, + // scroll_vertical_offset: f32, + // scroll_horizontal_offset: f32, + // ) + pub static ref DB: EditorDb = + &[sql! ( + CREATE TABLE editors( + item_id INTEGER NOT NULL, + workspace_id INTEGER NOT NULL, + path BLOB NOT NULL, + PRIMARY KEY(item_id, workspace_id), + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ON UPDATE CASCADE + ) STRICT; + ), + sql! ( + ALTER TABLE editors ADD COLUMN scroll_top_row INTEGER NOT NULL DEFAULT 0; + ALTER TABLE editors ADD COLUMN scroll_horizontal_offset REAL NOT NULL DEFAULT 0; + ALTER TABLE editors ADD COLUMN scroll_vertical_offset REAL NOT NULL DEFAULT 0; + )]; +); + +impl EditorDb { + query! { + pub fn get_path(item_id: ItemId, workspace_id: WorkspaceId) -> Result> { + SELECT path FROM editors + WHERE item_id = ? AND workspace_id = ? + } + } + + query! { + pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> { + INSERT INTO editors + (item_id, workspace_id, path) + VALUES + (?1, ?2, ?3) + ON CONFLICT DO UPDATE SET + item_id = ?1, + workspace_id = ?2, + path = ?3 + } + } + + // Returns the scroll top row, and offset + query! { + pub fn get_scroll_position(item_id: ItemId, workspace_id: WorkspaceId) -> Result> { + SELECT scroll_top_row, scroll_horizontal_offset, scroll_vertical_offset + FROM editors + WHERE item_id = ? AND workspace_id = ? + } + } + + query! 
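// The define_connection!/query! macros above boil down to a small amount of SQL.
// Spelled out by hand for reference (illustrative only; the real code goes
// through the sqlez wrappers and their migration machinery), with the three
// ALTER TABLE migration columns folded into the CREATE statement:
const CREATE_EDITORS: &str = "
    CREATE TABLE editors(
        item_id INTEGER NOT NULL,
        workspace_id INTEGER NOT NULL,
        path BLOB NOT NULL,
        scroll_top_row INTEGER NOT NULL DEFAULT 0,
        scroll_horizontal_offset REAL NOT NULL DEFAULT 0,
        scroll_vertical_offset REAL NOT NULL DEFAULT 0,
        PRIMARY KEY(item_id, workspace_id),
        FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
            ON DELETE CASCADE ON UPDATE CASCADE
    ) STRICT;
";

// Upsert used by save_path: (item_id, workspace_id) is the primary key, so
// ON CONFLICT rewrites the existing row in place.
const SAVE_PATH: &str = "
    INSERT INTO editors (item_id, workspace_id, path)
    VALUES (?1, ?2, ?3)
    ON CONFLICT DO UPDATE SET item_id = ?1, workspace_id = ?2, path = ?3;
";

fn main() {
    // Nothing to execute here; the constants just document the resulting schema.
    println!("{}\n{}", CREATE_EDITORS, SAVE_PATH);
}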
{ + pub async fn save_scroll_position( + item_id: ItemId, + workspace_id: WorkspaceId, + top_row: u32, + vertical_offset: f32, + horizontal_offset: f32 + ) -> Result<()> { + UPDATE OR IGNORE editors + SET + scroll_top_row = ?3, + scroll_horizontal_offset = ?4, + scroll_vertical_offset = ?5 + WHERE item_id = ?1 AND workspace_id = ?2 + } + } +} diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs new file mode 100644 index 0000000..32fd03a --- /dev/null +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -0,0 +1,123 @@ +use std::sync::Arc; + +use anyhow::Context as _; +use gpui::{Context, View, ViewContext, VisualContext, WindowContext}; +use language::Language; +use multi_buffer::MultiBuffer; +use project::lsp_ext_command::ExpandMacro; +use text::ToPointUtf16; + +use crate::{element::register_action, Editor, ExpandMacroRecursively}; + +pub fn apply_related_actions(editor: &View, cx: &mut WindowContext) { + let is_rust_related = editor.update(cx, |editor, cx| { + editor + .buffer() + .read(cx) + .all_buffers() + .iter() + .any(|b| match b.read(cx).language() { + Some(l) => is_rust_language(l), + None => false, + }) + }); + + if is_rust_related { + register_action(editor, cx, expand_macro_recursively); + } +} + +pub fn expand_macro_recursively( + editor: &mut Editor, + _: &ExpandMacroRecursively, + cx: &mut ViewContext<'_, Editor>, +) { + if editor.selections.count() == 0 { + return; + } + let Some(project) = &editor.project else { + return; + }; + let Some(workspace) = editor.workspace() else { + return; + }; + + let multibuffer = editor.buffer().read(cx); + + let Some((trigger_anchor, rust_language, server_to_query, buffer)) = editor + .selections + .disjoint_anchors() + .into_iter() + .filter(|selection| selection.start == selection.end) + .filter_map(|selection| Some((selection.start.buffer_id?, selection.start))) + .filter_map(|(buffer_id, trigger_anchor)| { + let buffer = multibuffer.buffer(buffer_id)?; + let rust_language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?; + if !is_rust_language(&rust_language) { + return None; + } + Some((trigger_anchor, rust_language, buffer)) + }) + .find_map(|(trigger_anchor, rust_language, buffer)| { + project + .read(cx) + .language_servers_for_buffer(buffer.read(cx), cx) + .find_map(|(adapter, server)| { + if adapter.name.0.as_ref() == "rust-analyzer" { + Some(( + trigger_anchor, + Arc::clone(&rust_language), + server.server_id(), + buffer.clone(), + )) + } else { + None + } + }) + }) + else { + return; + }; + + let project = project.clone(); + let buffer_snapshot = buffer.read(cx).snapshot(); + let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let expand_macro_task = project.update(cx, |project, cx| { + project.request_lsp( + buffer, + project::LanguageServerToQuery::Other(server_to_query), + ExpandMacro { position }, + cx, + ) + }); + cx.spawn(|_editor, mut cx| async move { + let macro_expansion = expand_macro_task.await.context("expand macro")?; + if macro_expansion.is_empty() { + log::info!("Empty macro expansion for position {position:?}"); + return Ok(()); + } + + let buffer = project + .update(&mut cx, |project, cx| project.create_buffer(cx))? 
+ .await?; + workspace.update(&mut cx, |workspace, cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, macro_expansion.expansion)], None, cx); + buffer.set_language(Some(rust_language), cx) + }); + let multibuffer = cx.new_model(|cx| { + MultiBuffer::singleton(buffer, cx).with_title(macro_expansion.name) + }); + workspace.add_item_to_active_pane( + Box::new(cx.new_view(|cx| Editor::for_multibuffer(multibuffer, Some(project), cx))), + None, + cx, + ); + }) + }) + .detach_and_log_err(cx); +} + +fn is_rust_language(language: &Language) -> bool { + language.name().as_ref() == "Rust" +} diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs new file mode 100644 index 0000000..1107c97 --- /dev/null +++ b/crates/editor/src/scroll.rs @@ -0,0 +1,503 @@ +mod actions; +pub(crate) mod autoscroll; +pub(crate) mod scroll_amount; + +use crate::{ + display_map::{DisplaySnapshot, ToDisplayPoint}, + hover_popover::hide_hover, + persistence::DB, + Anchor, DisplayPoint, DisplayRow, Editor, EditorEvent, EditorMode, EditorSettings, + InlayHintRefreshReason, MultiBufferSnapshot, RowExt, ToPoint, +}; +pub use autoscroll::{Autoscroll, AutoscrollStrategy}; +use gpui::{point, px, AppContext, Entity, Global, Pixels, Task, ViewContext, WindowContext}; +use language::{Bias, Point}; +pub use scroll_amount::ScrollAmount; +use settings::Settings; +use std::{ + cmp::Ordering, + time::{Duration, Instant}, +}; +use util::ResultExt; +use workspace::{ItemId, WorkspaceId}; + +pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28); +const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1); + +#[derive(Default)] +pub struct ScrollbarAutoHide(pub bool); + +impl Global for ScrollbarAutoHide {} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct ScrollAnchor { + pub offset: gpui::Point, + pub anchor: Anchor, +} + +impl ScrollAnchor { + fn new() -> Self { + Self { + offset: gpui::Point::default(), + anchor: Anchor::min(), + } + } + + pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point { + let mut scroll_position = self.offset; + if self.anchor == Anchor::min() { + scroll_position.y = 0.; + } else { + let scroll_top = self.anchor.to_display_point(snapshot).row().as_f32(); + scroll_position.y = scroll_top + scroll_position.y; + } + scroll_position + } + + pub fn top_row(&self, buffer: &MultiBufferSnapshot) -> u32 { + self.anchor.to_point(buffer).row + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum Axis { + Vertical, + Horizontal, +} + +#[derive(Clone, Copy, Debug)] +pub struct OngoingScroll { + last_event: Instant, + axis: Option, +} + +impl OngoingScroll { + fn new() -> Self { + Self { + last_event: Instant::now() - SCROLL_EVENT_SEPARATION, + axis: None, + } + } + + pub fn filter(&self, delta: &mut gpui::Point) -> Option { + const UNLOCK_PERCENT: f32 = 1.9; + const UNLOCK_LOWER_BOUND: Pixels = px(6.); + let mut axis = self.axis; + + let x = delta.x.abs(); + let y = delta.y.abs(); + let duration = Instant::now().duration_since(self.last_event); + if duration > SCROLL_EVENT_SEPARATION { + //New ongoing scroll will start, determine axis + axis = if x <= y { + Some(Axis::Vertical) + } else { + Some(Axis::Horizontal) + }; + } else if x.max(y) >= UNLOCK_LOWER_BOUND { + //Check if the current ongoing will need to unlock + match axis { + Some(Axis::Vertical) => { + if x > y && x >= y * UNLOCK_PERCENT { + axis = None; + } + } + + Some(Axis::Horizontal) => { + if y > x && y >= x * UNLOCK_PERCENT { + axis = None; + } + } + + None => {} + } + } + + match 
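// The ExpandMacro command used above corresponds to rust-analyzer's
// "rust-analyzer/expandMacro" LSP extension: the client sends a document and a
// position, and receives the macro's name plus its expanded source (or nothing
// when the cursor is not on a macro call). A plain-data sketch of those shapes,
// without the lsp-types/serde plumbing the real command uses:
#[allow(dead_code)]
struct ExpandMacroParams {
    /// URI of the document the cursor is in.
    text_document_uri: String,
    /// Zero-based line and UTF-16 column, as in the LSP `Position` type.
    line: u32,
    character: u32,
}

struct ExpandedMacro {
    /// Name of the expanded macro; the code above uses it as the buffer title.
    name: String,
    /// The expansion itself, edited into a fresh scratch buffer.
    expansion: String,
}

fn handle(result: Option<ExpandedMacro>) {
    match result {
        Some(m) if !m.expansion.is_empty() => println!("expanded {}", m.name),
        // Mirrors the log::info! branch above for empty expansions.
        _ => println!("nothing to expand"),
    }
}

fn main() {
    handle(Some(ExpandedMacro {
        name: "vec!".into(),
        expansion: "<expanded source>".into(),
    }));
    handle(None);
}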
axis { + Some(Axis::Vertical) => { + *delta = point(px(0.), delta.y); + } + Some(Axis::Horizontal) => { + *delta = point(delta.x, px(0.)); + } + None => {} + } + + axis + } +} + +pub struct ScrollManager { + pub(crate) vertical_scroll_margin: f32, + anchor: ScrollAnchor, + ongoing: OngoingScroll, + autoscroll_request: Option<(Autoscroll, bool)>, + last_autoscroll: Option<(gpui::Point, f32, f32, AutoscrollStrategy)>, + show_scrollbars: bool, + hide_scrollbar_task: Option>, + dragging_scrollbar: bool, + visible_line_count: Option, + forbid_vertical_scroll: bool, +} + +impl ScrollManager { + pub fn new(cx: &mut WindowContext) -> Self { + ScrollManager { + vertical_scroll_margin: EditorSettings::get_global(cx).vertical_scroll_margin, + anchor: ScrollAnchor::new(), + ongoing: OngoingScroll::new(), + autoscroll_request: None, + show_scrollbars: true, + hide_scrollbar_task: None, + dragging_scrollbar: false, + last_autoscroll: None, + visible_line_count: None, + forbid_vertical_scroll: false, + } + } + + pub fn clone_state(&mut self, other: &Self) { + self.anchor = other.anchor; + self.ongoing = other.ongoing; + } + + pub fn anchor(&self) -> ScrollAnchor { + self.anchor + } + + pub fn ongoing_scroll(&self) -> OngoingScroll { + self.ongoing + } + + pub fn update_ongoing_scroll(&mut self, axis: Option) { + self.ongoing.last_event = Instant::now(); + self.ongoing.axis = axis; + } + + pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point { + self.anchor.scroll_position(snapshot) + } + + fn set_scroll_position( + &mut self, + scroll_position: gpui::Point, + map: &DisplaySnapshot, + local: bool, + autoscroll: bool, + workspace_id: Option, + cx: &mut ViewContext, + ) { + if self.forbid_vertical_scroll { + return; + } + let (new_anchor, top_row) = if scroll_position.y <= 0. 
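// The axis lock in OngoingScroll::filter can be summarised with plain numbers:
// pick the dominant axis when a new gesture starts, zero out the other component
// while locked, and release the lock once the cross axis clearly dominates. A
// simplified f32 version of that logic (a sketch, not the gpui pixel types):
#[derive(Clone, Copy)]
enum Axis { Vertical, Horizontal }

fn filter(delta: (f32, f32), axis: &mut Option<Axis>, new_gesture: bool) -> (f32, f32) {
    const UNLOCK_PERCENT: f32 = 1.9;
    let (x, y) = (delta.0.abs(), delta.1.abs());
    if new_gesture {
        *axis = Some(if x <= y { Axis::Vertical } else { Axis::Horizontal });
    } else {
        match *axis {
            Some(Axis::Vertical) if x > y && x >= y * UNLOCK_PERCENT => *axis = None,
            Some(Axis::Horizontal) if y > x && y >= x * UNLOCK_PERCENT => *axis = None,
            _ => {}
        }
    }
    match *axis {
        Some(Axis::Vertical) => (0.0, delta.1),
        Some(Axis::Horizontal) => (delta.0, 0.0),
        None => delta,
    }
}

fn main() {
    let mut axis = None;
    // A mostly-vertical gesture locks to the vertical axis and drops x jitter.
    assert_eq!(filter((1.0, 10.0), &mut axis, true), (0.0, 10.0));
    // A strong horizontal swing unlocks the axis and passes the delta through.
    assert_eq!(filter((25.0, 2.0), &mut axis, false), (25.0, 2.0));
}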
{ + ( + ScrollAnchor { + anchor: Anchor::min(), + offset: scroll_position.max(&gpui::Point::default()), + }, + 0, + ) + } else { + let scroll_top_buffer_point = + DisplayPoint::new(DisplayRow(scroll_position.y as u32), 0).to_point(&map); + let top_anchor = map + .buffer_snapshot + .anchor_at(scroll_top_buffer_point, Bias::Right); + + ( + ScrollAnchor { + anchor: top_anchor, + offset: point( + scroll_position.x.max(0.), + scroll_position.y - top_anchor.to_display_point(&map).row().as_f32(), + ), + }, + scroll_top_buffer_point.row, + ) + }; + + self.set_anchor(new_anchor, top_row, local, autoscroll, workspace_id, cx); + } + + fn set_anchor( + &mut self, + anchor: ScrollAnchor, + top_row: u32, + local: bool, + autoscroll: bool, + workspace_id: Option, + cx: &mut ViewContext, + ) { + if self.forbid_vertical_scroll { + return; + } + self.anchor = anchor; + cx.emit(EditorEvent::ScrollPositionChanged { local, autoscroll }); + self.show_scrollbar(cx); + self.autoscroll_request.take(); + if let Some(workspace_id) = workspace_id { + let item_id = cx.view().entity_id().as_u64() as ItemId; + + cx.foreground_executor() + .spawn(async move { + DB.save_scroll_position( + item_id, + workspace_id, + top_row, + anchor.offset.x, + anchor.offset.y, + ) + .await + .log_err() + }) + .detach() + } + cx.notify(); + } + + pub fn show_scrollbar(&mut self, cx: &mut ViewContext) { + if !self.show_scrollbars { + self.show_scrollbars = true; + cx.notify(); + } + + if cx.default_global::().0 { + self.hide_scrollbar_task = Some(cx.spawn(|editor, mut cx| async move { + cx.background_executor() + .timer(SCROLLBAR_SHOW_INTERVAL) + .await; + editor + .update(&mut cx, |editor, cx| { + editor.scroll_manager.show_scrollbars = false; + cx.notify(); + }) + .log_err(); + })); + } else { + self.hide_scrollbar_task = None; + } + } + + pub fn scrollbars_visible(&self) -> bool { + self.show_scrollbars + } + + pub fn autoscroll_requested(&self) -> bool { + self.autoscroll_request.is_some() + } + + pub fn is_dragging_scrollbar(&self) -> bool { + self.dragging_scrollbar + } + + pub fn set_is_dragging_scrollbar(&mut self, dragging: bool, cx: &mut ViewContext) { + if dragging != self.dragging_scrollbar { + self.dragging_scrollbar = dragging; + cx.notify(); + } + } + + pub fn clamp_scroll_left(&mut self, max: f32) -> bool { + if max < self.anchor.offset.x { + self.anchor.offset.x = max; + true + } else { + false + } + } + + pub fn set_forbid_vertical_scroll(&mut self, forbid: bool) { + self.forbid_vertical_scroll = forbid; + } + + pub fn forbid_vertical_scroll(&self) -> bool { + self.forbid_vertical_scroll + } +} + +impl Editor { + pub fn vertical_scroll_margin(&self) -> usize { + self.scroll_manager.vertical_scroll_margin as usize + } + + pub fn set_vertical_scroll_margin(&mut self, margin_rows: usize, cx: &mut ViewContext) { + self.scroll_manager.vertical_scroll_margin = margin_rows as f32; + cx.notify(); + } + + pub fn visible_line_count(&self) -> Option { + self.scroll_manager.visible_line_count + } + + pub(crate) fn set_visible_line_count(&mut self, lines: f32, cx: &mut ViewContext) { + let opened_first_time = self.scroll_manager.visible_line_count.is_none(); + self.scroll_manager.visible_line_count = Some(lines); + if opened_first_time { + cx.spawn(|editor, mut cx| async move { + editor + .update(&mut cx, |editor, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx) + }) + .ok() + }) + .detach() + } + } + + pub fn apply_scroll_delta( + &mut self, + scroll_delta: gpui::Point, + cx: &mut ViewContext, + ) { + if 
self.scroll_manager.forbid_vertical_scroll { + return; + } + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let position = self.scroll_manager.anchor.scroll_position(&display_map) + scroll_delta; + self.set_scroll_position_taking_display_map(position, true, false, display_map, cx); + } + + pub fn set_scroll_position( + &mut self, + scroll_position: gpui::Point, + cx: &mut ViewContext, + ) { + if self.scroll_manager.forbid_vertical_scroll { + return; + } + self.set_scroll_position_internal(scroll_position, true, false, cx); + } + + pub(crate) fn set_scroll_position_internal( + &mut self, + scroll_position: gpui::Point, + local: bool, + autoscroll: bool, + cx: &mut ViewContext, + ) { + let map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.set_scroll_position_taking_display_map(scroll_position, local, autoscroll, map, cx); + } + + fn set_scroll_position_taking_display_map( + &mut self, + scroll_position: gpui::Point, + local: bool, + autoscroll: bool, + display_map: DisplaySnapshot, + cx: &mut ViewContext, + ) { + hide_hover(self, cx); + let workspace_id = self.workspace.as_ref().map(|workspace| workspace.1); + self.scroll_manager.set_scroll_position( + scroll_position, + &display_map, + local, + autoscroll, + workspace_id, + cx, + ); + + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + } + + pub fn scroll_position(&self, cx: &mut ViewContext) -> gpui::Point { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.scroll_manager.anchor.scroll_position(&display_map) + } + + pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext) { + hide_hover(self, cx); + let workspace_id = self.workspace.as_ref().map(|workspace| workspace.1); + let top_row = scroll_anchor + .anchor + .to_point(&self.buffer().read(cx).snapshot(cx)) + .row; + self.scroll_manager + .set_anchor(scroll_anchor, top_row, true, false, workspace_id, cx); + } + + pub(crate) fn set_scroll_anchor_remote( + &mut self, + scroll_anchor: ScrollAnchor, + cx: &mut ViewContext, + ) { + hide_hover(self, cx); + let workspace_id = self.workspace.as_ref().map(|workspace| workspace.1); + let snapshot = &self.buffer().read(cx).snapshot(cx); + if !scroll_anchor.anchor.is_valid(snapshot) { + log::warn!("Invalid scroll anchor: {:?}", scroll_anchor); + return; + } + let top_row = scroll_anchor.anchor.to_point(snapshot).row; + self.scroll_manager + .set_anchor(scroll_anchor, top_row, false, false, workspace_id, cx); + } + + pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext) { + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + + if self.take_rename(true, cx).is_some() { + return; + } + + let cur_position = self.scroll_position(cx); + let new_pos = cur_position + point(0., amount.lines(self)); + self.set_scroll_position(new_pos, cx); + } + + /// Returns an ordering. 
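// set_scroll_position above splits a float scroll position into a stable part
// (an anchor placed on the top visible row) and a small residual offset, so the
// position survives edits elsewhere in the buffer. In plain numbers, ignoring
// the display-map round trip the real code performs:
fn to_anchor(scroll_y: f32) -> (u32, f32) {
    if scroll_y <= 0.0 {
        (0, 0.0) // pinned to the buffer start, like Anchor::min()
    } else {
        let top_row = scroll_y as u32; // row the anchor is placed on
        (top_row, scroll_y - top_row as f32) // fractional remainder kept as offset
    }
}

fn from_anchor(top_row: u32, offset: f32) -> f32 {
    top_row as f32 + offset
}

fn main() {
    let (row, offset) = to_anchor(12.25);
    assert_eq!((row, offset), (12, 0.25));
    assert_eq!(from_anchor(row, offset), 12.25);
}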
The newest selection is: + /// Ordering::Equal => on screen + /// Ordering::Less => above the screen + /// Ordering::Greater => below the screen + pub fn newest_selection_on_screen(&self, cx: &mut AppContext) -> Ordering { + let snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let newest_head = self + .selections + .newest_anchor() + .head() + .to_display_point(&snapshot); + let screen_top = self + .scroll_manager + .anchor + .anchor + .to_display_point(&snapshot); + + if screen_top > newest_head { + return Ordering::Less; + } + + if let Some(visible_lines) = self.visible_line_count() { + if newest_head.row() < DisplayRow(screen_top.row().0 + visible_lines as u32) { + return Ordering::Equal; + } + } + + Ordering::Greater + } + + pub fn read_scroll_position_from_db( + &mut self, + item_id: u64, + workspace_id: WorkspaceId, + cx: &mut ViewContext, + ) { + let scroll_position = DB.get_scroll_position(item_id, workspace_id); + if let Ok(Some((top_row, x, y))) = scroll_position { + let top_anchor = self + .buffer() + .read(cx) + .snapshot(cx) + .anchor_at(Point::new(top_row, 0), Bias::Left); + let scroll_anchor = ScrollAnchor { + offset: gpui::Point::new(x, y), + anchor: top_anchor, + }; + self.set_scroll_anchor(scroll_anchor, cx); + } + } +} diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs new file mode 100644 index 0000000..c43191e --- /dev/null +++ b/crates/editor/src/scroll/actions.rs @@ -0,0 +1,103 @@ +use super::Axis; +use crate::{ + Autoscroll, Bias, Editor, EditorMode, NextScreen, ScrollAnchor, ScrollCursorBottom, + ScrollCursorCenter, ScrollCursorTop, +}; +use gpui::{Point, ViewContext}; + +impl Editor { + pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext) { + if self.take_rename(true, cx).is_some() { + return; + } + + if self.mouse_context_menu.is_some() { + return; + } + + if matches!(self.mode, EditorMode::SingleLine) { + cx.propagate(); + return; + } + self.request_autoscroll(Autoscroll::Next, cx); + } + + pub fn scroll( + &mut self, + scroll_position: Point, + axis: Option, + cx: &mut ViewContext, + ) { + self.scroll_manager.update_ongoing_scroll(axis); + self.set_scroll_position(scroll_position, cx); + } + + pub fn scroll_cursor_top(&mut self, _: &ScrollCursorTop, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx).display_snapshot; + let scroll_margin_rows = self.vertical_scroll_margin() as u32; + + let mut new_screen_top = self.selections.newest_display(cx).head(); + *new_screen_top.row_mut() = new_screen_top.row().0.saturating_sub(scroll_margin_rows); + *new_screen_top.column_mut() = 0; + let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left); + let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top); + + self.set_scroll_anchor( + ScrollAnchor { + anchor: new_anchor, + offset: Default::default(), + }, + cx, + ) + } + + pub fn scroll_cursor_center(&mut self, _: &ScrollCursorCenter, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx).display_snapshot; + let visible_rows = if let Some(visible_rows) = self.visible_line_count() { + visible_rows as u32 + } else { + return; + }; + + let mut new_screen_top = self.selections.newest_display(cx).head(); + *new_screen_top.row_mut() = new_screen_top.row().0.saturating_sub(visible_rows / 2); + *new_screen_top.column_mut() = 0; + let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left); + let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top); + + self.set_scroll_anchor( + ScrollAnchor { + 
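// scroll_cursor_top/center/bottom above differ only in how far above the cursor
// the new top row lands: the scroll margin, half the viewport, or a full
// viewport minus the margin. Reduced to row arithmetic as a sketch:
fn top(cursor_row: u32, margin: u32) -> u32 {
    cursor_row.saturating_sub(margin)
}

fn center(cursor_row: u32, visible_rows: u32) -> u32 {
    cursor_row.saturating_sub(visible_rows / 2)
}

fn bottom(cursor_row: u32, visible_rows: u32, margin: u32) -> u32 {
    cursor_row.saturating_sub(visible_rows.saturating_sub(margin))
}

fn main() {
    // 40-row viewport, cursor on row 100, 3-row margin.
    assert_eq!(top(100, 3), 97);
    assert_eq!(center(100, 40), 80);
    assert_eq!(bottom(100, 40, 3), 63);
    // saturating_sub keeps the result at row 0 near the start of the buffer.
    assert_eq!(top(1, 3), 0);
}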
anchor: new_anchor, + offset: Default::default(), + }, + cx, + ) + } + + pub fn scroll_cursor_bottom(&mut self, _: &ScrollCursorBottom, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx).display_snapshot; + let scroll_margin_rows = self.vertical_scroll_margin() as u32; + let visible_rows = if let Some(visible_rows) = self.visible_line_count() { + visible_rows as u32 + } else { + return; + }; + + let mut new_screen_top = self.selections.newest_display(cx).head(); + *new_screen_top.row_mut() = new_screen_top + .row() + .0 + .saturating_sub(visible_rows.saturating_sub(scroll_margin_rows)); + *new_screen_top.column_mut() = 0; + let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left); + let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top); + + self.set_scroll_anchor( + ScrollAnchor { + anchor: new_anchor, + offset: Default::default(), + }, + cx, + ) + } +} diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs new file mode 100644 index 0000000..5c33532 --- /dev/null +++ b/crates/editor/src/scroll/autoscroll.rs @@ -0,0 +1,303 @@ +use crate::{ + display_map::ToDisplayPoint, DisplayRow, Editor, EditorMode, LineWithInvisibles, RowExt, +}; +use gpui::{px, Bounds, Pixels, ViewContext}; +use language::Point; +use std::{cmp, f32}; + +#[derive(PartialEq, Eq, Clone, Copy)] +pub enum Autoscroll { + Next, + Strategy(AutoscrollStrategy), +} + +impl Autoscroll { + /// scrolls the minimal amount to (try) and fit all cursors onscreen + pub fn fit() -> Self { + Self::Strategy(AutoscrollStrategy::Fit) + } + + /// scrolls the minimal amount to fit the newest cursor + pub fn newest() -> Self { + Self::Strategy(AutoscrollStrategy::Newest) + } + + /// scrolls so the newest cursor is vertically centered + pub fn center() -> Self { + Self::Strategy(AutoscrollStrategy::Center) + } + + /// scrolls so the neweset cursor is near the top + /// (offset by vertical_scroll_margin) + pub fn focused() -> Self { + Self::Strategy(AutoscrollStrategy::Focused) + } + /// Scrolls so that the newest cursor is roughly an n-th line from the top. + pub fn top_relative(n: usize) -> Self { + Self::Strategy(AutoscrollStrategy::TopRelative(n)) + } +} + +#[derive(PartialEq, Eq, Default, Clone, Copy)] +pub enum AutoscrollStrategy { + Fit, + Newest, + #[default] + Center, + Focused, + Top, + Bottom, + TopRelative(usize), +} + +impl AutoscrollStrategy { + fn next(&self) -> Self { + match self { + AutoscrollStrategy::Center => AutoscrollStrategy::Top, + AutoscrollStrategy::Top => AutoscrollStrategy::Bottom, + _ => AutoscrollStrategy::Center, + } + } +} + +impl Editor { + pub fn autoscroll_requested(&self) -> bool { + self.scroll_manager.autoscroll_requested() + } + + pub fn autoscroll_vertically( + &mut self, + bounds: Bounds, + line_height: Pixels, + cx: &mut ViewContext, + ) -> bool { + let viewport_height = bounds.size.height; + let visible_lines = viewport_height / line_height; + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut scroll_position = self.scroll_manager.scroll_position(&display_map); + let original_y = scroll_position.y; + if let Some(last_bounds) = self.expect_bounds_change.take() { + if scroll_position.y != 0. { + scroll_position.y += (bounds.top() - last_bounds.top()) / line_height; + if scroll_position.y < 0. { + scroll_position.y = 0.; + } + } + } + let max_scroll_top = if matches!(self.mode, EditorMode::AutoHeight { .. }) { + (display_map.max_point().row().as_f32() - visible_lines + 1.).max(0.) 
+ } else { + display_map.max_point().row().as_f32() + }; + if scroll_position.y > max_scroll_top { + scroll_position.y = max_scroll_top; + } + + if original_y != scroll_position.y { + self.set_scroll_position(scroll_position, cx); + } + + let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else { + return false; + }; + + let mut target_top; + let mut target_bottom; + if let Some(first_highlighted_row) = + self.highlighted_display_row_for_autoscroll(&display_map) + { + target_top = first_highlighted_row.as_f32(); + target_bottom = target_top + 1.; + } else { + let selections = self.selections.all::(cx); + target_top = selections + .first() + .unwrap() + .head() + .to_display_point(&display_map) + .row() + .as_f32(); + target_bottom = selections + .last() + .unwrap() + .head() + .to_display_point(&display_map) + .row() + .next_row() + .as_f32(); + + // If the selections can't all fit on screen, scroll to the newest. + if autoscroll == Autoscroll::newest() + || autoscroll == Autoscroll::fit() && target_bottom - target_top > visible_lines + { + let newest_selection_top = selections + .iter() + .max_by_key(|s| s.id) + .unwrap() + .head() + .to_display_point(&display_map) + .row() + .as_f32(); + target_top = newest_selection_top; + target_bottom = newest_selection_top + 1.; + } + } + + let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) { + 0. + } else { + ((visible_lines - (target_bottom - target_top)) / 2.0).floor() + }; + + let strategy = match autoscroll { + Autoscroll::Strategy(strategy) => strategy, + Autoscroll::Next => { + let last_autoscroll = &self.scroll_manager.last_autoscroll; + if let Some(last_autoscroll) = last_autoscroll { + if self.scroll_manager.anchor.offset == last_autoscroll.0 + && target_top == last_autoscroll.1 + && target_bottom == last_autoscroll.2 + { + last_autoscroll.3.next() + } else { + AutoscrollStrategy::default() + } + } else { + AutoscrollStrategy::default() + } + } + }; + + match strategy { + AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => { + let margin = margin.min(self.scroll_manager.vertical_scroll_margin); + let target_top = (target_top - margin).max(0.0); + let target_bottom = target_bottom + margin; + let start_row = scroll_position.y; + let end_row = start_row + visible_lines; + + let needs_scroll_up = target_top < start_row; + let needs_scroll_down = target_bottom >= end_row; + + if needs_scroll_up && !needs_scroll_down { + scroll_position.y = target_top; + self.set_scroll_position_internal(scroll_position, local, true, cx); + } + if !needs_scroll_up && needs_scroll_down { + scroll_position.y = target_bottom - visible_lines; + self.set_scroll_position_internal(scroll_position, local, true, cx); + } + } + AutoscrollStrategy::Center => { + scroll_position.y = (target_top - margin).max(0.0); + self.set_scroll_position_internal(scroll_position, local, true, cx); + } + AutoscrollStrategy::Focused => { + scroll_position.y = + (target_top - self.scroll_manager.vertical_scroll_margin).max(0.0); + self.set_scroll_position_internal(scroll_position, local, true, cx); + } + AutoscrollStrategy::Top => { + scroll_position.y = (target_top).max(0.0); + self.set_scroll_position_internal(scroll_position, local, true, cx); + } + AutoscrollStrategy::Bottom => { + scroll_position.y = (target_bottom - visible_lines).max(0.0); + self.set_scroll_position_internal(scroll_position, local, true, cx); + } + AutoscrollStrategy::TopRelative(lines) => { + scroll_position.y = target_top - lines as f32; + 
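// The Fit/Newest strategies above scroll the minimum amount: move only if the
// target rows, padded by the margin, fall outside the current viewport, and
// then just far enough to bring them back in. A float sketch of that clamp
// (the real code also distinguishes the scroll-up and scroll-down cases and
// goes through set_scroll_position_internal):
fn fit(scroll_top: f32, visible: f32, target_top: f32, target_bottom: f32, margin: f32) -> f32 {
    let top = (target_top - margin).max(0.0);
    let bottom = target_bottom + margin;
    if top < scroll_top {
        top // scroll up: put the padded target at the top of the viewport
    } else if bottom >= scroll_top + visible {
        bottom - visible // scroll down: put it at the bottom edge
    } else {
        scroll_top // already on screen, leave the scroll position alone
    }
}

fn main() {
    // Viewport shows rows 10..50; a cursor on row 5 scrolls up to 2 (margin 3).
    assert_eq!(fit(10.0, 40.0, 5.0, 6.0, 3.0), 2.0);
    // A cursor already inside the viewport leaves the position untouched.
    assert_eq!(fit(10.0, 40.0, 20.0, 21.0, 3.0), 10.0);
}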
self.set_scroll_position_internal(scroll_position, local, true, cx); + } + } + + self.scroll_manager.last_autoscroll = Some(( + self.scroll_manager.anchor.offset, + target_top, + target_bottom, + strategy, + )); + + true + } + + pub(crate) fn autoscroll_horizontally( + &mut self, + start_row: DisplayRow, + viewport_width: Pixels, + scroll_width: Pixels, + max_glyph_width: Pixels, + layouts: &[LineWithInvisibles], + cx: &mut ViewContext, + ) -> bool { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all::(cx); + + let mut target_left; + let mut target_right; + + if self + .highlighted_display_row_for_autoscroll(&display_map) + .is_none() + { + target_left = px(f32::INFINITY); + target_right = px(0.); + for selection in selections { + let head = selection.head().to_display_point(&display_map); + if head.row() >= start_row + && head.row() < DisplayRow(start_row.0 + layouts.len() as u32) + { + let start_column = head.column().saturating_sub(3); + let end_column = cmp::min(display_map.line_len(head.row()), head.column() + 3); + target_left = target_left.min( + layouts[head.row().minus(start_row) as usize] + .line + .x_for_index(start_column as usize), + ); + target_right = target_right.max( + layouts[head.row().minus(start_row) as usize] + .line + .x_for_index(end_column as usize) + + max_glyph_width, + ); + } + } + } else { + target_left = px(0.); + target_right = px(0.); + } + + target_right = target_right.min(scroll_width); + + if target_right - target_left > viewport_width { + return false; + } + + let scroll_left = self.scroll_manager.anchor.offset.x * max_glyph_width; + let scroll_right = scroll_left + viewport_width; + + if target_left < scroll_left { + self.scroll_manager.anchor.offset.x = target_left / max_glyph_width; + true + } else if target_right > scroll_right { + self.scroll_manager.anchor.offset.x = (target_right - viewport_width) / max_glyph_width; + true + } else { + false + } + } + + pub fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext) { + self.scroll_manager.autoscroll_request = Some((autoscroll, true)); + cx.notify(); + } + + pub(crate) fn request_autoscroll_remotely( + &mut self, + autoscroll: Autoscroll, + cx: &mut ViewContext, + ) { + self.scroll_manager.autoscroll_request = Some((autoscroll, false)); + cx.notify(); + } +} diff --git a/crates/editor/src/scroll/scroll_amount.rs b/crates/editor/src/scroll/scroll_amount.rs new file mode 100644 index 0000000..2cb22d1 --- /dev/null +++ b/crates/editor/src/scroll/scroll_amount.rs @@ -0,0 +1,28 @@ +use crate::Editor; +use serde::Deserialize; + +#[derive(Clone, PartialEq, Deserialize)] +pub enum ScrollAmount { + // Scroll N lines (positive is towards the end of the document) + Line(f32), + // Scroll N pages (positive is towards the end of the document) + Page(f32), +} + +impl ScrollAmount { + pub fn lines(&self, editor: &mut Editor) -> f32 { + match self { + Self::Line(count) => *count, + Self::Page(count) => editor + .visible_line_count() + .map(|mut l| { + // for full pages subtract one to leave an anchor line + if count.abs() == 1.0 { + l -= 1.0 + } + (l * count).trunc() + }) + .unwrap_or(0.), + } + } +} diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs new file mode 100644 index 0000000..03859dd --- /dev/null +++ b/crates/editor/src/selections_collection.rs @@ -0,0 +1,896 @@ +use std::{ + cell::Ref, + iter, mem, + ops::{Deref, DerefMut, Range, Sub}, + sync::Arc, +}; + +use 
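// ScrollAmount::lines above turns a page count into a line count, keeping one
// line of overlap for full-page scrolls so the reader retains an anchor line on
// screen. The same computation over plain floats:
fn page_to_lines(visible_lines: Option<f32>, pages: f32) -> f32 {
    match visible_lines {
        Some(mut lines) => {
            if pages.abs() == 1.0 {
                lines -= 1.0; // leave one previously-visible line as context
            }
            (lines * pages).trunc()
        }
        None => 0.0, // layout not measured yet; scroll nothing
    }
}

fn main() {
    assert_eq!(page_to_lines(Some(40.0), 1.0), 39.0);
    assert_eq!(page_to_lines(Some(40.0), -1.0), -39.0);
    assert_eq!(page_to_lines(Some(40.0), 0.5), 20.0);
    assert_eq!(page_to_lines(None, 1.0), 0.0);
}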
collections::HashMap; +use gpui::{AppContext, Model, Pixels}; +use itertools::Itertools; +use language::{Bias, Point, Selection, SelectionGoal, TextDimension, ToPoint}; +use util::post_inc; + +use crate::{ + display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint}, + movement::TextLayoutDetails, + Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode, + ToOffset, +}; + +#[derive(Debug, Clone)] +pub struct PendingSelection { + pub selection: Selection, + pub mode: SelectMode, +} + +#[derive(Debug, Clone)] +pub struct SelectionsCollection { + display_map: Model, + buffer: Model, + pub next_selection_id: usize, + pub line_mode: bool, + /// The non-pending, non-overlapping selections. + /// The [SelectionsCollection::pending] selection could possibly overlap these + pub disjoint: Arc<[Selection]>, + /// A pending selection, such as when the mouse is being dragged + pub pending: Option, +} + +impl SelectionsCollection { + pub fn new(display_map: Model, buffer: Model) -> Self { + Self { + display_map, + buffer, + next_selection_id: 1, + line_mode: false, + disjoint: Arc::from([]), + pending: Some(PendingSelection { + selection: Selection { + id: 0, + start: Anchor::min(), + end: Anchor::min(), + reversed: false, + goal: SelectionGoal::None, + }, + mode: SelectMode::Character, + }), + } + } + + pub fn display_map(&self, cx: &mut AppContext) -> DisplaySnapshot { + self.display_map.update(cx, |map, cx| map.snapshot(cx)) + } + + fn buffer<'a>(&self, cx: &'a AppContext) -> Ref<'a, MultiBufferSnapshot> { + self.buffer.read(cx).read(cx) + } + + pub fn clone_state(&mut self, other: &SelectionsCollection) { + self.next_selection_id = other.next_selection_id; + self.line_mode = other.line_mode; + self.disjoint = other.disjoint.clone(); + self.pending.clone_from(&other.pending); + } + + pub fn count(&self) -> usize { + let mut count = self.disjoint.len(); + if self.pending.is_some() { + count += 1; + } + count + } + + /// The non-pending, non-overlapping selections. There could still be a pending + /// selection that overlaps these if the mouse is being dragged, etc. Returned as + /// selections over Anchors. 
+ pub fn disjoint_anchors(&self) -> Arc<[Selection]> { + self.disjoint.clone() + } + + pub fn pending_anchor(&self) -> Option> { + self.pending + .as_ref() + .map(|pending| pending.selection.clone()) + } + + pub fn pending>( + &self, + cx: &AppContext, + ) -> Option> { + self.pending_anchor() + .as_ref() + .map(|pending| pending.map(|p| p.summary::(&self.buffer(cx)))) + } + + pub(crate) fn pending_mode(&self) -> Option { + self.pending.as_ref().map(|pending| pending.mode.clone()) + } + + pub fn all<'a, D>(&self, cx: &AppContext) -> Vec> + where + D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, + { + let disjoint_anchors = &self.disjoint; + let mut disjoint = + resolve_multiple::(disjoint_anchors.iter(), &self.buffer(cx)).peekable(); + + let mut pending_opt = self.pending::(cx); + + iter::from_fn(move || { + if let Some(pending) = pending_opt.as_mut() { + while let Some(next_selection) = disjoint.peek() { + if pending.start <= next_selection.end && pending.end >= next_selection.start { + let next_selection = disjoint.next().unwrap(); + if next_selection.start < pending.start { + pending.start = next_selection.start; + } + if next_selection.end > pending.end { + pending.end = next_selection.end; + } + } else if next_selection.end < pending.start { + return disjoint.next(); + } else { + break; + } + } + + pending_opt.take() + } else { + disjoint.next() + } + }) + .collect() + } + + /// Returns all of the selections, adjusted to take into account the selection line_mode + pub fn all_adjusted(&self, cx: &mut AppContext) -> Vec> { + let mut selections = self.all::(cx); + if self.line_mode { + let map = self.display_map(cx); + for selection in &mut selections { + let new_range = map.expand_to_line(selection.range()); + selection.start = new_range.start; + selection.end = new_range.end; + } + } + selections + } + + pub fn all_adjusted_display( + &self, + cx: &mut AppContext, + ) -> (DisplaySnapshot, Vec>) { + if self.line_mode { + let selections = self.all::(cx); + let map = self.display_map(cx); + let result = selections + .into_iter() + .map(|mut selection| { + let new_range = map.expand_to_line(selection.range()); + selection.start = new_range.start; + selection.end = new_range.end; + selection.map(|point| point.to_display_point(&map)) + }) + .collect(); + (map, result) + } else { + self.all_display(cx) + } + } + + pub fn disjoint_in_range<'a, D>( + &self, + range: Range, + cx: &AppContext, + ) -> Vec> + where + D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, + { + let buffer = self.buffer(cx); + let start_ix = match self + .disjoint + .binary_search_by(|probe| probe.end.cmp(&range.start, &buffer)) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match self + .disjoint + .binary_search_by(|probe| probe.start.cmp(&range.end, &buffer)) + { + Ok(ix) => ix + 1, + Err(ix) => ix, + }; + resolve_multiple(&self.disjoint[start_ix..end_ix], &buffer).collect() + } + + pub fn all_display( + &self, + cx: &mut AppContext, + ) -> (DisplaySnapshot, Vec>) { + let display_map = self.display_map(cx); + let selections = self + .all::(cx) + .into_iter() + .map(|selection| selection.map(|point| point.to_display_point(&display_map))) + .collect(); + (display_map, selections) + } + + pub fn newest_anchor(&self) -> &Selection { + self.pending + .as_ref() + .map(|s| &s.selection) + .or_else(|| self.disjoint.iter().max_by_key(|s| s.id)) + .unwrap() + } + + pub fn newest>( + &self, + cx: &AppContext, + ) -> Selection { + resolve(self.newest_anchor(), &self.buffer(cx)) + } + + pub fn newest_display(&self, cx: 
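// disjoint_in_range above relies on the disjoint selections being sorted and
// non-overlapping, so the ones touching a query range can be found with two
// binary searches instead of a scan. The same idea over plain offset ranges,
// using partition_point in place of the binary_search_by calls:
use std::ops::Range;

fn disjoint_in_range(sorted: &[Range<usize>], query: Range<usize>) -> &[Range<usize>] {
    // First selection whose end reaches the query start...
    let start_ix = sorted.partition_point(|r| r.end < query.start);
    // ...up to, but not including, the first whose start is past the query end.
    let end_ix = sorted.partition_point(|r| r.start <= query.end);
    &sorted[start_ix..end_ix]
}

fn main() {
    let selections = [0..2, 5..8, 10..12, 20..25];
    assert_eq!(disjoint_in_range(&selections, 6..11), &[5..8, 10..12]);
    assert_eq!(disjoint_in_range(&selections, 13..19), &[] as &[Range<usize>]);
}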
&mut AppContext) -> Selection { + let display_map = self.display_map(cx); + let selection = self + .newest_anchor() + .map(|point| point.to_display_point(&display_map)); + selection + } + + pub fn oldest_anchor(&self) -> &Selection { + self.disjoint + .iter() + .min_by_key(|s| s.id) + .or_else(|| self.pending.as_ref().map(|p| &p.selection)) + .unwrap() + } + + pub fn oldest>( + &self, + cx: &AppContext, + ) -> Selection { + resolve(self.oldest_anchor(), &self.buffer(cx)) + } + + pub fn first_anchor(&self) -> Selection { + self.disjoint[0].clone() + } + + pub fn first>( + &self, + cx: &AppContext, + ) -> Selection { + self.all(cx).first().unwrap().clone() + } + + pub fn last>( + &self, + cx: &AppContext, + ) -> Selection { + self.all(cx).last().unwrap().clone() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn ranges + std::fmt::Debug>( + &self, + cx: &AppContext, + ) -> Vec> { + self.all::(cx) + .iter() + .map(|s| { + if s.reversed { + s.end.clone()..s.start.clone() + } else { + s.start.clone()..s.end.clone() + } + }) + .collect() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn display_ranges(&self, cx: &mut AppContext) -> Vec> { + let display_map = self.display_map(cx); + self.disjoint_anchors() + .iter() + .chain(self.pending_anchor().as_ref()) + .map(|s| { + if s.reversed { + s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map) + } else { + s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map) + } + }) + .collect() + } + + pub fn build_columnar_selection( + &mut self, + display_map: &DisplaySnapshot, + row: DisplayRow, + positions: &Range, + reversed: bool, + text_layout_details: &TextLayoutDetails, + ) -> Option> { + let is_empty = positions.start == positions.end; + let line_len = display_map.line_len(row); + + let line = display_map.layout_row(row, &text_layout_details); + + let start_col = line.closest_index_for_x(positions.start) as u32; + if start_col < line_len || (is_empty && positions.start == line.width) { + let start = DisplayPoint::new(row, start_col); + let end_col = line.closest_index_for_x(positions.end) as u32; + let end = DisplayPoint::new(row, end_col); + + Some(Selection { + id: post_inc(&mut self.next_selection_id), + start: start.to_point(display_map), + end: end.to_point(display_map), + reversed, + goal: SelectionGoal::HorizontalRange { + start: positions.start.into(), + end: positions.end.into(), + }, + }) + } else { + None + } + } + + pub(crate) fn change_with( + &mut self, + cx: &mut AppContext, + change: impl FnOnce(&mut MutableSelectionsCollection) -> R, + ) -> (bool, R) { + let mut mutable_collection = MutableSelectionsCollection { + collection: self, + selections_changed: false, + cx, + }; + + let result = change(&mut mutable_collection); + assert!( + !mutable_collection.disjoint.is_empty() || mutable_collection.pending.is_some(), + "There must be at least one selection" + ); + (mutable_collection.selections_changed, result) + } +} + +pub struct MutableSelectionsCollection<'a> { + collection: &'a mut SelectionsCollection, + selections_changed: bool, + cx: &'a mut AppContext, +} + +impl<'a> MutableSelectionsCollection<'a> { + pub fn display_map(&mut self) -> DisplaySnapshot { + self.collection.display_map(self.cx) + } + + fn buffer(&self) -> Ref { + self.collection.buffer(self.cx) + } + + pub fn clear_disjoint(&mut self) { + self.collection.disjoint = Arc::from([]); + } + + pub fn delete(&mut self, selection_id: usize) { + let mut changed = false; + self.collection.disjoint = self + 
.disjoint + .iter() + .filter(|selection| { + let found = selection.id == selection_id; + changed |= found; + !found + }) + .cloned() + .collect(); + + self.selections_changed |= changed; + } + + pub fn clear_pending(&mut self) { + if self.collection.pending.is_some() { + self.collection.pending = None; + self.selections_changed = true; + } + } + + pub(crate) fn set_pending_anchor_range(&mut self, range: Range, mode: SelectMode) { + self.collection.pending = Some(PendingSelection { + selection: Selection { + id: post_inc(&mut self.collection.next_selection_id), + start: range.start, + end: range.end, + reversed: false, + goal: SelectionGoal::None, + }, + mode, + }); + self.selections_changed = true; + } + + pub(crate) fn set_pending_display_range( + &mut self, + range: Range, + mode: SelectMode, + ) { + let (start, end, reversed) = { + let display_map = self.display_map(); + let buffer = self.buffer(); + let mut start = range.start; + let mut end = range.end; + let reversed = if start > end { + mem::swap(&mut start, &mut end); + true + } else { + false + }; + + let end_bias = if end > start { Bias::Left } else { Bias::Right }; + ( + buffer.anchor_before(start.to_point(&display_map)), + buffer.anchor_at(end.to_point(&display_map), end_bias), + reversed, + ) + }; + + let new_pending = PendingSelection { + selection: Selection { + id: post_inc(&mut self.collection.next_selection_id), + start, + end, + reversed, + goal: SelectionGoal::None, + }, + mode, + }; + + self.collection.pending = Some(new_pending); + self.selections_changed = true; + } + + pub(crate) fn set_pending(&mut self, selection: Selection, mode: SelectMode) { + self.collection.pending = Some(PendingSelection { selection, mode }); + self.selections_changed = true; + } + + pub fn try_cancel(&mut self) -> bool { + if let Some(pending) = self.collection.pending.take() { + if self.disjoint.is_empty() { + self.collection.disjoint = Arc::from([pending.selection]); + } + self.selections_changed = true; + return true; + } + + let mut oldest = self.oldest_anchor().clone(); + if self.count() > 1 { + self.collection.disjoint = Arc::from([oldest]); + self.selections_changed = true; + return true; + } + + if !oldest.start.cmp(&oldest.end, &self.buffer()).is_eq() { + let head = oldest.head(); + oldest.start = head; + oldest.end = head; + self.collection.disjoint = Arc::from([oldest]); + self.selections_changed = true; + return true; + } + + false + } + + pub fn insert_range(&mut self, range: Range) + where + T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub + std::marker::Copy, + { + let mut selections = self.all(self.cx); + let mut start = range.start.to_offset(&self.buffer()); + let mut end = range.end.to_offset(&self.buffer()); + let reversed = if start > end { + mem::swap(&mut start, &mut end); + true + } else { + false + }; + selections.push(Selection { + id: post_inc(&mut self.collection.next_selection_id), + start, + end, + reversed, + goal: SelectionGoal::None, + }); + self.select(selections); + } + + pub fn select(&mut self, mut selections: Vec>) + where + T: ToOffset + ToPoint + Ord + std::marker::Copy + std::fmt::Debug, + { + let buffer = self.buffer.read(self.cx).snapshot(self.cx); + selections.sort_unstable_by_key(|s| s.start); + // Merge overlapping selections. 
+ let mut i = 1; + while i < selections.len() { + if selections[i - 1].end >= selections[i].start { + let removed = selections.remove(i); + if removed.start < selections[i - 1].start { + selections[i - 1].start = removed.start; + } + if removed.end > selections[i - 1].end { + selections[i - 1].end = removed.end; + } + } else { + i += 1; + } + } + + self.collection.disjoint = Arc::from_iter(selections.into_iter().map(|selection| { + let end_bias = if selection.end > selection.start { + Bias::Left + } else { + Bias::Right + }; + Selection { + id: selection.id, + start: buffer.anchor_after(selection.start), + end: buffer.anchor_at(selection.end, end_bias), + reversed: selection.reversed, + goal: selection.goal, + } + })); + + self.collection.pending = None; + self.selections_changed = true; + } + + pub fn select_anchors(&mut self, selections: Vec>) { + let buffer = self.buffer.read(self.cx).snapshot(self.cx); + let resolved_selections = + resolve_multiple::(&selections, &buffer).collect::>(); + self.select(resolved_selections); + } + + pub fn select_ranges(&mut self, ranges: I) + where + I: IntoIterator>, + T: ToOffset, + { + let buffer = self.buffer.read(self.cx).snapshot(self.cx); + let ranges = ranges + .into_iter() + .map(|range| range.start.to_offset(&buffer)..range.end.to_offset(&buffer)); + self.select_offset_ranges(ranges); + } + + fn select_offset_ranges(&mut self, ranges: I) + where + I: IntoIterator>, + { + let selections = ranges + .into_iter() + .map(|range| { + let mut start = range.start; + let mut end = range.end; + let reversed = if start > end { + mem::swap(&mut start, &mut end); + true + } else { + false + }; + Selection { + id: post_inc(&mut self.collection.next_selection_id), + start, + end, + reversed, + goal: SelectionGoal::None, + } + }) + .collect::>(); + + self.select(selections) + } + + pub fn select_anchor_ranges(&mut self, ranges: I) + where + I: IntoIterator>, + { + let buffer = self.buffer.read(self.cx).snapshot(self.cx); + let selections = ranges + .into_iter() + .map(|range| { + let mut start = range.start; + let mut end = range.end; + let reversed = if start.cmp(&end, &buffer).is_gt() { + mem::swap(&mut start, &mut end); + true + } else { + false + }; + Selection { + id: post_inc(&mut self.collection.next_selection_id), + start, + end, + reversed, + goal: SelectionGoal::None, + } + }) + .collect::>(); + self.select_anchors(selections) + } + + pub fn new_selection_id(&mut self) -> usize { + post_inc(&mut self.next_selection_id) + } + + pub fn select_display_ranges(&mut self, ranges: T) + where + T: IntoIterator>, + { + let display_map = self.display_map(); + let selections = ranges + .into_iter() + .map(|range| { + let mut start = range.start; + let mut end = range.end; + let reversed = if start > end { + mem::swap(&mut start, &mut end); + true + } else { + false + }; + Selection { + id: post_inc(&mut self.collection.next_selection_id), + start: start.to_point(&display_map), + end: end.to_point(&display_map), + reversed, + goal: SelectionGoal::None, + } + }) + .collect(); + self.select(selections); + } + + pub fn move_with( + &mut self, + mut move_selection: impl FnMut(&DisplaySnapshot, &mut Selection), + ) { + let mut changed = false; + let display_map = self.display_map(); + let selections = self + .all::(self.cx) + .into_iter() + .map(|selection| { + let mut moved_selection = + selection.map(|point| point.to_display_point(&display_map)); + move_selection(&display_map, &mut moved_selection); + let moved_selection = + moved_selection.map(|display_point| 
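// select() above sorts the incoming selections by start and then folds any that
// overlap into one, which is what keeps `disjoint` true to its name. The same
// merge expressed over plain ranges as a small sketch:
use std::ops::Range;

fn merge_overlapping(mut ranges: Vec<Range<usize>>) -> Vec<Range<usize>> {
    ranges.sort_unstable_by_key(|r| r.start);
    let mut i = 1;
    while i < ranges.len() {
        if ranges[i - 1].end >= ranges[i].start {
            // Absorb ranges[i] into its predecessor, widening it as needed.
            let removed = ranges.remove(i);
            ranges[i - 1].start = ranges[i - 1].start.min(removed.start);
            ranges[i - 1].end = ranges[i - 1].end.max(removed.end);
        } else {
            i += 1;
        }
    }
    ranges
}

fn main() {
    let merged = merge_overlapping(vec![8..10, 1..4, 3..6, 12..14]);
    assert_eq!(merged, vec![1..6, 8..10, 12..14]);
}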
display_point.to_point(&display_map)); + if selection != moved_selection { + changed = true; + } + moved_selection + }) + .collect(); + + if changed { + self.select(selections) + } + } + + pub fn move_offsets_with( + &mut self, + mut move_selection: impl FnMut(&MultiBufferSnapshot, &mut Selection), + ) { + let mut changed = false; + let snapshot = self.buffer().clone(); + let selections = self + .all::(self.cx) + .into_iter() + .map(|selection| { + let mut moved_selection = selection.clone(); + move_selection(&snapshot, &mut moved_selection); + if selection != moved_selection { + changed = true; + } + moved_selection + }) + .collect(); + drop(snapshot); + + if changed { + self.select(selections) + } + } + + pub fn move_heads_with( + &mut self, + mut update_head: impl FnMut( + &DisplaySnapshot, + DisplayPoint, + SelectionGoal, + ) -> (DisplayPoint, SelectionGoal), + ) { + self.move_with(|map, selection| { + let (new_head, new_goal) = update_head(map, selection.head(), selection.goal); + selection.set_head(new_head, new_goal); + }); + } + + pub fn move_cursors_with( + &mut self, + mut update_cursor_position: impl FnMut( + &DisplaySnapshot, + DisplayPoint, + SelectionGoal, + ) -> (DisplayPoint, SelectionGoal), + ) { + self.move_with(|map, selection| { + let (cursor, new_goal) = update_cursor_position(map, selection.head(), selection.goal); + selection.collapse_to(cursor, new_goal) + }); + } + + pub fn maybe_move_cursors_with( + &mut self, + mut update_cursor_position: impl FnMut( + &DisplaySnapshot, + DisplayPoint, + SelectionGoal, + ) -> Option<(DisplayPoint, SelectionGoal)>, + ) { + self.move_cursors_with(|map, point, goal| { + update_cursor_position(map, point, goal).unwrap_or((point, goal)) + }) + } + + pub fn replace_cursors_with( + &mut self, + mut find_replacement_cursors: impl FnMut(&DisplaySnapshot) -> Vec, + ) { + let display_map = self.display_map(); + let new_selections = find_replacement_cursors(&display_map) + .into_iter() + .map(|cursor| { + let cursor_point = cursor.to_point(&display_map); + Selection { + id: post_inc(&mut self.collection.next_selection_id), + start: cursor_point, + end: cursor_point, + reversed: false, + goal: SelectionGoal::None, + } + }) + .collect(); + self.select(new_selections); + } + + /// Compute new ranges for any selections that were located in excerpts that have + /// since been removed. + /// + /// Returns a `HashMap` indicating which selections whose former head position + /// was no longer present. The keys of the map are selection ids. The values are + /// the id of the new excerpt where the head of the selection has been moved. 
+ pub fn refresh(&mut self) -> HashMap { + let mut pending = self.collection.pending.take(); + let mut selections_with_lost_position = HashMap::default(); + + let anchors_with_status = { + let buffer = self.buffer(); + let disjoint_anchors = self + .disjoint + .iter() + .flat_map(|selection| [&selection.start, &selection.end]); + buffer.refresh_anchors(disjoint_anchors) + }; + let adjusted_disjoint: Vec<_> = anchors_with_status + .chunks(2) + .map(|selection_anchors| { + let (anchor_ix, start, kept_start) = selection_anchors[0]; + let (_, end, kept_end) = selection_anchors[1]; + let selection = &self.disjoint[anchor_ix / 2]; + let kept_head = if selection.reversed { + kept_start + } else { + kept_end + }; + if !kept_head { + selections_with_lost_position.insert(selection.id, selection.head().excerpt_id); + } + + Selection { + id: selection.id, + start, + end, + reversed: selection.reversed, + goal: selection.goal, + } + }) + .collect(); + + if !adjusted_disjoint.is_empty() { + let resolved_selections = + resolve_multiple(adjusted_disjoint.iter(), &self.buffer()).collect(); + self.select::(resolved_selections); + } + + if let Some(pending) = pending.as_mut() { + let buffer = self.buffer(); + let anchors = + buffer.refresh_anchors([&pending.selection.start, &pending.selection.end]); + let (_, start, kept_start) = anchors[0]; + let (_, end, kept_end) = anchors[1]; + let kept_head = if pending.selection.reversed { + kept_start + } else { + kept_end + }; + if !kept_head { + selections_with_lost_position + .insert(pending.selection.id, pending.selection.head().excerpt_id); + } + + pending.selection.start = start; + pending.selection.end = end; + } + self.collection.pending = pending; + self.selections_changed = true; + + selections_with_lost_position + } +} + +impl<'a> Deref for MutableSelectionsCollection<'a> { + type Target = SelectionsCollection; + fn deref(&self) -> &Self::Target { + self.collection + } +} + +impl<'a> DerefMut for MutableSelectionsCollection<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + self.collection + } +} + +// Panics if passed selections are not in order +pub(crate) fn resolve_multiple<'a, D, I>( + selections: I, + snapshot: &MultiBufferSnapshot, +) -> impl 'a + Iterator> +where + D: TextDimension + Ord + Sub + std::fmt::Debug, + I: 'a + IntoIterator>, +{ + let (to_summarize, selections) = selections.into_iter().tee(); + let mut summaries = snapshot + .summaries_for_anchors::( + to_summarize + .flat_map(|s| [&s.start, &s.end]) + .collect::>(), + ) + .into_iter(); + selections.map(move |s| Selection { + id: s.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: s.reversed, + goal: s.goal, + }) +} + +fn resolve>( + selection: &Selection, + buffer: &MultiBufferSnapshot, +) -> Selection { + selection.map(|p| p.summary::(buffer)) +} diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs new file mode 100644 index 0000000..c39f38e --- /dev/null +++ b/crates/editor/src/tasks.rs @@ -0,0 +1,132 @@ +use crate::Editor; + +use std::{path::Path, sync::Arc}; + +use anyhow::Context; +use gpui::WindowContext; +use language::{BasicContextProvider, ContextProvider}; +use project::{Location, WorktreeId}; +use task::{TaskContext, TaskVariables, VariableName}; +use util::ResultExt; +use workspace::Workspace; + +pub(crate) fn task_context_for_location( + workspace: &Workspace, + location: Location, + cx: &mut WindowContext<'_>, +) -> Option { + let cwd = workspace::tasks::task_cwd(workspace, cx) + .log_err() + .flatten(); + + let 
buffer = location.buffer.clone(); + let language_context_provider = buffer + .read(cx) + .language() + .and_then(|language| language.context_provider()) + .unwrap_or_else(|| Arc::new(BasicContextProvider)); + + let worktree_abs_path = buffer + .read(cx) + .file() + .map(|file| WorktreeId::from_usize(file.worktree_id())) + .and_then(|worktree_id| { + workspace + .project() + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }); + let task_variables = combine_task_variables( + worktree_abs_path.as_deref(), + location, + language_context_provider.as_ref(), + cx, + ) + .log_err()?; + Some(TaskContext { + cwd, + task_variables, + }) +} + +pub(crate) fn task_context_with_editor( + workspace: &Workspace, + editor: &mut Editor, + cx: &mut WindowContext<'_>, +) -> Option { + let (selection, buffer, editor_snapshot) = { + let selection = editor.selections.newest::(cx); + let (buffer, _, _) = editor + .buffer() + .read(cx) + .point_to_buffer_offset(selection.start, cx)?; + let snapshot = editor.snapshot(cx); + Some((selection, buffer, snapshot)) + }?; + let selection_range = selection.range(); + let start = editor_snapshot + .display_snapshot + .buffer_snapshot + .anchor_after(selection_range.start) + .text_anchor; + let end = editor_snapshot + .display_snapshot + .buffer_snapshot + .anchor_after(selection_range.end) + .text_anchor; + let location = Location { + buffer, + range: start..end, + }; + task_context_for_location(workspace, location.clone(), cx).map(|mut task_context| { + for range in location + .buffer + .read(cx) + .snapshot() + .runnable_ranges(location.range) + { + for (capture_name, value) in range.extra_captures { + task_context + .task_variables + .insert(VariableName::Custom(capture_name.into()), value); + } + } + task_context + }) +} + +pub fn task_context(workspace: &Workspace, cx: &mut WindowContext<'_>) -> TaskContext { + let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + else { + return Default::default(); + }; + editor.update(cx, |editor, cx| { + task_context_with_editor(workspace, editor, cx).unwrap_or_default() + }) +} + +fn combine_task_variables( + worktree_abs_path: Option<&Path>, + location: Location, + context_provider: &dyn ContextProvider, + cx: &mut WindowContext<'_>, +) -> anyhow::Result { + if context_provider.is_basic() { + context_provider + .build_context(worktree_abs_path, &location, cx) + .context("building basic provider context") + } else { + let mut basic_context = BasicContextProvider + .build_context(worktree_abs_path, &location, cx) + .context("building basic default context")?; + basic_context.extend( + context_provider + .build_context(worktree_abs_path, &location, cx) + .context("building provider context ")?, + ); + Ok(basic_context) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs new file mode 100644 index 0000000..3a82992 --- /dev/null +++ b/crates/editor/src/test.rs @@ -0,0 +1,190 @@ +pub mod editor_lsp_test_context; +pub mod editor_test_context; + +use crate::{ + display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint}, + DisplayPoint, Editor, EditorMode, MultiBuffer, +}; + +use gpui::{Context, Font, FontFeatures, FontStyle, FontWeight, Model, Pixels, ViewContext}; + +use project::Project; +use util::test::{marked_text_offsets, marked_text_ranges}; + +#[cfg(test)] +#[ctor::ctor] +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} + +// Returns a snapshot from text containing '|' character markers 
with the markers removed, and DisplayPoints for each one. +pub fn marked_display_snapshot( + text: &str, + cx: &mut gpui::AppContext, +) -> (DisplaySnapshot, Vec) { + let (unmarked_text, markers) = marked_text_offsets(text); + + let font = Font { + family: "Courier".into(), + features: FontFeatures::default(), + weight: FontWeight::default(), + style: FontStyle::default(), + }; + let font_size: Pixels = 14usize.into(); + + let buffer = MultiBuffer::build_simple(&unmarked_text, cx); + let display_map = cx.new_model(|cx| DisplayMap::new(buffer, font, font_size, None, 1, 1, cx)); + let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); + let markers = markers + .into_iter() + .map(|offset| offset.to_display_point(&snapshot)) + .collect(); + + (snapshot, markers) +} + +pub fn select_ranges(editor: &mut Editor, marked_text: &str, cx: &mut ViewContext) { + let (unmarked_text, text_ranges) = marked_text_ranges(marked_text, true); + assert_eq!(editor.text(cx), unmarked_text); + editor.change_selections(None, cx, |s| s.select_ranges(text_ranges)); +} + +pub fn assert_text_with_selections( + editor: &mut Editor, + marked_text: &str, + cx: &mut ViewContext, +) { + let (unmarked_text, text_ranges) = marked_text_ranges(marked_text, true); + assert_eq!(editor.text(cx), unmarked_text); + assert_eq!(editor.selections.ranges(cx), text_ranges); +} + +// RA thinks this is dead code even though it is used in a whole lot of tests +#[allow(dead_code)] +#[cfg(any(test, feature = "test-support"))] +pub(crate) fn build_editor(buffer: Model, cx: &mut ViewContext) -> Editor { + Editor::new(EditorMode::Full, buffer, None, cx) +} + +pub(crate) fn build_editor_with_project( + project: Model, + buffer: Model, + cx: &mut ViewContext, +) -> Editor { + Editor::new(EditorMode::Full, buffer, Some(project), cx) +} + +#[cfg(any(test, feature = "test-support"))] +pub fn editor_hunks( + editor: &Editor, + snapshot: &DisplaySnapshot, + cx: &mut ViewContext<'_, Editor>, +) -> Vec<( + String, + git::diff::DiffHunkStatus, + std::ops::Range, +)> { + use multi_buffer::MultiBufferRow; + use text::Point; + + use crate::hunk_status; + + snapshot + .buffer_snapshot + .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) + .map(|hunk| { + let display_range = Point::new(hunk.associated_range.start.0, 0) + .to_display_point(snapshot) + .row() + ..Point::new(hunk.associated_range.end.0, 0) + .to_display_point(snapshot) + .row(); + let (_, buffer, _) = editor + .buffer() + .read(cx) + .excerpt_containing(Point::new(hunk.associated_range.start.0, 0), cx) + .expect("no excerpt for expanded buffer's hunk start"); + let diff_base = buffer + .read(cx) + .diff_base() + .expect("should have a diff base for expanded hunk") + .slice(hunk.diff_base_byte_range.clone()) + .to_string(); + (diff_base, hunk_status(&hunk), display_range) + }) + .collect() +} + +#[cfg(any(test, feature = "test-support"))] +pub fn expanded_hunks( + editor: &Editor, + snapshot: &DisplaySnapshot, + cx: &mut ViewContext<'_, Editor>, +) -> Vec<( + String, + git::diff::DiffHunkStatus, + std::ops::Range, +)> { + editor + .expanded_hunks + .hunks(false) + .map(|expanded_hunk| { + let hunk_display_range = expanded_hunk + .hunk_range + .start + .to_display_point(snapshot) + .row() + ..expanded_hunk + .hunk_range + .end + .to_display_point(snapshot) + .row(); + let (_, buffer, _) = editor + .buffer() + .read(cx) + .excerpt_containing(expanded_hunk.hunk_range.start, cx) + .expect("no excerpt for expanded buffer's hunk start"); + let diff_base = buffer + .read(cx) 
+ .diff_base() + .expect("should have a diff base for expanded hunk") + .slice(expanded_hunk.diff_base_byte_range.clone()) + .to_string(); + (diff_base, expanded_hunk.status, hunk_display_range) + }) + .collect() +} + +#[cfg(any(test, feature = "test-support"))] +pub fn expanded_hunks_background_highlights( + editor: &mut Editor, + cx: &mut gpui::WindowContext, +) -> Vec> { + use crate::DisplayRow; + + let mut highlights = Vec::new(); + + let mut range_start = 0; + let mut previous_highlighted_row = None; + for (highlighted_row, _) in editor.highlighted_display_rows(cx) { + match previous_highlighted_row { + Some(previous_row) => { + if previous_row + 1 != highlighted_row.0 { + highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row)); + range_start = highlighted_row.0; + } + } + None => { + range_start = highlighted_row.0; + } + } + previous_highlighted_row = Some(highlighted_row.0); + } + if let Some(previous_row) = previous_highlighted_row { + highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row)); + } + + highlights +} diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs new file mode 100644 index 0000000..848e47a --- /dev/null +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -0,0 +1,323 @@ +use std::{ + borrow::Cow, + ops::{Deref, DerefMut, Range}, + sync::Arc, +}; + +use anyhow::Result; +use serde_json::json; + +use crate::{Editor, ToPoint}; +use collections::HashSet; +use futures::Future; +use gpui::{View, ViewContext, VisualTestContext}; +use indoc::indoc; +use language::{ + point_to_lsp, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageQueries, +}; +use lsp::{notification, request}; +use multi_buffer::ToPointUtf16; +use project::Project; +use smol::stream::StreamExt; +use workspace::{AppState, Workspace, WorkspaceHandle}; + +use super::editor_test_context::{AssertionContextManager, EditorTestContext}; + +pub struct EditorLspTestContext { + pub cx: EditorTestContext, + pub lsp: lsp::FakeLanguageServer, + pub workspace: View, + pub buffer_lsp_url: lsp::Url, +} + +impl EditorLspTestContext { + pub async fn new( + language: Language, + capabilities: lsp::ServerCapabilities, + cx: &mut gpui::TestAppContext, + ) -> EditorLspTestContext { + let app_state = cx.update(AppState::test); + + cx.update(|cx| { + language::init(cx); + crate::init(cx); + workspace::init(app_state.clone(), cx); + Project::init_settings(cx); + }); + + let file_name = format!( + "file.{}", + language + .path_suffixes() + .first() + .expect("language must have a path suffix for EditorLspTestContext") + ); + + let project = Project::test(app_state.fs.clone(), [], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let mut fake_servers = language_registry.register_fake_lsp_adapter( + language.name().as_ref(), + FakeLspAdapter { + capabilities, + ..Default::default() + }, + ); + language_registry.add(Arc::new(language)); + + app_state + .fs + .as_fake() + .insert_tree("/root", json!({ "dir": { file_name.clone(): "" }})) + .await; + + let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + + let workspace = window.root_view(cx).unwrap(); + + let mut cx = VisualTestContext::from_window(*window.deref(), cx); + project + .update(&mut cx, |project, cx| { + project.find_or_create_local_worktree("/root", true, cx) + }) + .await + .unwrap(); + cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx)) + .await; + let file = cx.read(|cx| 
workspace.file_project_paths(cx)[0].clone()); + let item = workspace + .update(&mut cx, |workspace, cx| { + workspace.open_path(file, None, true, cx) + }) + .await + .expect("Could not open test file"); + let editor = cx.update(|cx| { + item.act_as::(cx) + .expect("Opened test file wasn't an editor") + }); + editor.update(&mut cx, |editor, cx| editor.focus(cx)); + + let lsp = fake_servers.next().await.unwrap(); + Self { + cx: EditorTestContext { + cx, + window: window.into(), + editor, + assertion_cx: AssertionContextManager::new(), + }, + lsp, + workspace, + buffer_lsp_url: lsp::Url::from_file_path(format!("/root/dir/{file_name}")).unwrap(), + } + } + + pub async fn new_rust( + capabilities: lsp::ServerCapabilities, + cx: &mut gpui::TestAppContext, + ) -> EditorLspTestContext { + let language = Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ) + .with_queries(LanguageQueries { + indents: Some(Cow::from(indoc! {r#" + [ + ((where_clause) _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) + (let_chain) + (await_expression) + ] @indent + + (_ "[" "]" @end) @indent + (_ "<" ">" @end) @indent + (_ "{" "}" @end) @indent + (_ "(" ")" @end) @indent"#})), + brackets: Some(Cow::from(indoc! {r#" + ("(" @open ")" @close) + ("[" @open "]" @close) + ("{" @open "}" @close) + ("<" @open ">" @close) + ("\"" @open "\"" @close) + (closure_parameters "|" @open "|" @close)"#})), + ..Default::default() + }) + .expect("Could not parse queries"); + + Self::new(language, capabilities, cx).await + } + + pub async fn new_typescript( + capabilities: lsp::ServerCapabilities, + cx: &mut gpui::TestAppContext, + ) -> EditorLspTestContext { + let mut word_characters: HashSet = Default::default(); + word_characters.insert('$'); + word_characters.insert('#'); + let language = Language::new( + LanguageConfig { + name: "Typescript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, + brackets: language::BracketPairConfig { + pairs: vec![language::BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }], + disabled_scopes_by_bracket_ix: Default::default(), + }, + word_characters, + ..Default::default() + }, + Some(tree_sitter_typescript::language_typescript()), + ) + .with_queries(LanguageQueries { + brackets: Some(Cow::from(indoc! {r#" + ("(" @open ")" @close) + ("[" @open "]" @close) + ("{" @open "}" @close) + ("<" @open ">" @close) + ("\"" @open "\"" @close)"#})), + indents: Some(Cow::from(indoc! 
{r#" + [ + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) + ] @indent + + (_ "[" "]" @end) @indent + (_ "<" ">" @end) @indent + (_ "{" "}" @end) @indent + (_ "(" ")" @end) @indent + "#})), + ..Default::default() + }) + .expect("Could not parse queries"); + + Self::new(language, capabilities, cx).await + } + + pub async fn new_html(cx: &mut gpui::TestAppContext) -> Self { + let language = Language::new( + LanguageConfig { + name: "HTML".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["html".into()], + ..Default::default() + }, + block_comment: Some(("".into())), + ..Default::default() + }, + Some(tree_sitter_html::language()), + ); + Self::new(language, Default::default(), cx).await + } + + // Constructs lsp range using a marked string with '[', ']' range delimiters + pub fn lsp_range(&mut self, marked_text: &str) -> lsp::Range { + let ranges = self.ranges(marked_text); + self.to_lsp_range(ranges[0].clone()) + } + + pub fn to_lsp_range(&mut self, range: Range) -> lsp::Range { + let snapshot = self.update_editor(|editor, cx| editor.snapshot(cx)); + let start_point = range.start.to_point(&snapshot.buffer_snapshot); + let end_point = range.end.to_point(&snapshot.buffer_snapshot); + + self.editor(|editor, cx| { + let buffer = editor.buffer().read(cx); + let start = point_to_lsp( + buffer + .point_to_buffer_offset(start_point, cx) + .unwrap() + .1 + .to_point_utf16(&buffer.read(cx)), + ); + let end = point_to_lsp( + buffer + .point_to_buffer_offset(end_point, cx) + .unwrap() + .1 + .to_point_utf16(&buffer.read(cx)), + ); + + lsp::Range { start, end } + }) + } + + pub fn to_lsp(&mut self, offset: usize) -> lsp::Position { + let snapshot = self.update_editor(|editor, cx| editor.snapshot(cx)); + let point = offset.to_point(&snapshot.buffer_snapshot); + + self.editor(|editor, cx| { + let buffer = editor.buffer().read(cx); + point_to_lsp( + buffer + .point_to_buffer_offset(point, cx) + .unwrap() + .1 + .to_point_utf16(&buffer.read(cx)), + ) + }) + } + + pub fn update_workspace(&mut self, update: F) -> T + where + F: FnOnce(&mut Workspace, &mut ViewContext) -> T, + { + self.workspace.update(&mut self.cx.cx, update) + } + + pub fn handle_request( + &self, + mut handler: F, + ) -> futures::channel::mpsc::UnboundedReceiver<()> + where + T: 'static + request::Request, + T::Params: 'static + Send, + F: 'static + Send + FnMut(lsp::Url, T::Params, gpui::AsyncAppContext) -> Fut, + Fut: 'static + Send + Future>, + { + let url = self.buffer_lsp_url.clone(); + self.lsp.handle_request::(move |params, cx| { + let url = url.clone(); + handler(url, params, cx) + }) + } + + pub fn notify(&self, params: T::Params) { + self.lsp.notify::(params); + } +} + +impl Deref for EditorLspTestContext { + type Target = EditorTestContext; + + fn deref(&self) -> &Self::Target { + &self.cx + } +} + +impl DerefMut for EditorLspTestContext { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.cx + } +} diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs new file mode 100644 index 0000000..c54892f --- /dev/null +++ b/crates/editor/src/test/editor_test_context.rs @@ -0,0 +1,459 @@ +use crate::{ + display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer, + RowExt, +}; +use collections::BTreeMap; +use futures::Future; +use gpui::{ + AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, 
View, ViewContext, + VisualTestContext, +}; +use indoc::indoc; +use itertools::Itertools; +use language::{Buffer, BufferSnapshot, LanguageRegistry}; +use multi_buffer::ExcerptRange; +use parking_lot::RwLock; +use project::{FakeFs, Project}; +use std::{ + any::TypeId, + ops::{Deref, DerefMut, Range}, + sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }, +}; +use ui::Context; +use util::{ + assert_set_eq, + test::{generate_marked_text, marked_text_ranges}, +}; + +use super::{build_editor, build_editor_with_project}; + +pub struct EditorTestContext { + pub cx: gpui::VisualTestContext, + pub window: AnyWindowHandle, + pub editor: View, + pub assertion_cx: AssertionContextManager, +} + +impl EditorTestContext { + pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext { + let fs = FakeFs::new(cx.executor()); + // fs.insert_file("/file", "".to_owned()).await; + fs.insert_tree( + "/root", + serde_json::json!({ + "file": "", + }), + ) + .await; + let project = Project::test(fs, ["/root".as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/root/file", cx) + }) + .await + .unwrap(); + let editor = cx.add_window(|cx| { + let editor = + build_editor_with_project(project, MultiBuffer::build_from_buffer(buffer, cx), cx); + editor.focus(cx); + editor + }); + let editor_view = editor.root_view(cx).unwrap(); + Self { + cx: VisualTestContext::from_window(*editor.deref(), cx), + window: editor.into(), + editor: editor_view, + assertion_cx: AssertionContextManager::new(), + } + } + + pub fn new_multibuffer( + cx: &mut gpui::TestAppContext, + excerpts: [&str; COUNT], + ) -> EditorTestContext { + let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let buffer = cx.new_model(|cx| { + for excerpt in excerpts.into_iter() { + let (text, ranges) = marked_text_ranges(excerpt, false); + let buffer = cx.new_model(|cx| Buffer::local(text, cx)); + multibuffer.push_excerpts( + buffer, + ranges.into_iter().map(|range| ExcerptRange { + context: range, + primary: None, + }), + cx, + ); + } + multibuffer + }); + + let editor = cx.add_window(|cx| { + let editor = build_editor(buffer, cx); + editor.focus(cx); + editor + }); + + let editor_view = editor.root_view(cx).unwrap(); + Self { + cx: VisualTestContext::from_window(*editor.deref(), cx), + window: editor.into(), + editor: editor_view, + assertion_cx: AssertionContextManager::new(), + } + } + + pub fn condition( + &self, + predicate: impl FnMut(&Editor, &AppContext) -> bool, + ) -> impl Future { + self.editor + .condition::(&self.cx, predicate) + } + + #[track_caller] + pub fn editor(&mut self, read: F) -> T + where + F: FnOnce(&Editor, &ViewContext) -> T, + { + self.editor + .update(&mut self.cx, |this, cx| read(&this, &cx)) + } + + #[track_caller] + pub fn update_editor(&mut self, update: F) -> T + where + F: FnOnce(&mut Editor, &mut ViewContext) -> T, + { + self.editor.update(&mut self.cx, update) + } + + pub fn multibuffer(&mut self, read: F) -> T + where + F: FnOnce(&MultiBuffer, &AppContext) -> T, + { + self.editor(|editor, cx| read(editor.buffer().read(cx), cx)) + } + + pub fn update_multibuffer(&mut self, update: F) -> T + where + F: FnOnce(&mut MultiBuffer, &mut ModelContext) -> T, + { + self.update_editor(|editor, cx| editor.buffer().update(cx, update)) + } + + pub fn buffer_text(&mut self) -> String { + self.multibuffer(|buffer, cx| buffer.snapshot(cx).text()) + } + + pub fn buffer(&mut self, read: F) -> T + where + F: FnOnce(&Buffer, &AppContext) -> T, + { + 
self.multibuffer(|multibuffer, cx| { + let buffer = multibuffer.as_singleton().unwrap().read(cx); + read(buffer, cx) + }) + } + + pub fn language_registry(&mut self) -> Arc { + self.editor(|editor, cx| { + editor + .project + .as_ref() + .unwrap() + .read(cx) + .languages() + .clone() + }) + } + + pub fn update_buffer(&mut self, update: F) -> T + where + F: FnOnce(&mut Buffer, &mut ModelContext) -> T, + { + self.update_multibuffer(|multibuffer, cx| { + let buffer = multibuffer.as_singleton().unwrap(); + buffer.update(cx, update) + }) + } + + pub fn buffer_snapshot(&mut self) -> BufferSnapshot { + self.buffer(|buffer, _| buffer.snapshot()) + } + + pub fn add_assertion_context(&self, context: String) -> ContextHandle { + self.assertion_cx.add_context(context) + } + + pub fn assertion_context(&self) -> String { + self.assertion_cx.context() + } + + // unlike cx.simulate_keystrokes(), this does not run_until_parked + // so you can use it to test detailed timing + pub fn simulate_keystroke(&mut self, keystroke_text: &str) { + let keystroke = Keystroke::parse(keystroke_text).unwrap(); + self.cx.dispatch_keystroke(self.window, keystroke); + } + + pub fn run_until_parked(&mut self) { + self.cx.background_executor.run_until_parked(); + } + + pub fn ranges(&mut self, marked_text: &str) -> Vec> { + let (unmarked_text, ranges) = marked_text_ranges(marked_text, false); + assert_eq!(self.buffer_text(), unmarked_text); + ranges + } + + pub fn display_point(&mut self, marked_text: &str) -> DisplayPoint { + let ranges = self.ranges(marked_text); + let snapshot = self + .editor + .update(&mut self.cx, |editor, cx| editor.snapshot(cx)); + ranges[0].start.to_display_point(&snapshot) + } + + pub fn pixel_position(&mut self, marked_text: &str) -> Point { + let display_point = self.display_point(marked_text); + self.pixel_position_for(display_point) + } + + pub fn pixel_position_for(&mut self, display_point: DisplayPoint) -> Point { + self.update_editor(|editor, cx| { + let newest_point = editor.selections.newest_display(cx).head(); + let pixel_position = editor.pixel_position_of_newest_cursor.unwrap(); + let line_height = editor + .style() + .unwrap() + .text + .line_height_in_pixels(cx.rem_size()); + let snapshot = editor.snapshot(cx); + let details = editor.text_layout_details(cx); + + let y = pixel_position.y + + line_height * (display_point.row().as_f32() - newest_point.row().as_f32()); + let x = pixel_position.x + snapshot.x_for_display_point(display_point, &details) + - snapshot.x_for_display_point(newest_point, &details); + Point::new(x, y) + }) + } + + // Returns anchors for the current buffer using `«` and `»` + pub fn text_anchor_range(&mut self, marked_text: &str) -> Range { + let ranges = self.ranges(marked_text); + let snapshot = self.buffer_snapshot(); + snapshot.anchor_before(ranges[0].start)..snapshot.anchor_after(ranges[0].end) + } + + pub fn set_diff_base(&mut self, diff_base: Option<&str>) { + self.update_buffer(|buffer, cx| buffer.set_diff_base(diff_base.map(ToOwned::to_owned), cx)); + } + + /// Change the editor's text and selections using a string containing + /// embedded range markers that represent the ranges and directions of + /// each selection. + /// + /// Returns a context handle so that assertion failures can print what + /// editor state was needed to cause the failure. + /// + /// See the `util::test::marked_text_ranges` function for more information. 
+ pub fn set_state(&mut self, marked_text: &str) -> ContextHandle { + let state_context = self.add_assertion_context(format!( + "Initial Editor State: \"{}\"", + marked_text.escape_debug() + )); + let (unmarked_text, selection_ranges) = marked_text_ranges(marked_text, true); + self.editor.update(&mut self.cx, |editor, cx| { + editor.set_text(unmarked_text, cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges(selection_ranges) + }) + }); + state_context + } + + /// Only change the editor's selections + pub fn set_selections_state(&mut self, marked_text: &str) -> ContextHandle { + let state_context = self.add_assertion_context(format!( + "Initial Editor State: \"{}\"", + marked_text.escape_debug() + )); + let (unmarked_text, selection_ranges) = marked_text_ranges(marked_text, true); + self.editor.update(&mut self.cx, |editor, cx| { + assert_eq!(editor.text(cx), unmarked_text); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select_ranges(selection_ranges) + }) + }); + state_context + } + + /// Make an assertion about the editor's text and the ranges and directions + /// of its selections using a string containing embedded range markers. + /// + /// See the `util::test::marked_text_ranges` function for more information. + #[track_caller] + pub fn assert_editor_state(&mut self, marked_text: &str) { + let (unmarked_text, expected_selections) = marked_text_ranges(marked_text, true); + let buffer_text = self.buffer_text(); + + if buffer_text != unmarked_text { + panic!("Unmarked text doesn't match buffer text\nBuffer text: {buffer_text:?}\nUnmarked text: {unmarked_text:?}\nRaw buffer text\n{buffer_text}\nRaw unmarked text\n{unmarked_text}"); + } + + self.assert_selections(expected_selections, marked_text.to_string()) + } + + pub fn editor_state(&mut self) -> String { + generate_marked_text(self.buffer_text().as_str(), &self.editor_selections(), true) + } + + #[track_caller] + pub fn assert_editor_background_highlights(&mut self, marked_text: &str) { + let expected_ranges = self.ranges(marked_text); + let actual_ranges: Vec> = self.update_editor(|editor, cx| { + let snapshot = editor.snapshot(cx); + editor + .background_highlights + .get(&TypeId::of::()) + .map(|h| h.1.clone()) + .unwrap_or_else(|| Arc::from([])) + .into_iter() + .map(|range| range.to_offset(&snapshot.buffer_snapshot)) + .collect() + }); + assert_set_eq!(actual_ranges, expected_ranges); + } + + #[track_caller] + pub fn assert_editor_text_highlights(&mut self, marked_text: &str) { + let expected_ranges = self.ranges(marked_text); + let snapshot = self.update_editor(|editor, cx| editor.snapshot(cx)); + let actual_ranges: Vec> = snapshot + .text_highlight_ranges::() + .map(|ranges| ranges.as_ref().clone().1) + .unwrap_or_default() + .into_iter() + .map(|range| range.to_offset(&snapshot.buffer_snapshot)) + .collect(); + assert_set_eq!(actual_ranges, expected_ranges); + } + + #[track_caller] + pub fn assert_editor_selections(&mut self, expected_selections: Vec>) { + let expected_marked_text = + generate_marked_text(&self.buffer_text(), &expected_selections, true); + self.assert_selections(expected_selections, expected_marked_text) + } + + #[track_caller] + fn editor_selections(&mut self) -> Vec> { + self.editor + .update(&mut self.cx, |editor, cx| { + editor.selections.all::(cx) + }) + .into_iter() + .map(|s| { + if s.reversed { + s.end..s.start + } else { + s.start..s.end + } + }) + .collect::>() + } + + #[track_caller] + fn assert_selections( + &mut self, + expected_selections: Vec>, 
+ expected_marked_text: String, + ) { + let actual_selections = self.editor_selections(); + let actual_marked_text = + generate_marked_text(&self.buffer_text(), &actual_selections, true); + if expected_selections != actual_selections { + panic!( + indoc! {" + + {}Editor has unexpected selections. + + Expected selections: + {} + + Actual selections: + {} + "}, + self.assertion_context(), + expected_marked_text, + actual_marked_text, + ); + } + } +} + +impl Deref for EditorTestContext { + type Target = gpui::VisualTestContext; + + fn deref(&self) -> &Self::Target { + &self.cx + } +} + +impl DerefMut for EditorTestContext { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.cx + } +} + +/// Tracks string context to be printed when assertions fail. +/// Often this is done by storing a context string in the manager and returning the handle. +#[derive(Clone)] +pub struct AssertionContextManager { + id: Arc, + contexts: Arc>>, +} + +impl AssertionContextManager { + pub fn new() -> Self { + Self { + id: Arc::new(AtomicUsize::new(0)), + contexts: Arc::new(RwLock::new(BTreeMap::new())), + } + } + + pub fn add_context(&self, context: String) -> ContextHandle { + let id = self.id.fetch_add(1, Ordering::Relaxed); + let mut contexts = self.contexts.write(); + contexts.insert(id, context); + ContextHandle { + id, + manager: self.clone(), + } + } + + pub fn context(&self) -> String { + let contexts = self.contexts.read(); + format!("\n{}\n", contexts.values().join("\n")) + } +} + +/// Used to track the lifetime of a piece of context so that it can be provided when an assertion fails. +/// For example, in the EditorTestContext, `set_state` returns a context handle so that if an assertion fails, +/// the state that was set initially for the failure can be printed in the error message +pub struct ContextHandle { + id: usize, + manager: AssertionContextManager, +} + +impl Drop for ContextHandle { + fn drop(&mut self) { + let mut contexts = self.manager.contexts.write(); + contexts.remove(&self.id); + } +} diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml new file mode 100644 index 0000000..96c9459 --- /dev/null +++ b/crates/extension/Cargo.toml @@ -0,0 +1,58 @@ +[package] +name = "extension" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/extension_store.rs" +doctest = false + +[dependencies] +anyhow.workspace = true +async-compression.workspace = true +async-tar.workspace = true +async-trait.workspace = true +cap-std.workspace = true +client.workspace = true +collections.workspace = true +fs.workspace = true +futures.workspace = true +gpui.workspace = true +http.workspace = true +isahc.workspace = true +language.workspace = true +log.workspace = true +lsp.workspace = true +node_runtime.workspace = true +project.workspace = true +schemars.workspace = true +semantic_version.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +theme.workspace = true +toml.workspace = true +url.workspace = true +util.workspace = true +wasm-encoder.workspace = true +wasmtime.workspace = true +wasmtime-wasi.workspace = true +wasmparser.workspace = true +wit-component.workspace = true +task.workspace = true +serde_json_lenient.workspace = true + +[dev-dependencies] +ctor.workspace = true +env_logger.workspace = true +parking_lot.workspace = true + +fs = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +language = 
{ workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } diff --git a/crates/extension/LICENSE-GPL b/crates/extension/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/extension/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs new file mode 100644 index 0000000..3c53579 --- /dev/null +++ b/crates/extension/src/extension_builder.rs @@ -0,0 +1,569 @@ +use crate::wasm_host::parse_wasm_extension_version; +use crate::ExtensionManifest; +use crate::{extension_manifest::ExtensionLibraryKind, GrammarManifestEntry}; +use anyhow::{anyhow, bail, Context as _, Result}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use futures::io::BufReader; +use futures::AsyncReadExt; +use http::{self, AsyncBody, HttpClient}; +use serde::Deserialize; +use std::{ + env, fs, mem, + path::{Path, PathBuf}, + process::{Command, Stdio}, + sync::Arc, +}; +use wasm_encoder::{ComponentSectionId, Encode as _, RawSection, Section as _}; +use wasmparser::Parser; +use wit_component::ComponentEncoder; + +/// Currently, we compile with Rust's `wasm32-wasi` target, which works with WASI `preview1`. +/// But the WASM component model is based on WASI `preview2`. So we need an 'adapter' WASM +/// module, which implements the `preview1` interface in terms of `preview2`. +/// +/// Once Rust 1.78 is released, there will be a `wasm32-wasip2` target available, so we will +/// not need the adapter anymore. +const RUST_TARGET: &str = "wasm32-wasi"; +const WASI_ADAPTER_URL: &str = + "https://github.com/bytecodealliance/wasmtime/releases/download/v18.0.2/wasi_snapshot_preview1.reactor.wasm"; + +/// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc +/// and clang's runtime library. The `wasi-sdk` provides these binaries. +/// +/// Once Clang 17 and its wasm target are available via system package managers, we won't need +/// to download this. 
+const WASI_SDK_URL: &str = "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-21/"; +const WASI_SDK_ASSET_NAME: Option<&str> = if cfg!(target_os = "macos") { + Some("wasi-sdk-21.0-macos.tar.gz") +} else if cfg!(target_os = "linux") { + Some("wasi-sdk-21.0-linux.tar.gz") +} else if cfg!(target_os = "windows") { + Some("wasi-sdk-21.0.m-mingw.tar.gz") +} else { + None +}; + +pub struct ExtensionBuilder { + cache_dir: PathBuf, + pub http: Arc, +} + +pub struct CompileExtensionOptions { + pub release: bool, +} + +#[derive(Deserialize)] +struct CargoToml { + package: CargoTomlPackage, +} + +#[derive(Deserialize)] +struct CargoTomlPackage { + name: String, +} + +impl ExtensionBuilder { + pub fn new(cache_dir: PathBuf) -> Self { + Self { + cache_dir, + http: http::client(None), + } + } + + pub async fn compile_extension( + &self, + extension_dir: &Path, + extension_manifest: &mut ExtensionManifest, + options: CompileExtensionOptions, + ) -> Result<()> { + populate_defaults(extension_manifest, &extension_dir)?; + + if extension_dir.is_relative() { + bail!( + "extension dir {} is not an absolute path", + extension_dir.display() + ); + } + + fs::create_dir_all(&self.cache_dir).context("failed to create cache dir")?; + + if extension_manifest.lib.kind == Some(ExtensionLibraryKind::Rust) { + log::info!("compiling Rust extension {}", extension_dir.display()); + self.compile_rust_extension(extension_dir, extension_manifest, options) + .await + .context("failed to compile Rust extension")?; + } + + for (grammar_name, grammar_metadata) in &extension_manifest.grammars { + self.compile_grammar(extension_dir, grammar_name.as_ref(), grammar_metadata) + .await + .with_context(|| format!("failed to compile grammar '{grammar_name}'"))?; + } + + log::info!("finished compiling extension {}", extension_dir.display()); + Ok(()) + } + + async fn compile_rust_extension( + &self, + extension_dir: &Path, + manifest: &mut ExtensionManifest, + options: CompileExtensionOptions, + ) -> Result<(), anyhow::Error> { + self.install_rust_wasm_target_if_needed()?; + let adapter_bytes = self.install_wasi_preview1_adapter_if_needed().await?; + + let cargo_toml_content = fs::read_to_string(&extension_dir.join("Cargo.toml"))?; + let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?; + + log::info!("compiling rust extension {}", extension_dir.display()); + let output = Command::new("cargo") + .args(["build", "--target", RUST_TARGET]) + .args(options.release.then_some("--release")) + .arg("--target-dir") + .arg(extension_dir.join("target")) + .current_dir(&extension_dir) + .output() + .context("failed to run `cargo`")?; + if !output.status.success() { + bail!( + "failed to build extension {}", + String::from_utf8_lossy(&output.stderr) + ); + } + + let mut wasm_path = PathBuf::from(extension_dir); + wasm_path.extend([ + "target", + RUST_TARGET, + if options.release { "release" } else { "debug" }, + &cargo_toml + .package + .name + // The wasm32-wasi target normalizes `-` in package names to `_` in the resulting `.wasm` file. + .replace('-', "_"), + ]); + wasm_path.set_extension("wasm"); + + let wasm_bytes = fs::read(&wasm_path) + .with_context(|| format!("failed to read output module `{}`", wasm_path.display()))?; + + let encoder = ComponentEncoder::default() + .module(&wasm_bytes)? + .adapter("wasi_snapshot_preview1", &adapter_bytes) + .context("failed to load adapter module")? 
+ .validate(true); + + let component_bytes = encoder + .encode() + .context("failed to encode wasm component")?; + + let component_bytes = self + .strip_custom_sections(&component_bytes) + .context("failed to strip debug sections from wasm component")?; + + let wasm_extension_api_version = + parse_wasm_extension_version(&manifest.id, &component_bytes) + .context("compiled wasm did not contain a valid zed extension api version")?; + manifest.lib.version = Some(wasm_extension_api_version); + + fs::write(extension_dir.join("extension.wasm"), &component_bytes) + .context("failed to write extension.wasm")?; + + Ok(()) + } + + async fn compile_grammar( + &self, + extension_dir: &Path, + grammar_name: &str, + grammar_metadata: &GrammarManifestEntry, + ) -> Result<()> { + let clang_path = self.install_wasi_sdk_if_needed().await?; + + let mut grammar_repo_dir = extension_dir.to_path_buf(); + grammar_repo_dir.extend(["grammars", grammar_name]); + + let mut grammar_wasm_path = grammar_repo_dir.clone(); + grammar_wasm_path.set_extension("wasm"); + + log::info!("checking out {grammar_name} parser"); + self.checkout_repo( + &grammar_repo_dir, + &grammar_metadata.repository, + &grammar_metadata.rev, + )?; + + let base_grammar_path = grammar_metadata + .path + .as_ref() + .map(|path| grammar_repo_dir.join(path)) + .unwrap_or(grammar_repo_dir); + + let src_path = base_grammar_path.join("src"); + let parser_path = src_path.join("parser.c"); + let scanner_path = src_path.join("scanner.c"); + + log::info!("compiling {grammar_name} parser"); + let clang_output = Command::new(&clang_path) + .args(["-fPIC", "-shared", "-Os"]) + .arg(format!("-Wl,--export=tree_sitter_{grammar_name}")) + .arg("-o") + .arg(&grammar_wasm_path) + .arg("-I") + .arg(&src_path) + .arg(&parser_path) + .args(scanner_path.exists().then_some(scanner_path)) + .output() + .context("failed to run clang")?; + if !clang_output.status.success() { + bail!( + "failed to compile {} parser with clang: {}", + grammar_name, + String::from_utf8_lossy(&clang_output.stderr), + ); + } + + Ok(()) + } + + fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> { + let git_dir = directory.join(".git"); + + if directory.exists() { + let remotes_output = Command::new("git") + .arg("--git-dir") + .arg(&git_dir) + .args(["remote", "-v"]) + .output()?; + let has_remote = remotes_output.status.success() + && String::from_utf8_lossy(&remotes_output.stdout) + .lines() + .any(|line| { + let mut parts = line.split(|c: char| c.is_whitespace()); + parts.next() == Some("origin") && parts.any(|part| part == url) + }); + if !has_remote { + bail!( + "grammar directory '{}' already exists, but is not a git clone of '{}'", + directory.display(), + url + ); + } + } else { + fs::create_dir_all(&directory).with_context(|| { + format!("failed to create grammar directory {}", directory.display(),) + })?; + let init_output = Command::new("git") + .arg("init") + .current_dir(&directory) + .output()?; + if !init_output.status.success() { + bail!( + "failed to run `git init` in directory '{}'", + directory.display() + ); + } + + let remote_add_output = Command::new("git") + .arg("--git-dir") + .arg(&git_dir) + .args(["remote", "add", "origin", url]) + .output() + .context("failed to execute `git remote add`")?; + if !remote_add_output.status.success() { + bail!( + "failed to add remote {url} for git repository {}", + git_dir.display() + ); + } + } + + let fetch_output = Command::new("git") + .arg("--git-dir") + .arg(&git_dir) + .args(["fetch", "--depth", "1", 
"origin", &rev]) + .output() + .context("failed to execute `git fetch`")?; + + let checkout_output = Command::new("git") + .arg("--git-dir") + .arg(&git_dir) + .args(["checkout", &rev]) + .current_dir(&directory) + .output() + .context("failed to execute `git checkout`")?; + if !checkout_output.status.success() { + if !fetch_output.status.success() { + bail!( + "failed to fetch revision {} in directory '{}'", + rev, + directory.display() + ); + } + bail!( + "failed to checkout revision {} in directory '{}': {}", + rev, + directory.display(), + String::from_utf8_lossy(&checkout_output.stderr) + ); + } + + Ok(()) + } + + fn install_rust_wasm_target_if_needed(&self) -> Result<()> { + let rustc_output = Command::new("rustc") + .arg("--print") + .arg("sysroot") + .output() + .context("failed to run rustc")?; + if !rustc_output.status.success() { + bail!( + "failed to retrieve rust sysroot: {}", + String::from_utf8_lossy(&rustc_output.stderr) + ); + } + + let sysroot = PathBuf::from(String::from_utf8(rustc_output.stdout)?.trim()); + if sysroot.join("lib/rustlib").join(RUST_TARGET).exists() { + return Ok(()); + } + + let output = Command::new("rustup") + .args(["target", "add", RUST_TARGET]) + .stderr(Stdio::inherit()) + .stdout(Stdio::inherit()) + .output() + .context("failed to run `rustup target add`")?; + if !output.status.success() { + bail!("failed to install the `{RUST_TARGET}` target"); + } + + Ok(()) + } + + async fn install_wasi_preview1_adapter_if_needed(&self) -> Result> { + let cache_path = self.cache_dir.join("wasi_snapshot_preview1.reactor.wasm"); + if let Ok(content) = fs::read(&cache_path) { + if Parser::is_core_wasm(&content) { + return Ok(content); + } + } + + fs::remove_file(&cache_path).ok(); + + log::info!( + "downloading wasi adapter module to {}", + cache_path.display() + ); + let mut response = self + .http + .get(WASI_ADAPTER_URL, AsyncBody::default(), true) + .await?; + + let mut content = Vec::new(); + let mut body = BufReader::new(response.body_mut()); + body.read_to_end(&mut content).await?; + + fs::write(&cache_path, &content) + .with_context(|| format!("failed to save file {}", cache_path.display()))?; + + if !Parser::is_core_wasm(&content) { + bail!("downloaded wasi adapter is invalid"); + } + Ok(content) + } + + async fn install_wasi_sdk_if_needed(&self) -> Result { + let url = if let Some(asset_name) = WASI_SDK_ASSET_NAME { + format!("{WASI_SDK_URL}/{asset_name}") + } else { + bail!("wasi-sdk is not available for platform {}", env::consts::OS); + }; + + let wasi_sdk_dir = self.cache_dir.join("wasi-sdk"); + let mut clang_path = wasi_sdk_dir.clone(); + clang_path.extend(["bin", &format!("clang{}", env::consts::EXE_SUFFIX)]); + + if fs::metadata(&clang_path).map_or(false, |metadata| metadata.is_file()) { + return Ok(clang_path); + } + + let mut tar_out_dir = wasi_sdk_dir.clone(); + tar_out_dir.set_extension("archive"); + + fs::remove_dir_all(&wasi_sdk_dir).ok(); + fs::remove_dir_all(&tar_out_dir).ok(); + + log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display()); + let mut response = self.http.get(&url, AsyncBody::default(), true).await?; + let body = BufReader::new(response.body_mut()); + let body = GzipDecoder::new(body); + let tar = Archive::new(body); + tar.unpack(&tar_out_dir) + .await + .context("failed to unpack wasi-sdk archive")?; + + let inner_dir = fs::read_dir(&tar_out_dir)? + .next() + .ok_or_else(|| anyhow!("no content"))? + .context("failed to read contents of extracted wasi archive directory")? 
+ .path(); + fs::rename(&inner_dir, &wasi_sdk_dir).context("failed to move extracted wasi dir")?; + fs::remove_dir_all(&tar_out_dir).ok(); + + Ok(clang_path) + } + + // This was adapted from: + // https://github.com/bytecodealliance/wasm-tools/1791a8f139722e9f8679a2bd3d8e423e55132b22/src/bin/wasm-tools/strip.rs + fn strip_custom_sections(&self, input: &Vec) -> Result> { + use wasmparser::Payload::*; + + let strip_custom_section = |name: &str| name.starts_with(".debug"); + + let mut output = Vec::new(); + let mut stack = Vec::new(); + + for payload in Parser::new(0).parse_all(input) { + let payload = payload?; + + // Track nesting depth, so that we don't mess with inner producer sections: + match payload { + Version { encoding, .. } => { + output.extend_from_slice(match encoding { + wasmparser::Encoding::Component => &wasm_encoder::Component::HEADER, + wasmparser::Encoding::Module => &wasm_encoder::Module::HEADER, + }); + } + ModuleSection { .. } | ComponentSection { .. } => { + stack.push(mem::take(&mut output)); + continue; + } + End { .. } => { + let mut parent = match stack.pop() { + Some(c) => c, + None => break, + }; + if output.starts_with(&wasm_encoder::Component::HEADER) { + parent.push(ComponentSectionId::Component as u8); + output.encode(&mut parent); + } else { + parent.push(ComponentSectionId::CoreModule as u8); + output.encode(&mut parent); + } + output = parent; + } + _ => {} + } + + match &payload { + CustomSection(c) => { + if strip_custom_section(c.name()) { + continue; + } + } + + _ => {} + } + + if let Some((id, range)) = payload.as_section() { + RawSection { + id, + data: &input[range], + } + .append_to(&mut output); + } + } + + Ok(output) + } +} + +fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Result<()> { + // For legacy extensions on the v0 schema (aka, using `extension.json`), clear out any existing + // contents of the computed fields, since we don't care what the existing values are. + if manifest.schema_version.is_v0() { + manifest.languages.clear(); + manifest.grammars.clear(); + manifest.themes.clear(); + } + + let cargo_toml_path = extension_path.join("Cargo.toml"); + if cargo_toml_path.exists() { + manifest.lib.kind = Some(ExtensionLibraryKind::Rust); + } + + let languages_dir = extension_path.join("languages"); + if languages_dir.exists() { + for entry in fs::read_dir(&languages_dir).context("failed to list languages dir")? { + let entry = entry?; + let language_dir = entry.path(); + let config_path = language_dir.join("config.toml"); + if config_path.exists() { + let relative_language_dir = + language_dir.strip_prefix(extension_path)?.to_path_buf(); + if !manifest.languages.contains(&relative_language_dir) { + manifest.languages.push(relative_language_dir); + } + } + } + } + + let themes_dir = extension_path.join("themes"); + if themes_dir.exists() { + for entry in fs::read_dir(&themes_dir).context("failed to list themes dir")? { + let entry = entry?; + let theme_path = entry.path(); + if theme_path.extension() == Some("json".as_ref()) { + let relative_theme_path = theme_path.strip_prefix(extension_path)?.to_path_buf(); + if !manifest.themes.contains(&relative_theme_path) { + manifest.themes.push(relative_theme_path); + } + } + } + } + + // For legacy extensions on the v0 schema (aka, using `extension.json`), we want to populate the grammars in + // the manifest using the contents of the `grammars` directory. 
+ if manifest.schema_version.is_v0() { + let grammars_dir = extension_path.join("grammars"); + if grammars_dir.exists() { + for entry in fs::read_dir(&grammars_dir).context("failed to list grammars dir")? { + let entry = entry?; + let grammar_path = entry.path(); + if grammar_path.extension() == Some("toml".as_ref()) { + #[derive(Deserialize)] + struct GrammarConfigToml { + pub repository: String, + pub commit: String, + #[serde(default)] + pub path: Option, + } + + let grammar_config = fs::read_to_string(&grammar_path)?; + let grammar_config: GrammarConfigToml = toml::from_str(&grammar_config)?; + + let grammar_name = grammar_path + .file_stem() + .and_then(|stem| stem.to_str()) + .ok_or_else(|| anyhow!("no grammar name"))?; + if !manifest.grammars.contains_key(grammar_name) { + manifest.grammars.insert( + grammar_name.into(), + GrammarManifestEntry { + repository: grammar_config.repository, + rev: grammar_config.commit, + path: grammar_config.path, + }, + ); + } + } + } + } + } + + Ok(()) +} diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs new file mode 100644 index 0000000..bc18843 --- /dev/null +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -0,0 +1,593 @@ +use crate::wasm_host::{ + wit::{self, LanguageServerConfig}, + WasmExtension, WasmHost, +}; +use anyhow::{anyhow, Context, Result}; +use async_trait::async_trait; +use collections::HashMap; +use futures::{Future, FutureExt}; +use gpui::AsyncAppContext; +use language::{ + CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate, +}; +use lsp::{CodeActionKind, LanguageServerBinary}; +use serde::Serialize; +use serde_json::Value; +use std::ops::Range; +use std::{ + any::Any, + path::{Path, PathBuf}, + pin::Pin, + sync::Arc, +}; +use util::{maybe, ResultExt}; +use wasmtime_wasi::WasiView as _; + +pub struct ExtensionLspAdapter { + pub(crate) extension: WasmExtension, + pub(crate) language_server_id: LanguageServerName, + pub(crate) config: LanguageServerConfig, + pub(crate) host: Arc, +} + +#[async_trait(?Send)] +impl LspAdapter for ExtensionLspAdapter { + fn name(&self) -> LanguageServerName { + LanguageServerName(self.config.name.clone().into()) + } + + fn get_language_server_command<'a>( + self: Arc, + _: Arc, + _: Arc, + delegate: Arc, + _: futures::lock::MutexGuard<'a, Option>, + _: &'a mut AsyncAppContext, + ) -> Pin>>> { + async move { + let command = self + .extension + .call({ + let this = self.clone(); + |extension, store| { + async move { + let resource = store.data_mut().table().push(delegate)?; + let command = extension + .call_language_server_command( + store, + &this.language_server_id, + &this.config, + resource, + ) + .await? + .map_err(|e| anyhow!("{}", e))?; + anyhow::Ok(command) + } + .boxed() + } + }) + .await?; + + let path = self + .host + .path_from_extension(&self.extension.manifest.id, command.command.as_ref()); + + // TODO: This should now be done via the `zed::make_file_executable` function in + // Zed extension API, but we're leaving these existing usages in place temporarily + // to avoid any compatibility issues between Zed and the extension versions. 
+ // + // We can remove once the following extension versions no longer see any use: + // - toml@0.0.2 + // - zig@0.0.1 + if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref()) + && path.starts_with(&self.host.work_dir) + { + #[cfg(not(windows))] + { + use std::fs::{self, Permissions}; + use std::os::unix::fs::PermissionsExt; + + fs::set_permissions(&path, Permissions::from_mode(0o755)) + .context("failed to set file permissions")?; + } + } + + Ok(LanguageServerBinary { + path, + arguments: command.args.into_iter().map(|arg| arg.into()).collect(), + env: Some(command.env.into_iter().collect()), + }) + } + .boxed_local() + } + + async fn fetch_latest_server_version( + &self, + _: &dyn LspAdapterDelegate, + ) -> Result> { + unreachable!("get_language_server_command is overridden") + } + + async fn fetch_server_binary( + &self, + _: Box, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Result { + unreachable!("get_language_server_command is overridden") + } + + async fn cached_server_binary( + &self, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + unreachable!("get_language_server_command is overridden") + } + + async fn installation_test_binary(&self, _: PathBuf) -> Option { + None + } + + fn code_action_kinds(&self) -> Option> { + let code_action_kinds = self + .extension + .manifest + .language_servers + .get(&self.language_server_id) + .and_then(|server| server.code_action_kinds.clone()); + + code_action_kinds.or(Some(vec![ + CodeActionKind::EMPTY, + CodeActionKind::QUICKFIX, + CodeActionKind::REFACTOR, + CodeActionKind::REFACTOR_EXTRACT, + CodeActionKind::SOURCE, + ])) + } + + fn language_ids(&self) -> HashMap { + // TODO: The language IDs can be provided via the language server options + // in `extension.toml now but we're leaving these existing usages in place temporarily + // to avoid any compatibility issues between Zed and the extension versions. + // + // We can remove once the following extension versions no longer see any use: + // - php@0.0.1 + if self.extension.manifest.id.as_ref() == "php" { + return HashMap::from_iter([("PHP".into(), "php".into())]); + } + + self.extension + .manifest + .language_servers + .get(&LanguageServerName(self.config.name.clone().into())) + .map(|server| server.language_ids.clone()) + .unwrap_or_default() + } + + async fn initialization_options( + self: Arc, + delegate: &Arc, + ) -> Result> { + let delegate = delegate.clone(); + let json_options = self + .extension + .call({ + let this = self.clone(); + |extension, store| { + async move { + let resource = store.data_mut().table().push(delegate)?; + let options = extension + .call_language_server_initialization_options( + store, + &this.language_server_id, + &this.config, + resource, + ) + .await? + .map_err(|e| anyhow!("{}", e))?; + anyhow::Ok(options) + } + .boxed() + } + }) + .await?; + Ok(if let Some(json_options) = json_options { + serde_json::from_str(&json_options).with_context(|| { + format!("failed to parse initialization_options from extension: {json_options}") + })? + } else { + None + }) + } + + async fn workspace_configuration( + self: Arc, + delegate: &Arc, + _cx: &mut AsyncAppContext, + ) -> Result { + let delegate = delegate.clone(); + let json_options: Option = self + .extension + .call({ + let this = self.clone(); + |extension, store| { + async move { + let resource = store.data_mut().table().push(delegate)?; + let options = extension + .call_language_server_workspace_configuration( + store, + &this.language_server_id, + resource, + ) + .await? 
+ .map_err(|e| anyhow!("{}", e))?; + anyhow::Ok(options) + } + .boxed() + } + }) + .await?; + Ok(if let Some(json_options) = json_options { + serde_json::from_str(&json_options).with_context(|| { + format!("failed to parse initialization_options from extension: {json_options}") + })? + } else { + serde_json::json!({}) + }) + } + + async fn labels_for_completions( + self: Arc, + completions: &[lsp::CompletionItem], + language: &Arc, + ) -> Result>> { + let completions = completions + .into_iter() + .map(|completion| wit::Completion::from(completion.clone())) + .collect::>(); + + let labels = self + .extension + .call({ + let this = self.clone(); + |extension, store| { + async move { + extension + .call_labels_for_completions( + store, + &this.language_server_id, + completions, + ) + .await? + .map_err(|e| anyhow!("{}", e)) + } + .boxed() + } + }) + .await?; + + Ok(labels_from_wit(labels, language)) + } + + async fn labels_for_symbols( + self: Arc, + symbols: &[(String, lsp::SymbolKind)], + language: &Arc, + ) -> Result>> { + let symbols = symbols + .into_iter() + .cloned() + .map(|(name, kind)| wit::Symbol { + name, + kind: kind.into(), + }) + .collect::>(); + + let labels = self + .extension + .call({ + let this = self.clone(); + |extension, store| { + async move { + extension + .call_labels_for_symbols(store, &this.language_server_id, symbols) + .await? + .map_err(|e| anyhow!("{}", e)) + } + .boxed() + } + }) + .await?; + + Ok(labels_from_wit(labels, language)) + } +} + +fn labels_from_wit( + labels: Vec>, + language: &Arc, +) -> Vec> { + labels + .into_iter() + .map(|label| { + let label = label?; + let runs = if label.code.is_empty() { + Vec::new() + } else { + language.highlight_text(&label.code.as_str().into(), 0..label.code.len()) + }; + build_code_label(&label, &runs, &language) + }) + .collect() +} + +fn build_code_label( + label: &wit::CodeLabel, + parsed_runs: &[(Range, HighlightId)], + language: &Arc, +) -> Option { + let mut text = String::new(); + let mut runs = vec![]; + + for span in &label.spans { + match span { + wit::CodeLabelSpan::CodeRange(range) => { + let range = Range::from(*range); + let code_span = &label.code.get(range.clone())?; + let mut input_ix = range.start; + let mut output_ix = text.len(); + for (run_range, id) in parsed_runs { + if run_range.start >= range.end { + break; + } + if run_range.end <= input_ix { + continue; + } + + if run_range.start > input_ix { + let len = run_range.start - input_ix; + output_ix += len; + input_ix += len; + } + + let len = range.end.min(run_range.end) - input_ix; + runs.push((output_ix..output_ix + len, *id)); + output_ix += len; + input_ix += len; + } + + text.push_str(code_span); + } + wit::CodeLabelSpan::Literal(span) => { + let highlight_id = language + .grammar() + .zip(span.highlight_name.as_ref()) + .and_then(|(grammar, highlight_name)| { + grammar.highlight_id_for_name(&highlight_name) + }) + .unwrap_or_default(); + let ix = text.len(); + runs.push((ix..ix + span.text.len(), highlight_id)); + text.push_str(&span.text); + } + } + } + + let filter_range = Range::from(label.filter_range); + text.get(filter_range.clone())?; + Some(CodeLabel { + text, + runs, + filter_range, + }) +} + +impl From for Range { + fn from(range: wit::Range) -> Self { + let start = range.start as usize; + let end = range.end as usize; + start..end + } +} + +impl From for wit::Completion { + fn from(value: lsp::CompletionItem) -> Self { + Self { + label: value.label, + detail: value.detail, + kind: value.kind.map(Into::into), + 
insert_text_format: value.insert_text_format.map(Into::into), + } + } +} + +impl From for wit::CompletionKind { + fn from(value: lsp::CompletionItemKind) -> Self { + match value { + lsp::CompletionItemKind::TEXT => Self::Text, + lsp::CompletionItemKind::METHOD => Self::Method, + lsp::CompletionItemKind::FUNCTION => Self::Function, + lsp::CompletionItemKind::CONSTRUCTOR => Self::Constructor, + lsp::CompletionItemKind::FIELD => Self::Field, + lsp::CompletionItemKind::VARIABLE => Self::Variable, + lsp::CompletionItemKind::CLASS => Self::Class, + lsp::CompletionItemKind::INTERFACE => Self::Interface, + lsp::CompletionItemKind::MODULE => Self::Module, + lsp::CompletionItemKind::PROPERTY => Self::Property, + lsp::CompletionItemKind::UNIT => Self::Unit, + lsp::CompletionItemKind::VALUE => Self::Value, + lsp::CompletionItemKind::ENUM => Self::Enum, + lsp::CompletionItemKind::KEYWORD => Self::Keyword, + lsp::CompletionItemKind::SNIPPET => Self::Snippet, + lsp::CompletionItemKind::COLOR => Self::Color, + lsp::CompletionItemKind::FILE => Self::File, + lsp::CompletionItemKind::REFERENCE => Self::Reference, + lsp::CompletionItemKind::FOLDER => Self::Folder, + lsp::CompletionItemKind::ENUM_MEMBER => Self::EnumMember, + lsp::CompletionItemKind::CONSTANT => Self::Constant, + lsp::CompletionItemKind::STRUCT => Self::Struct, + lsp::CompletionItemKind::EVENT => Self::Event, + lsp::CompletionItemKind::OPERATOR => Self::Operator, + lsp::CompletionItemKind::TYPE_PARAMETER => Self::TypeParameter, + _ => Self::Other(extract_int(value)), + } + } +} + +impl From for wit::InsertTextFormat { + fn from(value: lsp::InsertTextFormat) -> Self { + match value { + lsp::InsertTextFormat::PLAIN_TEXT => Self::PlainText, + lsp::InsertTextFormat::SNIPPET => Self::Snippet, + _ => Self::Other(extract_int(value)), + } + } +} + +impl From for wit::SymbolKind { + fn from(value: lsp::SymbolKind) -> Self { + match value { + lsp::SymbolKind::FILE => Self::File, + lsp::SymbolKind::MODULE => Self::Module, + lsp::SymbolKind::NAMESPACE => Self::Namespace, + lsp::SymbolKind::PACKAGE => Self::Package, + lsp::SymbolKind::CLASS => Self::Class, + lsp::SymbolKind::METHOD => Self::Method, + lsp::SymbolKind::PROPERTY => Self::Property, + lsp::SymbolKind::FIELD => Self::Field, + lsp::SymbolKind::CONSTRUCTOR => Self::Constructor, + lsp::SymbolKind::ENUM => Self::Enum, + lsp::SymbolKind::INTERFACE => Self::Interface, + lsp::SymbolKind::FUNCTION => Self::Function, + lsp::SymbolKind::VARIABLE => Self::Variable, + lsp::SymbolKind::CONSTANT => Self::Constant, + lsp::SymbolKind::STRING => Self::String, + lsp::SymbolKind::NUMBER => Self::Number, + lsp::SymbolKind::BOOLEAN => Self::Boolean, + lsp::SymbolKind::ARRAY => Self::Array, + lsp::SymbolKind::OBJECT => Self::Object, + lsp::SymbolKind::KEY => Self::Key, + lsp::SymbolKind::NULL => Self::Null, + lsp::SymbolKind::ENUM_MEMBER => Self::EnumMember, + lsp::SymbolKind::STRUCT => Self::Struct, + lsp::SymbolKind::EVENT => Self::Event, + lsp::SymbolKind::OPERATOR => Self::Operator, + lsp::SymbolKind::TYPE_PARAMETER => Self::TypeParameter, + _ => Self::Other(extract_int(value)), + } + } +} + +fn extract_int(value: T) -> i32 { + maybe!({ + let kind = serde_json::to_value(&value)?; + serde_json::from_value(kind) + }) + .log_err() + .unwrap_or(-1) +} + +#[test] +fn test_build_code_label() { + use util::test::marked_text_ranges; + + let (code, code_ranges) = marked_text_ranges( + "«const» «a»: «fn»(«Bcd»(«Efgh»)) -> «Ijklm» = pqrs.tuv", + false, + ); + let code_runs = code_ranges + .into_iter() + .map(|range| (range, 
HighlightId(0))) + .collect::>(); + + let label = build_code_label( + &wit::CodeLabel { + spans: vec![ + wit::CodeLabelSpan::CodeRange(wit::Range { + start: code.find("pqrs").unwrap() as u32, + end: code.len() as u32, + }), + wit::CodeLabelSpan::CodeRange(wit::Range { + start: code.find(": fn").unwrap() as u32, + end: code.find(" = ").unwrap() as u32, + }), + ], + filter_range: wit::Range { + start: 0, + end: "pqrs.tuv".len() as u32, + }, + code, + }, + &code_runs, + &language::PLAIN_TEXT, + ) + .unwrap(); + + let (label_text, label_ranges) = + marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false); + let label_runs = label_ranges + .into_iter() + .map(|range| (range, HighlightId(0))) + .collect::>(); + + assert_eq!( + label, + CodeLabel { + text: label_text, + runs: label_runs, + filter_range: label.filter_range.clone() + } + ) +} + +#[test] +fn test_build_code_label_with_invalid_ranges() { + use util::test::marked_text_ranges; + + let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false); + let code_runs = code_ranges + .into_iter() + .map(|range| (range, HighlightId(0))) + .collect::>(); + + // A span uses a code range that is invalid because it starts inside of + // a multi-byte character. + let label = build_code_label( + &wit::CodeLabel { + spans: vec![ + wit::CodeLabelSpan::CodeRange(wit::Range { + start: code.find('B').unwrap() as u32, + end: code.find(" = ").unwrap() as u32, + }), + wit::CodeLabelSpan::CodeRange(wit::Range { + start: code.find('🏀').unwrap() as u32 + 1, + end: code.len() as u32, + }), + ], + filter_range: wit::Range { + start: 0, + end: "B".len() as u32, + }, + code, + }, + &code_runs, + &language::PLAIN_TEXT, + ); + assert!(label.is_none()); + + // Filter range extends beyond actual text + let label = build_code_label( + &wit::CodeLabel { + spans: vec![wit::CodeLabelSpan::Literal(wit::CodeLabelSpanLiteral { + text: "abc".into(), + highlight_name: Some("type".into()), + })], + filter_range: wit::Range { start: 0, end: 5 }, + code: String::new(), + }, + &code_runs, + &language::PLAIN_TEXT, + ); + assert!(label.is_none()); +} diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs new file mode 100644 index 0000000..bd315f0 --- /dev/null +++ b/crates/extension/src/extension_manifest.rs @@ -0,0 +1,194 @@ +use anyhow::{anyhow, Context, Result}; +use collections::{BTreeMap, HashMap}; +use fs::Fs; +use language::LanguageServerName; +use semantic_version::SemanticVersion; +use serde::{Deserialize, Serialize}; +use std::{ + ffi::OsStr, + fmt, + path::{Path, PathBuf}, + sync::Arc, +}; + +/// This is the old version of the extension manifest, from when it was `extension.json`. +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +pub struct OldExtensionManifest { + pub name: String, + pub version: Arc, + + #[serde(default)] + pub description: Option, + #[serde(default)] + pub repository: Option, + #[serde(default)] + pub authors: Vec, + + #[serde(default)] + pub themes: BTreeMap, PathBuf>, + #[serde(default)] + pub languages: BTreeMap, PathBuf>, + #[serde(default)] + pub grammars: BTreeMap, PathBuf>, +} + +/// The schema version of the [`ExtensionManifest`]. 
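+/// Version zero corresponds to the legacy `extension.json` manifest format; later schema versions are declared in `extension.toml`.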
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] +pub struct SchemaVersion(pub i32); + +impl fmt::Display for SchemaVersion { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl SchemaVersion { + pub const ZERO: Self = Self(0); + + pub fn is_v0(&self) -> bool { + self == &Self::ZERO + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +pub struct ExtensionManifest { + pub id: Arc, + pub name: String, + pub version: Arc, + pub schema_version: SchemaVersion, + + #[serde(default)] + pub description: Option, + #[serde(default)] + pub repository: Option, + #[serde(default)] + pub authors: Vec, + #[serde(default)] + pub lib: LibManifestEntry, + + #[serde(default)] + pub themes: Vec, + #[serde(default)] + pub languages: Vec, + #[serde(default)] + pub grammars: BTreeMap, GrammarManifestEntry>, + #[serde(default)] + pub language_servers: BTreeMap, +} + +#[derive(Clone, Default, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub struct LibManifestEntry { + pub kind: Option, + pub version: Option, +} + +#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub enum ExtensionLibraryKind { + Rust, +} + +#[derive(Clone, Default, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub struct GrammarManifestEntry { + pub repository: String, + #[serde(alias = "commit")] + pub rev: String, + #[serde(default)] + pub path: Option, +} + +#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub struct LanguageServerManifestEntry { + /// Deprecated in favor of `languages`. + #[serde(default)] + language: Option>, + /// The list of languages this language server should work with. + #[serde(default)] + languages: Vec>, + #[serde(default)] + pub language_ids: HashMap, + #[serde(default)] + pub code_action_kinds: Option>, +} + +impl LanguageServerManifestEntry { + /// Returns the list of languages for the language server. + /// + /// Prefer this over accessing the `language` or `languages` fields directly, + /// as we currently support both. + /// + /// We can replace this with just field access for the `languages` field once + /// we have removed `language`. 
+ pub fn languages(&self) -> impl IntoIterator> + '_ { + let language = if self.languages.is_empty() { + self.language.clone() + } else { + None + }; + self.languages.iter().cloned().chain(language) + } +} + +impl ExtensionManifest { + pub async fn load(fs: Arc, extension_dir: &Path) -> Result { + let extension_name = extension_dir + .file_name() + .and_then(OsStr::to_str) + .ok_or_else(|| anyhow!("invalid extension name"))?; + + let mut extension_manifest_path = extension_dir.join("extension.json"); + if fs.is_file(&extension_manifest_path).await { + let manifest_content = fs + .load(&extension_manifest_path) + .await + .with_context(|| format!("failed to load {extension_name} extension.json"))?; + let manifest_json = serde_json::from_str::(&manifest_content) + .with_context(|| { + format!("invalid extension.json for extension {extension_name}") + })?; + + Ok(manifest_from_old_manifest(manifest_json, extension_name)) + } else { + extension_manifest_path.set_extension("toml"); + let manifest_content = fs + .load(&extension_manifest_path) + .await + .with_context(|| format!("failed to load {extension_name} extension.toml"))?; + toml::from_str(&manifest_content) + .with_context(|| format!("invalid extension.json for extension {extension_name}")) + } + } +} + +fn manifest_from_old_manifest( + manifest_json: OldExtensionManifest, + extension_id: &str, +) -> ExtensionManifest { + ExtensionManifest { + id: extension_id.into(), + name: manifest_json.name, + version: manifest_json.version, + description: manifest_json.description, + repository: manifest_json.repository, + authors: manifest_json.authors, + schema_version: SchemaVersion::ZERO, + lib: Default::default(), + themes: { + let mut themes = manifest_json.themes.into_values().collect::>(); + themes.sort(); + themes.dedup(); + themes + }, + languages: { + let mut languages = manifest_json.languages.into_values().collect::>(); + languages.sort(); + languages.dedup(); + languages + }, + grammars: manifest_json + .grammars + .into_keys() + .map(|grammar_name| (grammar_name, Default::default())) + .collect(), + language_servers: Default::default(), + } +} diff --git a/crates/extension/src/extension_settings.rs b/crates/extension/src/extension_settings.rs new file mode 100644 index 0000000..a2ab7ac --- /dev/null +++ b/crates/extension/src/extension_settings.rs @@ -0,0 +1,48 @@ +use anyhow::Result; +use collections::HashMap; +use gpui::AppContext; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; +use std::sync::Arc; + +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] +pub struct ExtensionSettings { + /// The extensions that should be automatically installed by Zed. + /// + /// This is used to make functionality provided by extensions (e.g., language support) + /// available out-of-the-box. + #[serde(default)] + pub auto_install_extensions: HashMap, bool>, + #[serde(default)] + pub auto_update_extensions: HashMap, bool>, +} + +impl ExtensionSettings { + /// Returns whether the given extension should be auto-installed. 
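+ /// Defaults to `true` for extensions that are not listed in `auto_install_extensions`.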
+ pub fn should_auto_install(&self, extension_id: &str) -> bool { + self.auto_install_extensions + .get(extension_id) + .copied() + .unwrap_or(true) + } + + pub fn should_auto_update(&self, extension_id: &str) -> bool { + self.auto_update_extensions + .get(extension_id) + .copied() + .unwrap_or(true) + } +} + +impl Settings for ExtensionSettings { + const KEY: Option<&'static str> = None; + + type FileContent = Self; + + fn load(sources: SettingsSources, _cx: &mut AppContext) -> Result { + SettingsSources::::json_merge_with( + [sources.default].into_iter().chain(sources.user), + ) + } +} diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs new file mode 100644 index 0000000..2f220b6 --- /dev/null +++ b/crates/extension/src/extension_store.rs @@ -0,0 +1,1346 @@ +pub mod extension_builder; +mod extension_lsp_adapter; +mod extension_manifest; +mod extension_settings; +mod wasm_host; + +#[cfg(test)] +mod extension_store_test; + +use crate::extension_manifest::SchemaVersion; +use crate::{extension_lsp_adapter::ExtensionLspAdapter, wasm_host::wit}; +use anyhow::{anyhow, bail, Context as _, Result}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use client::{telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse}; +use collections::{btree_map, BTreeMap, HashSet}; +use extension_builder::{CompileExtensionOptions, ExtensionBuilder}; +use fs::{Fs, RemoveOptions}; +use futures::{ + channel::{ + mpsc::{unbounded, UnboundedSender}, + oneshot, + }, + io::BufReader, + select_biased, AsyncReadExt as _, Future, FutureExt as _, StreamExt as _, +}; +use gpui::{ + actions, AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Task, + WeakModel, +}; +use http::{AsyncBody, HttpClient, HttpClientWithUrl}; +use language::{ + ContextProviderWithTasks, LanguageConfig, LanguageMatcher, LanguageQueries, LanguageRegistry, + QUERY_FILENAME_PREFIXES, +}; +use node_runtime::NodeRuntime; +use semantic_version::SemanticVersion; +use serde::{Deserialize, Serialize}; +use settings::Settings; +use std::ops::RangeInclusive; +use std::str::FromStr; +use std::{ + cmp::Ordering, + path::{self, Path, PathBuf}, + sync::Arc, + time::{Duration, Instant}, +}; +use theme::{ThemeRegistry, ThemeSettings}; +use url::Url; +use util::{maybe, paths::EXTENSIONS_DIR, ResultExt}; +use wasm_host::{ + wit::{is_supported_wasm_api_version, wasm_api_version_range}, + WasmExtension, WasmHost, +}; + +pub use extension_manifest::{ + ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, OldExtensionManifest, +}; +pub use extension_settings::ExtensionSettings; + +const RELOAD_DEBOUNCE_DURATION: Duration = Duration::from_millis(200); +const FS_WATCH_LATENCY: Duration = Duration::from_millis(100); + +/// The current extension [`SchemaVersion`] supported by Zed. +const CURRENT_SCHEMA_VERSION: SchemaVersion = SchemaVersion(1); + +/// Returns the [`SchemaVersion`] range that is compatible with this version of Zed. +pub fn schema_version_range() -> RangeInclusive { + SchemaVersion::ZERO..=CURRENT_SCHEMA_VERSION +} + +/// Returns whether the given extension version is compatible with this version of Zed. 
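+/// Compatibility requires that the extension's schema version not exceed `CURRENT_SCHEMA_VERSION` and that its declared Wasm API version, if any, is supported.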
+pub fn is_version_compatible(extension_version: &ExtensionMetadata) -> bool { + let schema_version = extension_version.manifest.schema_version.unwrap_or(0); + if CURRENT_SCHEMA_VERSION.0 < schema_version { + return false; + } + + if let Some(wasm_api_version) = extension_version + .manifest + .wasm_api_version + .as_ref() + .and_then(|wasm_api_version| SemanticVersion::from_str(wasm_api_version).ok()) + { + if !is_supported_wasm_api_version(wasm_api_version) { + return false; + } + } + + true +} + +pub struct ExtensionStore { + builder: Arc, + extension_index: ExtensionIndex, + fs: Arc, + http_client: Arc, + telemetry: Option>, + reload_tx: UnboundedSender>>, + reload_complete_senders: Vec>, + installed_dir: PathBuf, + outstanding_operations: BTreeMap, ExtensionOperation>, + index_path: PathBuf, + language_registry: Arc, + theme_registry: Arc, + modified_extensions: HashSet>, + wasm_host: Arc, + wasm_extensions: Vec<(Arc, WasmExtension)>, + tasks: Vec>, +} + +#[derive(Clone, Copy)] +pub enum ExtensionOperation { + Upgrade, + Install, + Remove, +} + +#[derive(Clone)] +pub enum Event { + ExtensionsUpdated, + StartedReloading, + ExtensionInstalled(Arc), + ExtensionFailedToLoad(Arc), +} + +impl EventEmitter for ExtensionStore {} + +struct GlobalExtensionStore(Model); + +impl Global for GlobalExtensionStore {} + +#[derive(Debug, Deserialize, Serialize, Default, PartialEq, Eq)] +pub struct ExtensionIndex { + pub extensions: BTreeMap, ExtensionIndexEntry>, + pub themes: BTreeMap, ExtensionIndexThemeEntry>, + pub languages: BTreeMap, ExtensionIndexLanguageEntry>, +} + +#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub struct ExtensionIndexEntry { + pub manifest: Arc, + pub dev: bool, +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Deserialize, Serialize)] +pub struct ExtensionIndexThemeEntry { + extension: Arc, + path: PathBuf, +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Deserialize, Serialize)] +pub struct ExtensionIndexLanguageEntry { + extension: Arc, + path: PathBuf, + matcher: LanguageMatcher, + grammar: Option>, +} + +actions!(zed, [ReloadExtensions]); + +pub fn init( + fs: Arc, + client: Arc, + node_runtime: Arc, + language_registry: Arc, + theme_registry: Arc, + cx: &mut AppContext, +) { + ExtensionSettings::register(cx); + + let store = cx.new_model(move |cx| { + ExtensionStore::new( + EXTENSIONS_DIR.clone(), + None, + fs, + client.http_client().clone(), + Some(client.telemetry().clone()), + node_runtime, + language_registry, + theme_registry, + cx, + ) + }); + + cx.on_action(|_: &ReloadExtensions, cx| { + let store = cx.global::().0.clone(); + store.update(cx, |store, cx| drop(store.reload(None, cx))); + }); + + cx.set_global(GlobalExtensionStore(store)); +} + +impl ExtensionStore { + pub fn try_global(cx: &AppContext) -> Option> { + cx.try_global::() + .map(|store| store.0.clone()) + } + + pub fn global(cx: &AppContext) -> Model { + cx.global::().0.clone() + } + + #[allow(clippy::too_many_arguments)] + pub fn new( + extensions_dir: PathBuf, + build_dir: Option, + fs: Arc, + http_client: Arc, + telemetry: Option>, + node_runtime: Arc, + language_registry: Arc, + theme_registry: Arc, + cx: &mut ModelContext, + ) -> Self { + let work_dir = extensions_dir.join("work"); + let build_dir = build_dir.unwrap_or_else(|| extensions_dir.join("build")); + let installed_dir = extensions_dir.join("installed"); + let index_path = extensions_dir.join("index.json"); + + let (reload_tx, mut reload_rx) = unbounded(); + let mut this = Self { + 
extension_index: Default::default(), + installed_dir, + index_path, + builder: Arc::new(ExtensionBuilder::new(build_dir)), + outstanding_operations: Default::default(), + modified_extensions: Default::default(), + reload_complete_senders: Vec::new(), + wasm_host: WasmHost::new( + fs.clone(), + http_client.clone(), + node_runtime, + language_registry.clone(), + work_dir, + cx, + ), + wasm_extensions: Vec::new(), + fs, + http_client, + telemetry, + language_registry, + theme_registry, + reload_tx, + tasks: Vec::new(), + }; + + // The extensions store maintains an index file, which contains a complete + // list of the installed extensions and the resources that they provide. + // This index is loaded synchronously on startup. + let (index_content, index_metadata, extensions_metadata) = + cx.background_executor().block(async { + futures::join!( + this.fs.load(&this.index_path), + this.fs.metadata(&this.index_path), + this.fs.metadata(&this.installed_dir), + ) + }); + + // Normally, there is no need to rebuild the index. But if the index file + // is invalid or is out-of-date according to the filesystem mtimes, then + // it must be asynchronously rebuilt. + let mut extension_index = ExtensionIndex::default(); + let mut extension_index_needs_rebuild = true; + if let Some(index_content) = index_content.ok() { + if let Some(index) = serde_json::from_str(&index_content).log_err() { + extension_index = index; + if let (Ok(Some(index_metadata)), Ok(Some(extensions_metadata))) = + (index_metadata, extensions_metadata) + { + if index_metadata.mtime > extensions_metadata.mtime { + extension_index_needs_rebuild = false; + } + } + } + } + + // Immediately load all of the extensions in the initial manifest. If the + // index needs to be rebuild, then enqueue + let load_initial_extensions = this.extensions_updated(extension_index, cx); + let mut reload_future = None; + if extension_index_needs_rebuild { + reload_future = Some(this.reload(None, cx)); + } + + cx.spawn(|this, mut cx| async move { + if let Some(future) = reload_future { + future.await; + } + this.update(&mut cx, |this, cx| this.auto_install_extensions(cx)) + .ok(); + this.update(&mut cx, |this, cx| this.check_for_updates(cx)) + .ok(); + }) + .detach(); + + // Perform all extension loading in a single task to ensure that we + // never attempt to simultaneously load/unload extensions from multiple + // parallel tasks. + this.tasks.push(cx.spawn(|this, mut cx| { + async move { + load_initial_extensions.await; + + let mut debounce_timer = cx + .background_executor() + .spawn(futures::future::pending()) + .fuse(); + loop { + select_biased! { + _ = debounce_timer => { + let index = this + .update(&mut cx, |this, cx| this.rebuild_extension_index(cx))? + .await; + this.update(&mut cx, |this, cx| this.extensions_updated(index, cx))? + .await; + } + extension_id = reload_rx.next() => { + let Some(extension_id) = extension_id else { break; }; + this.update(&mut cx, |this, _| { + this.modified_extensions.extend(extension_id); + })?; + debounce_timer = cx + .background_executor() + .timer(RELOAD_DEBOUNCE_DURATION) + .fuse(); + } + } + } + + anyhow::Ok(()) + } + .map(drop) + })); + + // Watch the installed extensions directory for changes. Whenever changes are + // detected, rebuild the extension index, and load/unload any extensions that + // have been added, removed, or modified. 
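+ // Change notifications are forwarded through `reload_tx`; the loader task above debounces them by `RELOAD_DEBOUNCE_DURATION` before rebuilding the index.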
+ this.tasks.push(cx.background_executor().spawn({ + let fs = this.fs.clone(); + let reload_tx = this.reload_tx.clone(); + let installed_dir = this.installed_dir.clone(); + async move { + let mut paths = fs.watch(&installed_dir, FS_WATCH_LATENCY).await; + while let Some(paths) = paths.next().await { + for path in paths { + let Ok(event_path) = path.strip_prefix(&installed_dir) else { + continue; + }; + + if let Some(path::Component::Normal(extension_dir_name)) = + event_path.components().next() + { + if let Some(extension_id) = extension_dir_name.to_str() { + reload_tx.unbounded_send(Some(extension_id.into())).ok(); + } + } + } + } + } + })); + + this + } + + fn reload( + &mut self, + modified_extension: Option>, + cx: &mut ModelContext, + ) -> impl Future { + let (tx, rx) = oneshot::channel(); + self.reload_complete_senders.push(tx); + self.reload_tx + .unbounded_send(modified_extension) + .expect("reload task exited"); + cx.emit(Event::StartedReloading); + + async move { + rx.await.ok(); + } + } + + fn extensions_dir(&self) -> PathBuf { + self.installed_dir.clone() + } + + pub fn outstanding_operations(&self) -> &BTreeMap, ExtensionOperation> { + &self.outstanding_operations + } + + pub fn installed_extensions(&self) -> &BTreeMap, ExtensionIndexEntry> { + &self.extension_index.extensions + } + + pub fn dev_extensions(&self) -> impl Iterator> { + self.extension_index + .extensions + .values() + .filter_map(|extension| extension.dev.then_some(&extension.manifest)) + } + + /// Returns the names of themes provided by extensions. + pub fn extension_themes<'a>( + &'a self, + extension_id: &'a str, + ) -> impl Iterator> { + self.extension_index + .themes + .iter() + .filter_map(|(name, theme)| theme.extension.as_ref().eq(extension_id).then_some(name)) + } + + pub fn fetch_extensions( + &self, + search: Option<&str>, + cx: &mut ModelContext, + ) -> Task>> { + let version = CURRENT_SCHEMA_VERSION.to_string(); + let mut query = vec![("max_schema_version", version.as_str())]; + if let Some(search) = search { + query.push(("filter", search)); + } + + self.fetch_extensions_from_api("/extensions", &query, cx) + } + + pub fn fetch_extensions_with_update_available( + &mut self, + cx: &mut ModelContext, + ) -> Task>> { + let schema_versions = schema_version_range(); + let wasm_api_versions = wasm_api_version_range(); + let extension_settings = ExtensionSettings::get_global(cx); + let extension_ids = self + .extension_index + .extensions + .keys() + .map(|id| id.as_ref()) + .filter(|id| extension_settings.should_auto_update(id)) + .collect::>() + .join(","); + let task = self.fetch_extensions_from_api( + "/extensions/updates", + &[ + ("min_schema_version", &schema_versions.start().to_string()), + ("max_schema_version", &schema_versions.end().to_string()), + ( + "min_wasm_api_version", + &wasm_api_versions.start().to_string(), + ), + ("max_wasm_api_version", &wasm_api_versions.end().to_string()), + ("ids", &extension_ids), + ], + cx, + ); + cx.spawn(move |this, mut cx| async move { + let extensions = task.await?; + this.update(&mut cx, |this, _cx| { + extensions + .into_iter() + .filter(|extension| { + this.extension_index.extensions.get(&extension.id).map_or( + true, + |installed_extension| { + installed_extension.manifest.version != extension.manifest.version + }, + ) + }) + .collect() + }) + }) + } + + pub fn fetch_extension_versions( + &self, + extension_id: &str, + cx: &mut ModelContext, + ) -> Task>> { + self.fetch_extensions_from_api(&format!("/extensions/{extension_id}"), &[], cx) + } + + /// 
Installs any extensions that should be included with Zed by default. + /// + /// This can be used to make certain functionality provided by extensions + /// available out-of-the-box. + pub fn auto_install_extensions(&mut self, cx: &mut ModelContext) { + let extension_settings = ExtensionSettings::get_global(cx); + + let extensions_to_install = extension_settings + .auto_install_extensions + .keys() + .filter(|extension_id| extension_settings.should_auto_install(extension_id)) + .filter(|extension_id| { + let is_already_installed = self + .extension_index + .extensions + .contains_key(extension_id.as_ref()); + !is_already_installed + }) + .cloned() + .collect::>(); + + cx.spawn(move |this, mut cx| async move { + for extension_id in extensions_to_install { + this.update(&mut cx, |this, cx| { + this.install_latest_extension(extension_id.clone(), cx); + }) + .ok(); + } + }) + .detach(); + } + + pub fn check_for_updates(&mut self, cx: &mut ModelContext) { + let task = self.fetch_extensions_with_update_available(cx); + cx.spawn(move |this, mut cx| async move { + Self::upgrade_extensions(this, task.await?, &mut cx).await + }) + .detach(); + } + + async fn upgrade_extensions( + this: WeakModel, + extensions: Vec, + cx: &mut AsyncAppContext, + ) -> Result<()> { + for extension in extensions { + let task = this.update(cx, |this, cx| { + if let Some(installed_extension) = + this.extension_index.extensions.get(&extension.id) + { + let installed_version = + SemanticVersion::from_str(&installed_extension.manifest.version).ok()?; + let latest_version = + SemanticVersion::from_str(&extension.manifest.version).ok()?; + + if installed_version >= latest_version { + return None; + } + } + + Some(this.upgrade_extension(extension.id, extension.manifest.version, cx)) + })?; + + if let Some(task) = task { + task.await.log_err(); + } + } + anyhow::Ok(()) + } + + fn fetch_extensions_from_api( + &self, + path: &str, + query: &[(&str, &str)], + cx: &mut ModelContext<'_, ExtensionStore>, + ) -> Task>> { + let url = self.http_client.build_zed_api_url(path, &query); + let http_client = self.http_client.clone(); + cx.spawn(move |_, _| async move { + let mut response = http_client + .get(&url?.as_ref(), AsyncBody::empty(), true) + .await?; + + let mut body = Vec::new(); + response + .body_mut() + .read_to_end(&mut body) + .await + .context("error reading extensions")?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let response: GetExtensionsResponse = serde_json::from_slice(&body)?; + Ok(response.data) + }) + } + + pub fn install_extension( + &mut self, + extension_id: Arc, + version: Arc, + cx: &mut ModelContext, + ) { + self.install_or_upgrade_extension(extension_id, version, ExtensionOperation::Install, cx) + .detach_and_log_err(cx); + } + + fn install_or_upgrade_extension_at_endpoint( + &mut self, + extension_id: Arc, + url: Url, + operation: ExtensionOperation, + cx: &mut ModelContext, + ) -> Task> { + let extension_dir = self.installed_dir.join(extension_id.as_ref()); + let http_client = self.http_client.clone(); + let fs = self.fs.clone(); + + match self.outstanding_operations.entry(extension_id.clone()) { + btree_map::Entry::Occupied(_) => return Task::ready(Ok(())), + btree_map::Entry::Vacant(e) => e.insert(operation), + }; + cx.notify(); + + cx.spawn(move |this, mut cx| async move { + let _finish = util::defer({ + let this = this.clone(); + let mut cx = cx.clone(); + 
let extension_id = extension_id.clone(); + move || { + this.update(&mut cx, |this, cx| { + this.outstanding_operations.remove(extension_id.as_ref()); + cx.notify(); + }) + .ok(); + } + }); + + let mut response = http_client + .get(&url.as_ref(), Default::default(), true) + .await + .map_err(|err| anyhow!("error downloading extension: {}", err))?; + + fs.remove_dir( + &extension_dir, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await?; + + let content_length = response + .headers() + .get(isahc::http::header::CONTENT_LENGTH) + .and_then(|value| value.to_str().ok()?.parse::().ok()); + + let mut body = BufReader::new(response.body_mut()); + let mut tar_gz_bytes = Vec::new(); + body.read_to_end(&mut tar_gz_bytes).await?; + + if let Some(content_length) = content_length { + let actual_len = tar_gz_bytes.len(); + if content_length != actual_len { + bail!("downloaded extension size {actual_len} does not match content length {content_length}"); + } + } + let decompressed_bytes = GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice())); + let archive = Archive::new(decompressed_bytes); + archive.unpack(extension_dir).await?; + this.update(&mut cx, |this, cx| { + this.reload(Some(extension_id.clone()), cx) + })? + .await; + + match operation { + ExtensionOperation::Install => { + this.update(&mut cx, |_, cx| { + cx.emit(Event::ExtensionInstalled(extension_id)); + }) + .ok(); + } + _ => {} + } + + anyhow::Ok(()) + }) + } + + pub fn install_latest_extension( + &mut self, + extension_id: Arc, + cx: &mut ModelContext, + ) { + log::info!("installing extension {extension_id} latest version"); + + let schema_versions = schema_version_range(); + let wasm_api_versions = wasm_api_version_range(); + + let Some(url) = self + .http_client + .build_zed_api_url( + &format!("/extensions/{extension_id}/download"), + &[ + ("min_schema_version", &schema_versions.start().to_string()), + ("max_schema_version", &schema_versions.end().to_string()), + ( + "min_wasm_api_version", + &wasm_api_versions.start().to_string(), + ), + ("max_wasm_api_version", &wasm_api_versions.end().to_string()), + ], + ) + .log_err() + else { + return; + }; + + self.install_or_upgrade_extension_at_endpoint( + extension_id, + url, + ExtensionOperation::Install, + cx, + ) + .detach_and_log_err(cx); + } + + pub fn upgrade_extension( + &mut self, + extension_id: Arc, + version: Arc, + cx: &mut ModelContext, + ) -> Task> { + self.install_or_upgrade_extension(extension_id, version, ExtensionOperation::Upgrade, cx) + } + + fn install_or_upgrade_extension( + &mut self, + extension_id: Arc, + version: Arc, + operation: ExtensionOperation, + cx: &mut ModelContext, + ) -> Task> { + log::info!("installing extension {extension_id} {version}"); + let Some(url) = self + .http_client + .build_zed_api_url( + &format!("/extensions/{extension_id}/{version}/download"), + &[], + ) + .log_err() + else { + return Task::ready(Ok(())); + }; + + self.install_or_upgrade_extension_at_endpoint(extension_id, url, operation, cx) + } + + pub fn uninstall_extension(&mut self, extension_id: Arc, cx: &mut ModelContext) { + let extension_dir = self.installed_dir.join(extension_id.as_ref()); + let fs = self.fs.clone(); + + match self.outstanding_operations.entry(extension_id.clone()) { + btree_map::Entry::Occupied(_) => return, + btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove), + }; + + cx.spawn(move |this, mut cx| async move { + let _finish = util::defer({ + let this = this.clone(); + let mut cx = cx.clone(); + let 
extension_id = extension_id.clone(); + move || { + this.update(&mut cx, |this, cx| { + this.outstanding_operations.remove(extension_id.as_ref()); + cx.notify(); + }) + .ok(); + } + }); + + fs.remove_dir( + &extension_dir, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await?; + + this.update(&mut cx, |this, cx| this.reload(None, cx))? + .await; + anyhow::Ok(()) + }) + .detach_and_log_err(cx) + } + + pub fn install_dev_extension( + &mut self, + extension_source_path: PathBuf, + cx: &mut ModelContext, + ) -> Task> { + let extensions_dir = self.extensions_dir(); + let fs = self.fs.clone(); + let builder = self.builder.clone(); + + cx.spawn(move |this, mut cx| async move { + let mut extension_manifest = + ExtensionManifest::load(fs.clone(), &extension_source_path).await?; + let extension_id = extension_manifest.id.clone(); + + if !this.update(&mut cx, |this, cx| { + match this.outstanding_operations.entry(extension_id.clone()) { + btree_map::Entry::Occupied(_) => return false, + btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove), + }; + cx.notify(); + true + })? { + return Ok(()); + } + + let _finish = util::defer({ + let this = this.clone(); + let mut cx = cx.clone(); + let extension_id = extension_id.clone(); + move || { + this.update(&mut cx, |this, cx| { + this.outstanding_operations.remove(extension_id.as_ref()); + cx.notify(); + }) + .ok(); + } + }); + + cx.background_executor() + .spawn({ + let extension_source_path = extension_source_path.clone(); + async move { + builder + .compile_extension( + &extension_source_path, + &mut extension_manifest, + CompileExtensionOptions { release: false }, + ) + .await + } + }) + .await?; + + let output_path = &extensions_dir.join(extension_id.as_ref()); + if let Some(metadata) = fs.metadata(&output_path).await? { + if metadata.is_symlink { + fs.remove_file( + &output_path, + RemoveOptions { + recursive: false, + ignore_if_not_exists: true, + }, + ) + .await?; + } else { + bail!("extension {extension_id} is already installed"); + } + } + + fs.create_symlink(output_path, extension_source_path) + .await?; + + this.update(&mut cx, |this, cx| this.reload(None, cx))? + .await; + Ok(()) + }) + } + + pub fn rebuild_dev_extension(&mut self, extension_id: Arc, cx: &mut ModelContext) { + let path = self.installed_dir.join(extension_id.as_ref()); + let builder = self.builder.clone(); + let fs = self.fs.clone(); + + match self.outstanding_operations.entry(extension_id.clone()) { + btree_map::Entry::Occupied(_) => return, + btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Upgrade), + }; + + cx.notify(); + let compile = cx.background_executor().spawn(async move { + let mut manifest = ExtensionManifest::load(fs, &path).await?; + builder + .compile_extension( + &path, + &mut manifest, + CompileExtensionOptions { release: true }, + ) + .await + }); + + cx.spawn(|this, mut cx| async move { + let result = compile.await; + + this.update(&mut cx, |this, cx| { + this.outstanding_operations.remove(&extension_id); + cx.notify(); + })?; + + if result.is_ok() { + this.update(&mut cx, |this, cx| this.reload(Some(extension_id), cx))? + .await; + } + + result + }) + .detach_and_log_err(cx) + } + + /// Updates the set of installed extensions. + /// + /// First, this unloads any themes, languages, or grammars that are + /// no longer in the manifest, or whose files have changed on disk. + /// Then it loads any themes, languages, or grammars that are newly + /// added to the manifest, or whose files have changed on disk. 
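+ /// Finally, it loads the themes and Wasm modules for the newly added extensions and registers their language server adapters.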
+ fn extensions_updated( + &mut self, + new_index: ExtensionIndex, + cx: &mut ModelContext, + ) -> Task<()> { + let old_index = &self.extension_index; + + // Determine which extensions need to be loaded and unloaded, based + // on the changes to the manifest and the extensions that we know have been + // modified. + let mut extensions_to_unload = Vec::default(); + let mut extensions_to_load = Vec::default(); + { + let mut old_keys = old_index.extensions.iter().peekable(); + let mut new_keys = new_index.extensions.iter().peekable(); + loop { + match (old_keys.peek(), new_keys.peek()) { + (None, None) => break, + (None, Some(_)) => { + extensions_to_load.push(new_keys.next().unwrap().0.clone()); + } + (Some(_), None) => { + extensions_to_unload.push(old_keys.next().unwrap().0.clone()); + } + (Some((old_key, _)), Some((new_key, _))) => match old_key.cmp(&new_key) { + Ordering::Equal => { + let (old_key, old_value) = old_keys.next().unwrap(); + let (new_key, new_value) = new_keys.next().unwrap(); + if old_value != new_value || self.modified_extensions.contains(old_key) + { + extensions_to_unload.push(old_key.clone()); + extensions_to_load.push(new_key.clone()); + } + } + Ordering::Less => { + extensions_to_unload.push(old_keys.next().unwrap().0.clone()); + } + Ordering::Greater => { + extensions_to_load.push(new_keys.next().unwrap().0.clone()); + } + }, + } + } + self.modified_extensions.clear(); + } + + if extensions_to_load.is_empty() && extensions_to_unload.is_empty() { + return Task::ready(()); + } + + let reload_count = extensions_to_unload + .iter() + .filter(|id| extensions_to_load.contains(id)) + .count(); + + log::info!( + "extensions updated. loading {}, reloading {}, unloading {}", + extensions_to_load.len() - reload_count, + reload_count, + extensions_to_unload.len() - reload_count + ); + + if let Some(telemetry) = &self.telemetry { + for extension_id in &extensions_to_load { + if let Some(extension) = new_index.extensions.get(extension_id) { + telemetry.report_extension_event( + extension_id.clone(), + extension.manifest.version.clone(), + ); + } + } + } + + let themes_to_remove = old_index + .themes + .iter() + .filter_map(|(name, entry)| { + if extensions_to_unload.contains(&entry.extension) { + Some(name.clone().into()) + } else { + None + } + }) + .collect::>(); + let languages_to_remove = old_index + .languages + .iter() + .filter_map(|(name, entry)| { + if extensions_to_unload.contains(&entry.extension) { + Some(name.clone()) + } else { + None + } + }) + .collect::>(); + let mut grammars_to_remove = Vec::new(); + for extension_id in &extensions_to_unload { + let Some(extension) = old_index.extensions.get(extension_id) else { + continue; + }; + grammars_to_remove.extend(extension.manifest.grammars.keys().cloned()); + for (language_server_name, config) in extension.manifest.language_servers.iter() { + for language in config.languages() { + self.language_registry + .remove_lsp_adapter(&language, language_server_name); + } + } + } + + self.wasm_extensions + .retain(|(extension, _)| !extensions_to_unload.contains(&extension.id)); + self.theme_registry.remove_user_themes(&themes_to_remove); + self.language_registry + .remove_languages(&languages_to_remove, &grammars_to_remove); + + let languages_to_add = new_index + .languages + .iter() + .filter(|(_, entry)| extensions_to_load.contains(&entry.extension)) + .collect::>(); + let mut grammars_to_add = Vec::new(); + let mut themes_to_add = Vec::new(); + for extension_id in &extensions_to_load { + let Some(extension) = 
new_index.extensions.get(extension_id) else { + continue; + }; + + grammars_to_add.extend(extension.manifest.grammars.keys().map(|grammar_name| { + let mut grammar_path = self.installed_dir.clone(); + grammar_path.extend([extension_id.as_ref(), "grammars"]); + grammar_path.push(grammar_name.as_ref()); + grammar_path.set_extension("wasm"); + (grammar_name.clone(), grammar_path) + })); + themes_to_add.extend(extension.manifest.themes.iter().map(|theme_path| { + let mut path = self.installed_dir.clone(); + path.extend([Path::new(extension_id.as_ref()), theme_path.as_path()]); + path + })); + } + + self.language_registry + .register_wasm_grammars(grammars_to_add); + + for (language_name, language) in languages_to_add { + let mut language_path = self.installed_dir.clone(); + language_path.extend([ + Path::new(language.extension.as_ref()), + language.path.as_path(), + ]); + self.language_registry.register_language( + language_name.clone(), + language.grammar.clone(), + language.matcher.clone(), + move || { + let config = std::fs::read_to_string(language_path.join("config.toml"))?; + let config: LanguageConfig = ::toml::from_str(&config)?; + let queries = load_plugin_queries(&language_path); + let tasks = std::fs::read_to_string(language_path.join("tasks.json")) + .ok() + .and_then(|contents| { + let definitions = serde_json_lenient::from_str(&contents).log_err()?; + Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) + }); + + Ok((config, queries, tasks)) + }, + ); + } + + let fs = self.fs.clone(); + let wasm_host = self.wasm_host.clone(); + let root_dir = self.installed_dir.clone(); + let theme_registry = self.theme_registry.clone(); + let extension_entries = extensions_to_load + .iter() + .filter_map(|name| new_index.extensions.get(name).cloned()) + .collect::>(); + + self.extension_index = new_index; + cx.notify(); + cx.emit(Event::ExtensionsUpdated); + + cx.spawn(|this, mut cx| async move { + cx.background_executor() + .spawn({ + let fs = fs.clone(); + async move { + for theme_path in &themes_to_add { + theme_registry + .load_user_theme(&theme_path, fs.clone()) + .await + .log_err(); + } + } + }) + .await; + + let mut wasm_extensions = Vec::new(); + for extension in extension_entries { + if extension.manifest.lib.kind.is_none() { + continue; + }; + + let wasm_extension = maybe!(async { + let mut path = root_dir.clone(); + path.extend([extension.manifest.clone().id.as_ref(), "extension.wasm"]); + let mut wasm_file = fs + .open_sync(&path) + .await + .context("failed to open wasm file")?; + + let mut wasm_bytes = Vec::new(); + wasm_file + .read_to_end(&mut wasm_bytes) + .context("failed to read wasm")?; + + wasm_host + .load_extension( + wasm_bytes, + extension.manifest.clone().clone(), + cx.background_executor().clone(), + ) + .await + .with_context(|| { + format!("failed to load wasm extension {}", extension.manifest.id) + }) + }) + .await; + + if let Some(wasm_extension) = wasm_extension.log_err() { + wasm_extensions.push((extension.manifest.clone(), wasm_extension)); + } else { + this.update(&mut cx, |_, cx| { + cx.emit(Event::ExtensionFailedToLoad(extension.manifest.id.clone())) + }) + .ok(); + } + } + + this.update(&mut cx, |this, cx| { + this.reload_complete_senders.clear(); + + for (manifest, wasm_extension) in &wasm_extensions { + for (language_server_id, language_server_config) in &manifest.language_servers { + for language in language_server_config.languages() { + this.language_registry.register_lsp_adapter( + language.clone(), + Arc::new(ExtensionLspAdapter { + 
extension: wasm_extension.clone(), + host: this.wasm_host.clone(), + language_server_id: language_server_id.clone(), + config: wit::LanguageServerConfig { + name: language_server_id.0.to_string(), + language_name: language.to_string(), + }, + }), + ); + } + } + } + this.wasm_extensions.extend(wasm_extensions); + ThemeSettings::reload_current_theme(cx) + }) + .ok(); + }) + } + + fn rebuild_extension_index(&self, cx: &mut ModelContext) -> Task { + let fs = self.fs.clone(); + let work_dir = self.wasm_host.work_dir.clone(); + let extensions_dir = self.installed_dir.clone(); + let index_path = self.index_path.clone(); + cx.background_executor().spawn(async move { + let start_time = Instant::now(); + let mut index = ExtensionIndex::default(); + + fs.create_dir(&work_dir).await.log_err(); + fs.create_dir(&extensions_dir).await.log_err(); + + let extension_paths = fs.read_dir(&extensions_dir).await; + if let Ok(mut extension_paths) = extension_paths { + while let Some(extension_dir) = extension_paths.next().await { + let Ok(extension_dir) = extension_dir else { + continue; + }; + + if extension_dir + .file_name() + .map_or(false, |file_name| file_name == ".DS_Store") + { + continue; + } + + Self::add_extension_to_index(fs.clone(), extension_dir, &mut index) + .await + .log_err(); + } + } + + if let Ok(index_json) = serde_json::to_string_pretty(&index) { + fs.save(&index_path, &index_json.as_str().into(), Default::default()) + .await + .context("failed to save extension index") + .log_err(); + } + + log::info!("rebuilt extension index in {:?}", start_time.elapsed()); + index + }) + } + + async fn add_extension_to_index( + fs: Arc, + extension_dir: PathBuf, + index: &mut ExtensionIndex, + ) -> Result<()> { + let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_dir).await?; + let extension_id = extension_manifest.id.clone(); + + // TODO: distinguish dev extensions more explicitly, by the absence + // of a checksum file that we'll create when downloading normal extensions. + let is_dev = fs + .metadata(&extension_dir) + .await? + .ok_or_else(|| anyhow!("directory does not exist"))? 
+ .is_symlink; + + if let Ok(mut language_paths) = fs.read_dir(&extension_dir.join("languages")).await { + while let Some(language_path) = language_paths.next().await { + let language_path = language_path?; + let Ok(relative_path) = language_path.strip_prefix(&extension_dir) else { + continue; + }; + let Ok(Some(fs_metadata)) = fs.metadata(&language_path).await else { + continue; + }; + if !fs_metadata.is_dir { + continue; + } + let config = fs.load(&language_path.join("config.toml")).await?; + let config = ::toml::from_str::(&config)?; + + let relative_path = relative_path.to_path_buf(); + if !extension_manifest.languages.contains(&relative_path) { + extension_manifest.languages.push(relative_path.clone()); + } + + index.languages.insert( + config.name.clone(), + ExtensionIndexLanguageEntry { + extension: extension_id.clone(), + path: relative_path, + matcher: config.matcher, + grammar: config.grammar, + }, + ); + } + } + + if let Ok(mut theme_paths) = fs.read_dir(&extension_dir.join("themes")).await { + while let Some(theme_path) = theme_paths.next().await { + let theme_path = theme_path?; + let Ok(relative_path) = theme_path.strip_prefix(&extension_dir) else { + continue; + }; + + let Some(theme_family) = ThemeRegistry::read_user_theme(&theme_path, fs.clone()) + .await + .log_err() + else { + continue; + }; + + let relative_path = relative_path.to_path_buf(); + if !extension_manifest.themes.contains(&relative_path) { + extension_manifest.themes.push(relative_path.clone()); + } + + for theme in theme_family.themes { + index.themes.insert( + theme.name.into(), + ExtensionIndexThemeEntry { + extension: extension_id.clone(), + path: relative_path.clone(), + }, + ); + } + } + } + + let extension_wasm_path = extension_dir.join("extension.wasm"); + if fs.is_file(&extension_wasm_path).await { + extension_manifest + .lib + .kind + .get_or_insert(ExtensionLibraryKind::Rust); + } + + index.extensions.insert( + extension_id.clone(), + ExtensionIndexEntry { + dev: is_dev, + manifest: Arc::new(extension_manifest), + }, + ); + + Ok(()) + } +} + +fn load_plugin_queries(root_path: &Path) -> LanguageQueries { + let mut result = LanguageQueries::default(); + if let Some(entries) = std::fs::read_dir(root_path).log_err() { + for entry in entries { + let Some(entry) = entry.log_err() else { + continue; + }; + let path = entry.path(); + if let Some(remainder) = path.strip_prefix(root_path).ok().and_then(|p| p.to_str()) { + if !remainder.ends_with(".scm") { + continue; + } + for (name, query) in QUERY_FILENAME_PREFIXES { + if remainder.starts_with(name) { + if let Some(contents) = std::fs::read_to_string(&path).log_err() { + match query(&mut result) { + None => *query(&mut result) = Some(contents.into()), + Some(r) => r.to_mut().push_str(contents.as_ref()), + } + } + break; + } + } + } + } + } + result +} diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs new file mode 100644 index 0000000..30de245 --- /dev/null +++ b/crates/extension/src/extension_store_test.rs @@ -0,0 +1,722 @@ +use crate::extension_manifest::SchemaVersion; +use crate::extension_settings::ExtensionSettings; +use crate::{ + Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry, + ExtensionIndexThemeEntry, ExtensionManifest, ExtensionStore, GrammarManifestEntry, + RELOAD_DEBOUNCE_DURATION, +}; +use async_compression::futures::bufread::GzipEncoder; +use collections::BTreeMap; +use fs::{FakeFs, Fs, RealFs}; +use futures::{io::BufReader, AsyncReadExt, StreamExt}; +use 
gpui::{Context, TestAppContext}; +use http::{FakeHttpClient, Response}; +use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; +use node_runtime::FakeNodeRuntime; +use parking_lot::Mutex; +use project::Project; +use serde_json::json; +use settings::{Settings as _, SettingsStore}; +use std::{ + ffi::OsString, + path::{Path, PathBuf}, + sync::Arc, +}; +use theme::ThemeRegistry; +use util::test::temp_tree; + +#[cfg(test)] +#[ctor::ctor] +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} + +#[gpui::test] +async fn test_extension_store(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let http_client = FakeHttpClient::with_200_response(); + + fs.insert_tree( + "/the-extension-dir", + json!({ + "installed": { + "zed-monokai": { + "extension.json": r#"{ + "id": "zed-monokai", + "name": "Zed Monokai", + "version": "2.0.0", + "themes": { + "Monokai Dark": "themes/monokai.json", + "Monokai Light": "themes/monokai.json", + "Monokai Pro Dark": "themes/monokai-pro.json", + "Monokai Pro Light": "themes/monokai-pro.json" + } + }"#, + "themes": { + "monokai.json": r#"{ + "name": "Monokai", + "author": "Someone", + "themes": [ + { + "name": "Monokai Dark", + "appearance": "dark", + "style": {} + }, + { + "name": "Monokai Light", + "appearance": "light", + "style": {} + } + ] + }"#, + "monokai-pro.json": r#"{ + "name": "Monokai Pro", + "author": "Someone", + "themes": [ + { + "name": "Monokai Pro Dark", + "appearance": "dark", + "style": {} + }, + { + "name": "Monokai Pro Light", + "appearance": "light", + "style": {} + } + ] + }"#, + } + }, + "zed-ruby": { + "extension.json": r#"{ + "id": "zed-ruby", + "name": "Zed Ruby", + "version": "1.0.0", + "grammars": { + "ruby": "grammars/ruby.wasm", + "embedded_template": "grammars/embedded_template.wasm" + }, + "languages": { + "ruby": "languages/ruby", + "erb": "languages/erb" + } + }"#, + "grammars": { + "ruby.wasm": "", + "embedded_template.wasm": "", + }, + "languages": { + "ruby": { + "config.toml": r#" + name = "Ruby" + grammar = "ruby" + path_suffixes = ["rb"] + "#, + "highlights.scm": "", + }, + "erb": { + "config.toml": r#" + name = "ERB" + grammar = "embedded_template" + path_suffixes = ["erb"] + "#, + "highlights.scm": "", + } + }, + } + } + }), + ) + .await; + + let mut expected_index = ExtensionIndex { + extensions: [ + ( + "zed-ruby".into(), + ExtensionIndexEntry { + manifest: Arc::new(ExtensionManifest { + id: "zed-ruby".into(), + name: "Zed Ruby".into(), + version: "1.0.0".into(), + schema_version: SchemaVersion::ZERO, + description: None, + authors: Vec::new(), + repository: None, + themes: Default::default(), + lib: Default::default(), + languages: vec!["languages/erb".into(), "languages/ruby".into()], + grammars: [ + ("embedded_template".into(), GrammarManifestEntry::default()), + ("ruby".into(), GrammarManifestEntry::default()), + ] + .into_iter() + .collect(), + language_servers: BTreeMap::default(), + }), + dev: false, + }, + ), + ( + "zed-monokai".into(), + ExtensionIndexEntry { + manifest: Arc::new(ExtensionManifest { + id: "zed-monokai".into(), + name: "Zed Monokai".into(), + version: "2.0.0".into(), + schema_version: SchemaVersion::ZERO, + description: None, + authors: vec![], + repository: None, + themes: vec![ + "themes/monokai-pro.json".into(), + "themes/monokai.json".into(), + ], + lib: Default::default(), + languages: Default::default(), + grammars: BTreeMap::default(), + language_servers: BTreeMap::default(), + 
}), + dev: false, + }, + ), + ] + .into_iter() + .collect(), + languages: [ + ( + "ERB".into(), + ExtensionIndexLanguageEntry { + extension: "zed-ruby".into(), + path: "languages/erb".into(), + grammar: Some("embedded_template".into()), + matcher: LanguageMatcher { + path_suffixes: vec!["erb".into()], + first_line_pattern: None, + }, + }, + ), + ( + "Ruby".into(), + ExtensionIndexLanguageEntry { + extension: "zed-ruby".into(), + path: "languages/ruby".into(), + grammar: Some("ruby".into()), + matcher: LanguageMatcher { + path_suffixes: vec!["rb".into()], + first_line_pattern: None, + }, + }, + ), + ] + .into_iter() + .collect(), + themes: [ + ( + "Monokai Dark".into(), + ExtensionIndexThemeEntry { + extension: "zed-monokai".into(), + path: "themes/monokai.json".into(), + }, + ), + ( + "Monokai Light".into(), + ExtensionIndexThemeEntry { + extension: "zed-monokai".into(), + path: "themes/monokai.json".into(), + }, + ), + ( + "Monokai Pro Dark".into(), + ExtensionIndexThemeEntry { + extension: "zed-monokai".into(), + path: "themes/monokai-pro.json".into(), + }, + ), + ( + "Monokai Pro Light".into(), + ExtensionIndexThemeEntry { + extension: "zed-monokai".into(), + path: "themes/monokai-pro.json".into(), + }, + ), + ] + .into_iter() + .collect(), + }; + + let language_registry = Arc::new(LanguageRegistry::test(cx.executor())); + let theme_registry = Arc::new(ThemeRegistry::new(Box::new(()))); + let node_runtime = FakeNodeRuntime::new(); + + let store = cx.new_model(|cx| { + ExtensionStore::new( + PathBuf::from("/the-extension-dir"), + None, + fs.clone(), + http_client.clone(), + None, + node_runtime.clone(), + language_registry.clone(), + theme_registry.clone(), + cx, + ) + }); + + cx.executor().advance_clock(super::RELOAD_DEBOUNCE_DURATION); + store.read_with(cx, |store, _| { + let index = &store.extension_index; + assert_eq!(index.extensions, expected_index.extensions); + assert_eq!(index.languages, expected_index.languages); + assert_eq!(index.themes, expected_index.themes); + + assert_eq!( + language_registry.language_names(), + ["ERB", "Plain Text", "Ruby"] + ); + assert_eq!( + theme_registry.list_names(false), + [ + "Monokai Dark", + "Monokai Light", + "Monokai Pro Dark", + "Monokai Pro Light", + "One Dark", + ] + ); + }); + + fs.insert_tree( + "/the-extension-dir/installed/zed-gruvbox", + json!({ + "extension.json": r#"{ + "id": "zed-gruvbox", + "name": "Zed Gruvbox", + "version": "1.0.0", + "themes": { + "Gruvbox": "themes/gruvbox.json" + } + }"#, + "themes": { + "gruvbox.json": r#"{ + "name": "Gruvbox", + "author": "Someone Else", + "themes": [ + { + "name": "Gruvbox", + "appearance": "dark", + "style": {} + } + ] + }"#, + } + }), + ) + .await; + + expected_index.extensions.insert( + "zed-gruvbox".into(), + ExtensionIndexEntry { + manifest: Arc::new(ExtensionManifest { + id: "zed-gruvbox".into(), + name: "Zed Gruvbox".into(), + version: "1.0.0".into(), + schema_version: SchemaVersion::ZERO, + description: None, + authors: vec![], + repository: None, + themes: vec!["themes/gruvbox.json".into()], + lib: Default::default(), + languages: Default::default(), + grammars: BTreeMap::default(), + language_servers: BTreeMap::default(), + }), + dev: false, + }, + ); + expected_index.themes.insert( + "Gruvbox".into(), + ExtensionIndexThemeEntry { + extension: "zed-gruvbox".into(), + path: "themes/gruvbox.json".into(), + }, + ); + + let _ = store.update(cx, |store, cx| store.reload(None, cx)); + + cx.executor().advance_clock(RELOAD_DEBOUNCE_DURATION); + store.read_with(cx, |store, _| { + let 
index = &store.extension_index; + assert_eq!(index.extensions, expected_index.extensions); + assert_eq!(index.languages, expected_index.languages); + assert_eq!(index.themes, expected_index.themes); + + assert_eq!( + theme_registry.list_names(false), + [ + "Gruvbox", + "Monokai Dark", + "Monokai Light", + "Monokai Pro Dark", + "Monokai Pro Light", + "One Dark", + ] + ); + }); + + let prev_fs_metadata_call_count = fs.metadata_call_count(); + let prev_fs_read_dir_call_count = fs.read_dir_call_count(); + + // Create new extension store, as if Zed were restarting. + drop(store); + let store = cx.new_model(|cx| { + ExtensionStore::new( + PathBuf::from("/the-extension-dir"), + None, + fs.clone(), + http_client.clone(), + None, + node_runtime.clone(), + language_registry.clone(), + theme_registry.clone(), + cx, + ) + }); + + cx.executor().run_until_parked(); + store.read_with(cx, |store, _| { + assert_eq!(store.extension_index, expected_index); + assert_eq!( + language_registry.language_names(), + ["ERB", "Plain Text", "Ruby"] + ); + assert_eq!( + language_registry.grammar_names(), + ["embedded_template".into(), "ruby".into()] + ); + assert_eq!( + theme_registry.list_names(false), + [ + "Gruvbox", + "Monokai Dark", + "Monokai Light", + "Monokai Pro Dark", + "Monokai Pro Light", + "One Dark", + ] + ); + + // The on-disk manifest limits the number of FS calls that need to be made + // on startup. + assert_eq!(fs.read_dir_call_count(), prev_fs_read_dir_call_count); + assert_eq!(fs.metadata_call_count(), prev_fs_metadata_call_count + 2); + }); + + store.update(cx, |store, cx| { + store.uninstall_extension("zed-ruby".into(), cx) + }); + + cx.executor().advance_clock(RELOAD_DEBOUNCE_DURATION); + expected_index.extensions.remove("zed-ruby"); + expected_index.languages.remove("Ruby"); + expected_index.languages.remove("ERB"); + + store.read_with(cx, |store, _| { + assert_eq!(store.extension_index, expected_index); + assert_eq!(language_registry.language_names(), ["Plain Text"]); + assert_eq!(language_registry.grammar_names(), []); + }); +} + +#[gpui::test] +async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root_dir = Path::new(env!("CARGO_MANIFEST_DIR")) + .parent() + .unwrap() + .parent() + .unwrap(); + let cache_dir = root_dir.join("target"); + let gleam_extension_dir = root_dir.join("extensions").join("gleam"); + + let fs = Arc::new(RealFs::default()); + let extensions_dir = temp_tree(json!({ + "installed": {}, + "work": {} + })); + let project_dir = temp_tree(json!({ + "test.gleam": "" + })); + + let extensions_dir = extensions_dir.path().canonicalize().unwrap(); + let project_dir = project_dir.path().canonicalize().unwrap(); + + let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await; + + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let theme_registry = Arc::new(ThemeRegistry::new(Box::new(()))); + let node_runtime = FakeNodeRuntime::new(); + + let mut status_updates = language_registry.language_server_binary_statuses(); + + struct FakeLanguageServerVersion { + version: String, + binary_contents: String, + http_request_count: usize, + } + + let language_server_version = Arc::new(Mutex::new(FakeLanguageServerVersion { + version: "v1.2.3".into(), + binary_contents: "the-binary-contents".into(), + http_request_count: 0, + })); + + let http_client = FakeHttpClient::create({ + let language_server_version = language_server_version.clone(); + move 
|request| { + let language_server_version = language_server_version.clone(); + async move { + let version = language_server_version.lock().version.clone(); + let binary_contents = language_server_version.lock().binary_contents.clone(); + + let github_releases_uri = "https://api.github.com/repos/gleam-lang/gleam/releases"; + let asset_download_uri = + format!("https://fake-download.example.com/gleam-{version}"); + + let uri = request.uri().to_string(); + if uri == github_releases_uri { + language_server_version.lock().http_request_count += 1; + Ok(Response::new( + json!([ + { + "tag_name": version, + "prerelease": false, + "tarball_url": "", + "zipball_url": "", + "assets": [ + { + "name": format!("gleam-{version}-aarch64-apple-darwin.tar.gz"), + "browser_download_url": asset_download_uri + } + ] + } + ]) + .to_string() + .into(), + )) + } else if uri == asset_download_uri { + language_server_version.lock().http_request_count += 1; + let mut bytes = Vec::::new(); + let mut archive = async_tar::Builder::new(&mut bytes); + let mut header = async_tar::Header::new_gnu(); + header.set_size(binary_contents.len() as u64); + archive + .append_data(&mut header, "gleam", binary_contents.as_bytes()) + .await + .unwrap(); + archive.into_inner().await.unwrap(); + let mut gzipped_bytes = Vec::new(); + let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); + encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); + Ok(Response::new(gzipped_bytes.into())) + } else { + Ok(Response::builder().status(404).body("not found".into())?) + } + } + } + }); + + let extension_store = cx.new_model(|cx| { + ExtensionStore::new( + extensions_dir.clone(), + Some(cache_dir), + fs.clone(), + http_client.clone(), + None, + node_runtime, + language_registry.clone(), + theme_registry.clone(), + cx, + ) + }); + + // Ensure that debounces fire. 
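+    // (The store coalesces reloads behind RELOAD_DEBOUNCE_DURATION, and the test
+    // executor's clock only moves when advanced explicitly, so we bump it whenever a
+    // reload starts; otherwise `install_dev_extension` below would never complete.)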
+ let mut events = cx.events(&extension_store); + let executor = cx.executor(); + let _task = cx.executor().spawn(async move { + while let Some(event) = events.next().await { + match event { + crate::Event::StartedReloading => { + executor.advance_clock(RELOAD_DEBOUNCE_DURATION); + } + _ => (), + } + } + }); + + extension_store.update(cx, |_, cx| { + cx.subscribe(&extension_store, |_, _, event, _| { + if matches!(event, Event::ExtensionFailedToLoad(_)) { + panic!("extension failed to load"); + } + }) + .detach(); + }); + + extension_store + .update(cx, |store, cx| { + store.install_dev_extension(gleam_extension_dir.clone(), cx) + }) + .await + .unwrap(); + + let mut fake_servers = language_registry.fake_language_servers("Gleam"); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(project_dir.join("test.gleam"), cx) + }) + .await + .unwrap(); + + let fake_server = fake_servers.next().await.unwrap(); + let expected_server_path = extensions_dir.join("work/gleam/gleam-v1.2.3/gleam"); + let expected_binary_contents = language_server_version.lock().binary_contents.clone(); + + assert_eq!(fake_server.binary.path, expected_server_path); + assert_eq!(fake_server.binary.arguments, [OsString::from("lsp")]); + assert_eq!( + fs.load(&expected_server_path).await.unwrap(), + expected_binary_contents + ); + assert_eq!(language_server_version.lock().http_request_count, 2); + assert_eq!( + [ + status_updates.next().await.unwrap(), + status_updates.next().await.unwrap(), + status_updates.next().await.unwrap(), + ], + [ + ( + LanguageServerName("gleam".into()), + LanguageServerBinaryStatus::CheckingForUpdate + ), + ( + LanguageServerName("gleam".into()), + LanguageServerBinaryStatus::Downloading + ), + ( + LanguageServerName("gleam".into()), + LanguageServerBinaryStatus::None + ) + ] + ); + + // The extension creates custom labels for completion items. + fake_server.handle_request::(|_, _| async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "foo".into(), + kind: Some(lsp::CompletionItemKind::FUNCTION), + detail: Some("fn() -> Result(Nil, Error)".into()), + ..Default::default() + }, + lsp::CompletionItem { + label: "bar.baz".into(), + kind: Some(lsp::CompletionItemKind::FUNCTION), + detail: Some("fn(List(a)) -> a".into()), + ..Default::default() + }, + lsp::CompletionItem { + label: "Quux".into(), + kind: Some(lsp::CompletionItemKind::CONSTRUCTOR), + detail: Some("fn(String) -> T".into()), + ..Default::default() + }, + lsp::CompletionItem { + label: "my_string".into(), + kind: Some(lsp::CompletionItemKind::CONSTANT), + detail: Some("String".into()), + ..Default::default() + }, + ]))) + }); + + let completion_labels = project + .update(cx, |project, cx| project.completions(&buffer, 0, cx)) + .await + .unwrap() + .into_iter() + .map(|c| c.label.text) + .collect::>(); + assert_eq!( + completion_labels, + [ + "foo: fn() -> Result(Nil, Error)".to_string(), + "bar.baz: fn(List(a)) -> a".to_string(), + "Quux: fn(String) -> T".to_string(), + "my_string: String".to_string(), + ] + ); + + // Simulate a new version of the language server being released + language_server_version.lock().version = "v2.0.0".into(); + language_server_version.lock().binary_contents = "the-new-binary-contents".into(); + language_server_version.lock().http_request_count = 0; + + // Start a new instance of the language server. 
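+    // (`http_request_count` is reset above so the assertions that follow can tell a
+    // cached binary apart from a fresh install, which costs two requests: one for the
+    // release metadata and one for the asset download.)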
+ project.update(cx, |project, cx| { + project.restart_language_servers_for_buffers([buffer.clone()], cx) + }); + + // The extension has cached the binary path, and does not attempt + // to reinstall it. + let fake_server = fake_servers.next().await.unwrap(); + assert_eq!(fake_server.binary.path, expected_server_path); + assert_eq!( + fs.load(&expected_server_path).await.unwrap(), + expected_binary_contents + ); + assert_eq!(language_server_version.lock().http_request_count, 0); + + // Reload the extension, clearing its cache. + // Start a new instance of the language server. + extension_store + .update(cx, |store, cx| store.reload(Some("gleam".into()), cx)) + .await; + + cx.executor().run_until_parked(); + project.update(cx, |project, cx| { + project.restart_language_servers_for_buffers([buffer.clone()], cx) + }); + + // The extension re-fetches the latest version of the language server. + let fake_server = fake_servers.next().await.unwrap(); + let new_expected_server_path = extensions_dir.join("work/gleam/gleam-v2.0.0/gleam"); + let expected_binary_contents = language_server_version.lock().binary_contents.clone(); + assert_eq!(fake_server.binary.path, new_expected_server_path); + assert_eq!(fake_server.binary.arguments, [OsString::from("lsp")]); + assert_eq!( + fs.load(&new_expected_server_path).await.unwrap(), + expected_binary_contents + ); + + // The old language server directory has been cleaned up. + assert!(fs.metadata(&expected_server_path).await.unwrap().is_none()); +} + +fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let store = SettingsStore::test(cx); + cx.set_global(store); + theme::init(theme::LoadThemes::JustBase, cx); + Project::init_settings(cx); + ExtensionSettings::register(cx); + language::init(cx); + }); +} diff --git a/crates/extension/src/wasm_host.rs b/crates/extension/src/wasm_host.rs new file mode 100644 index 0000000..25a73f1 --- /dev/null +++ b/crates/extension/src/wasm_host.rs @@ -0,0 +1,297 @@ +pub(crate) mod wit; + +use crate::ExtensionManifest; +use anyhow::{anyhow, bail, Context as _, Result}; +use fs::{normalize_path, Fs}; +use futures::future::LocalBoxFuture; +use futures::{ + channel::{ + mpsc::{self, UnboundedSender}, + oneshot, + }, + future::BoxFuture, + Future, FutureExt, StreamExt as _, +}; +use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task}; +use http::HttpClient; +use language::LanguageRegistry; +use node_runtime::NodeRuntime; +use semantic_version::SemanticVersion; +use std::{ + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use wasmtime::{ + component::{Component, ResourceTable}, + Engine, Store, +}; +use wasmtime_wasi as wasi; +use wit::Extension; + +pub(crate) struct WasmHost { + engine: Engine, + http_client: Arc, + node_runtime: Arc, + pub(crate) language_registry: Arc, + fs: Arc, + pub(crate) work_dir: PathBuf, + _main_thread_message_task: Task<()>, + main_thread_message_tx: mpsc::UnboundedSender, +} + +#[derive(Clone)] +pub struct WasmExtension { + tx: UnboundedSender, + pub(crate) manifest: Arc, + #[allow(unused)] + pub zed_api_version: SemanticVersion, +} + +pub(crate) struct WasmState { + manifest: Arc, + pub(crate) table: ResourceTable, + ctx: wasi::WasiCtx, + pub(crate) host: Arc, +} + +type MainThreadCall = + Box FnOnce(&'a mut AsyncAppContext) -> LocalBoxFuture<'a, ()>>; + +type ExtensionCall = Box< + dyn Send + for<'a> FnOnce(&'a mut Extension, &'a mut Store) -> BoxFuture<'a, ()>, +>; + +fn wasm_engine() -> wasmtime::Engine { + static WASM_ENGINE: OnceLock = OnceLock::new(); + + WASM_ENGINE + 
.get_or_init(|| { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + config.async_support(true); + wasmtime::Engine::new(&config).unwrap() + }) + .clone() +} + +impl WasmHost { + pub fn new( + fs: Arc, + http_client: Arc, + node_runtime: Arc, + language_registry: Arc, + work_dir: PathBuf, + cx: &mut AppContext, + ) -> Arc { + let (tx, mut rx) = mpsc::unbounded::(); + let task = cx.spawn(|mut cx| async move { + while let Some(message) = rx.next().await { + message(&mut cx).await; + } + }); + Arc::new(Self { + engine: wasm_engine(), + fs, + work_dir, + http_client, + node_runtime, + language_registry, + _main_thread_message_task: task, + main_thread_message_tx: tx, + }) + } + + pub fn load_extension( + self: &Arc, + wasm_bytes: Vec, + manifest: Arc, + executor: BackgroundExecutor, + ) -> Task> { + let this = self.clone(); + executor.clone().spawn(async move { + let zed_api_version = parse_wasm_extension_version(&manifest.id, &wasm_bytes)?; + + let component = Component::from_binary(&this.engine, &wasm_bytes) + .context("failed to compile wasm component")?; + + let mut store = wasmtime::Store::new( + &this.engine, + WasmState { + ctx: this.build_wasi_ctx(&manifest).await?, + manifest: manifest.clone(), + table: ResourceTable::new(), + host: this.clone(), + }, + ); + + let (mut extension, instance) = + Extension::instantiate_async(&mut store, zed_api_version, &component).await?; + + extension + .call_init_extension(&mut store) + .await + .context("failed to initialize wasm extension")?; + + let (tx, mut rx) = mpsc::unbounded::(); + executor + .spawn(async move { + let _instance = instance; + while let Some(call) = rx.next().await { + (call)(&mut extension, &mut store).await; + } + }) + .detach(); + + Ok(WasmExtension { + manifest, + tx, + zed_api_version, + }) + }) + } + + async fn build_wasi_ctx(&self, manifest: &Arc) -> Result { + use cap_std::{ambient_authority, fs::Dir}; + + let extension_work_dir = self.work_dir.join(manifest.id.as_ref()); + self.fs + .create_dir(&extension_work_dir) + .await + .context("failed to create extension work dir")?; + + let work_dir_preopen = Dir::open_ambient_dir(&extension_work_dir, ambient_authority()) + .context("failed to preopen extension work directory")?; + let current_dir_preopen = work_dir_preopen + .try_clone() + .context("failed to preopen extension current directory")?; + let extension_work_dir = extension_work_dir.to_string_lossy(); + + let perms = wasi::FilePerms::all(); + let dir_perms = wasi::DirPerms::all(); + + Ok(wasi::WasiCtxBuilder::new() + .inherit_stdio() + .preopened_dir(current_dir_preopen, dir_perms, perms, ".") + .preopened_dir(work_dir_preopen, dir_perms, perms, &extension_work_dir) + .env("PWD", &extension_work_dir) + .env("RUST_BACKTRACE", "full") + .build()) + } + + pub fn path_from_extension(&self, id: &Arc, path: &Path) -> PathBuf { + let extension_work_dir = self.work_dir.join(id.as_ref()); + normalize_path(&extension_work_dir.join(path)) + } + + pub fn writeable_path_from_extension(&self, id: &Arc, path: &Path) -> Result { + let extension_work_dir = self.work_dir.join(id.as_ref()); + let path = normalize_path(&extension_work_dir.join(path)); + if path.starts_with(&extension_work_dir) { + Ok(path) + } else { + Err(anyhow!("cannot write to path {}", path.display())) + } + } +} + +pub fn parse_wasm_extension_version( + extension_id: &str, + wasm_bytes: &[u8], +) -> Result { + let mut version = None; + + for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) { + if let 
wasmparser::Payload::CustomSection(s) = + part.context("error parsing wasm extension")? + { + if s.name() == "zed:api-version" { + version = parse_wasm_extension_version_custom_section(s.data()); + if version.is_none() { + bail!( + "extension {} has invalid zed:api-version section: {:?}", + extension_id, + s.data() + ); + } + } + } + } + + // The reason we wait until we're done parsing all of the Wasm bytes to return the version + // is to work around a panic that can happen inside of Wasmtime when the bytes are invalid. + // + // By parsing the entirety of the Wasm bytes before we return, we're able to detect this problem + // earlier as an `Err` rather than as a panic. + version.ok_or_else(|| anyhow!("extension {} has no zed:api-version section", extension_id)) +} + +fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option { + if data.len() == 6 { + Some(SemanticVersion::new( + u16::from_be_bytes([data[0], data[1]]) as _, + u16::from_be_bytes([data[2], data[3]]) as _, + u16::from_be_bytes([data[4], data[5]]) as _, + )) + } else { + None + } +} + +impl WasmExtension { + pub async fn call(&self, f: Fn) -> T + where + T: 'static + Send, + Fn: 'static + + Send + + for<'a> FnOnce(&'a mut Extension, &'a mut Store) -> BoxFuture<'a, T>, + { + let (return_tx, return_rx) = oneshot::channel(); + self.tx + .clone() + .unbounded_send(Box::new(move |extension, store| { + async { + let result = f(extension, store).await; + return_tx.send(result).ok(); + } + .boxed() + })) + .expect("wasm extension channel should not be closed yet"); + return_rx.await.expect("wasm extension channel") + } +} + +impl WasmState { + fn on_main_thread(&self, f: Fn) -> impl 'static + Future + where + T: 'static + Send, + Fn: 'static + Send + for<'a> FnOnce(&'a mut AsyncAppContext) -> LocalBoxFuture<'a, T>, + { + let (return_tx, return_rx) = oneshot::channel(); + self.host + .main_thread_message_tx + .clone() + .unbounded_send(Box::new(move |cx| { + async { + let result = f(cx).await; + return_tx.send(result).ok(); + } + .boxed_local() + })) + .expect("main thread message channel should not be closed yet"); + async move { return_rx.await.expect("main thread message channel") } + } + + fn work_dir(&self) -> PathBuf { + self.host.work_dir.join(self.manifest.id.as_ref()) + } +} + +impl wasi::WasiView for WasmState { + fn table(&mut self) -> &mut ResourceTable { + &mut self.table + } + + fn ctx(&mut self) -> &mut wasi::WasiCtx { + &mut self.ctx + } +} diff --git a/crates/extension/src/wasm_host/wit.rs b/crates/extension/src/wasm_host/wit.rs new file mode 100644 index 0000000..35c5a6c --- /dev/null +++ b/crates/extension/src/wasm_host/wit.rs @@ -0,0 +1,207 @@ +mod since_v0_0_1; +mod since_v0_0_4; +mod since_v0_0_6; +use since_v0_0_6 as latest; + +use super::{wasm_engine, WasmState}; +use anyhow::{Context, Result}; +use language::{LanguageServerName, LspAdapterDelegate}; +use semantic_version::SemanticVersion; +use std::{ops::RangeInclusive, sync::Arc}; +use wasmtime::{ + component::{Component, Instance, Linker, Resource}, + Store, +}; + +#[cfg(test)] +pub use latest::CodeLabelSpanLiteral; +pub use latest::{ + zed::extension::lsp::{Completion, CompletionKind, InsertTextFormat, Symbol, SymbolKind}, + CodeLabel, CodeLabelSpan, Command, Range, +}; +pub use since_v0_0_4::LanguageServerConfig; + +pub fn new_linker( + f: impl Fn(&mut Linker, fn(&mut WasmState) -> &mut WasmState) -> Result<()>, +) -> Linker { + let mut linker = Linker::new(&wasm_engine()); + wasmtime_wasi::command::add_to_linker(&mut linker).unwrap(); + f(&mut 
linker, wasi_view).unwrap(); + linker +} + +fn wasi_view(state: &mut WasmState) -> &mut WasmState { + state +} + +/// Returns whether the given Wasm API version is supported by the Wasm host. +pub fn is_supported_wasm_api_version(version: SemanticVersion) -> bool { + wasm_api_version_range().contains(&version) +} + +/// Returns the Wasm API version range that is supported by the Wasm host. +#[inline(always)] +pub fn wasm_api_version_range() -> RangeInclusive { + since_v0_0_1::MIN_VERSION..=latest::MAX_VERSION +} + +pub enum Extension { + V006(since_v0_0_6::Extension), + V004(since_v0_0_4::Extension), + V001(since_v0_0_1::Extension), +} + +impl Extension { + pub async fn instantiate_async( + store: &mut Store, + version: SemanticVersion, + component: &Component, + ) -> Result<(Self, Instance)> { + if version >= latest::MIN_VERSION { + let (extension, instance) = + latest::Extension::instantiate_async(store, &component, latest::linker()) + .await + .context("failed to instantiate wasm extension")?; + Ok((Self::V006(extension), instance)) + } else if version >= since_v0_0_4::MIN_VERSION { + let (extension, instance) = since_v0_0_4::Extension::instantiate_async( + store, + &component, + since_v0_0_4::linker(), + ) + .await + .context("failed to instantiate wasm extension")?; + Ok((Self::V004(extension), instance)) + } else { + let (extension, instance) = since_v0_0_1::Extension::instantiate_async( + store, + &component, + since_v0_0_1::linker(), + ) + .await + .context("failed to instantiate wasm extension")?; + Ok((Self::V001(extension), instance)) + } + } + + pub async fn call_init_extension(&self, store: &mut Store) -> Result<()> { + match self { + Extension::V006(ext) => ext.call_init_extension(store).await, + Extension::V004(ext) => ext.call_init_extension(store).await, + Extension::V001(ext) => ext.call_init_extension(store).await, + } + } + + pub async fn call_language_server_command( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + config: &LanguageServerConfig, + resource: Resource>, + ) -> Result> { + match self { + Extension::V006(ext) => { + ext.call_language_server_command(store, &language_server_id.0, resource) + .await + } + Extension::V004(ext) => Ok(ext + .call_language_server_command(store, config, resource) + .await? + .map(|command| command.into())), + Extension::V001(ext) => Ok(ext + .call_language_server_command(store, &config.clone().into(), resource) + .await? 
+ .map(|command| command.into())), + } + } + + pub async fn call_language_server_initialization_options( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + config: &LanguageServerConfig, + resource: Resource>, + ) -> Result, String>> { + match self { + Extension::V006(ext) => { + ext.call_language_server_initialization_options( + store, + &language_server_id.0, + resource, + ) + .await + } + Extension::V004(ext) => { + ext.call_language_server_initialization_options(store, config, resource) + .await + } + Extension::V001(ext) => { + ext.call_language_server_initialization_options( + store, + &config.clone().into(), + resource, + ) + .await + } + } + } + + pub async fn call_language_server_workspace_configuration( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + resource: Resource>, + ) -> Result, String>> { + match self { + Extension::V006(ext) => { + ext.call_language_server_workspace_configuration( + store, + &language_server_id.0, + resource, + ) + .await + } + Extension::V004(_) | Extension::V001(_) => Ok(Ok(None)), + } + } + + pub async fn call_labels_for_completions( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + completions: Vec, + ) -> Result>, String>> { + match self { + Extension::V001(_) | Extension::V004(_) => Ok(Ok(Vec::new())), + Extension::V006(ext) => { + ext.call_labels_for_completions(store, &language_server_id.0, &completions) + .await + } + } + } + + pub async fn call_labels_for_symbols( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + symbols: Vec, + ) -> Result>, String>> { + match self { + Extension::V001(_) | Extension::V004(_) => Ok(Ok(Vec::new())), + Extension::V006(ext) => { + ext.call_labels_for_symbols(store, &language_server_id.0, &symbols) + .await + } + } + } +} + +trait ToWasmtimeResult { + fn to_wasmtime_result(self) -> wasmtime::Result>; +} + +impl ToWasmtimeResult for Result { + fn to_wasmtime_result(self) -> wasmtime::Result> { + Ok(self.map_err(|error| error.to_string())) + } +} diff --git a/crates/extension/src/wasm_host/wit/since_v0_0_1.rs b/crates/extension/src/wasm_host/wit/since_v0_0_1.rs new file mode 100644 index 0000000..1dab4a6 --- /dev/null +++ b/crates/extension/src/wasm_host/wit/since_v0_0_1.rs @@ -0,0 +1,163 @@ +use super::latest; +use crate::wasm_host::wit::since_v0_0_4; +use crate::wasm_host::WasmState; +use anyhow::Result; +use async_trait::async_trait; +use language::{LanguageServerBinaryStatus, LspAdapterDelegate}; +use semantic_version::SemanticVersion; +use std::sync::{Arc, OnceLock}; +use wasmtime::component::{Linker, Resource}; + +pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 1); + +wasmtime::component::bindgen!({ + async: true, + path: "../extension_api/wit/since_v0.0.1", + with: { + "worktree": ExtensionWorktree, + "zed:extension/github": latest::zed::extension::github, + "zed:extension/platform": latest::zed::extension::platform, + }, +}); + +pub type ExtensionWorktree = Arc; + +pub fn linker() -> &'static Linker { + static LINKER: OnceLock> = OnceLock::new(); + LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) +} + +impl From for latest::DownloadedFileType { + fn from(value: DownloadedFileType) -> Self { + match value { + DownloadedFileType::Gzip => latest::DownloadedFileType::Gzip, + DownloadedFileType::GzipTar => latest::DownloadedFileType::GzipTar, + DownloadedFileType::Zip => latest::DownloadedFileType::Zip, + DownloadedFileType::Uncompressed => 
latest::DownloadedFileType::Uncompressed, + } + } +} + +impl From for LanguageServerConfig { + fn from(value: since_v0_0_4::LanguageServerConfig) -> Self { + Self { + name: value.name, + language_name: value.language_name, + } + } +} + +impl From for latest::Command { + fn from(value: Command) -> Self { + Self { + command: value.command, + args: value.args, + env: value.env, + } + } +} + +#[async_trait] +impl HostWorktree for WasmState { + async fn read_text_file( + &mut self, + delegate: Resource>, + path: String, + ) -> wasmtime::Result> { + latest::HostWorktree::read_text_file(self, delegate, path).await + } + + async fn shell_env( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + latest::HostWorktree::shell_env(self, delegate).await + } + + async fn which( + &mut self, + delegate: Resource>, + binary_name: String, + ) -> wasmtime::Result> { + latest::HostWorktree::which(self, delegate, binary_name).await + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + Ok(()) + } +} + +#[async_trait] +impl ExtensionImports for WasmState { + async fn node_binary_path(&mut self) -> wasmtime::Result> { + latest::nodejs::Host::node_binary_path(self).await + } + + async fn npm_package_latest_version( + &mut self, + package_name: String, + ) -> wasmtime::Result> { + latest::nodejs::Host::npm_package_latest_version(self, package_name).await + } + + async fn npm_package_installed_version( + &mut self, + package_name: String, + ) -> wasmtime::Result, String>> { + latest::nodejs::Host::npm_package_installed_version(self, package_name).await + } + + async fn npm_install_package( + &mut self, + package_name: String, + version: String, + ) -> wasmtime::Result> { + latest::nodejs::Host::npm_install_package(self, package_name, version).await + } + + async fn latest_github_release( + &mut self, + repo: String, + options: GithubReleaseOptions, + ) -> wasmtime::Result> { + latest::zed::extension::github::Host::latest_github_release(self, repo, options).await + } + + async fn current_platform(&mut self) -> Result<(Os, Architecture)> { + latest::zed::extension::platform::Host::current_platform(self).await + } + + async fn set_language_server_installation_status( + &mut self, + server_name: String, + status: LanguageServerInstallationStatus, + ) -> wasmtime::Result<()> { + let status = match status { + LanguageServerInstallationStatus::CheckingForUpdate => { + LanguageServerBinaryStatus::CheckingForUpdate + } + LanguageServerInstallationStatus::Downloading => { + LanguageServerBinaryStatus::Downloading + } + LanguageServerInstallationStatus::Cached + | LanguageServerInstallationStatus::Downloaded => LanguageServerBinaryStatus::None, + LanguageServerInstallationStatus::Failed(error) => { + LanguageServerBinaryStatus::Failed { error } + } + }; + + self.host + .language_registry + .update_lsp_status(language::LanguageServerName(server_name.into()), status); + Ok(()) + } + + async fn download_file( + &mut self, + url: String, + path: String, + file_type: DownloadedFileType, + ) -> wasmtime::Result> { + latest::ExtensionImports::download_file(self, url, path, file_type.into()).await + } +} diff --git a/crates/extension/src/wasm_host/wit/since_v0_0_4.rs b/crates/extension/src/wasm_host/wit/since_v0_0_4.rs new file mode 100644 index 0000000..86b3a39 --- /dev/null +++ b/crates/extension/src/wasm_host/wit/since_v0_0_4.rs @@ -0,0 +1,165 @@ +use super::latest; +use crate::wasm_host::WasmState; +use anyhow::Result; +use async_trait::async_trait; +use language::LspAdapterDelegate; +use 
semantic_version::SemanticVersion; +use std::sync::{Arc, OnceLock}; +use wasmtime::component::{Linker, Resource}; + +pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 4); + +wasmtime::component::bindgen!({ + async: true, + path: "../extension_api/wit/since_v0.0.4", + with: { + "worktree": ExtensionWorktree, + "zed:extension/github": latest::zed::extension::github, + "zed:extension/platform": latest::zed::extension::platform, + }, +}); + +pub type ExtensionWorktree = Arc; + +pub fn linker() -> &'static Linker { + static LINKER: OnceLock> = OnceLock::new(); + LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) +} + +impl From for latest::DownloadedFileType { + fn from(value: DownloadedFileType) -> Self { + match value { + DownloadedFileType::Gzip => latest::DownloadedFileType::Gzip, + DownloadedFileType::GzipTar => latest::DownloadedFileType::GzipTar, + DownloadedFileType::Zip => latest::DownloadedFileType::Zip, + DownloadedFileType::Uncompressed => latest::DownloadedFileType::Uncompressed, + } + } +} + +impl From for latest::LanguageServerInstallationStatus { + fn from(value: LanguageServerInstallationStatus) -> Self { + match value { + LanguageServerInstallationStatus::None => { + latest::LanguageServerInstallationStatus::None + } + LanguageServerInstallationStatus::Downloading => { + latest::LanguageServerInstallationStatus::Downloading + } + LanguageServerInstallationStatus::CheckingForUpdate => { + latest::LanguageServerInstallationStatus::CheckingForUpdate + } + LanguageServerInstallationStatus::Failed(error) => { + latest::LanguageServerInstallationStatus::Failed(error) + } + } + } +} + +impl From for latest::Command { + fn from(value: Command) -> Self { + Self { + command: value.command, + args: value.args, + env: value.env, + } + } +} + +#[async_trait] +impl HostWorktree for WasmState { + async fn read_text_file( + &mut self, + delegate: Resource>, + path: String, + ) -> wasmtime::Result> { + latest::HostWorktree::read_text_file(self, delegate, path).await + } + + async fn shell_env( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + latest::HostWorktree::shell_env(self, delegate).await + } + + async fn which( + &mut self, + delegate: Resource>, + binary_name: String, + ) -> wasmtime::Result> { + latest::HostWorktree::which(self, delegate, binary_name).await + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of worktrees. 
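+        // The host retains ownership of the underlying `LspAdapterDelegate`, so
+        // dropping the guest's resource handle needs no cleanup.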
+ Ok(()) + } +} + +#[async_trait] +impl ExtensionImports for WasmState { + async fn node_binary_path(&mut self) -> wasmtime::Result> { + latest::nodejs::Host::node_binary_path(self).await + } + + async fn npm_package_latest_version( + &mut self, + package_name: String, + ) -> wasmtime::Result> { + latest::nodejs::Host::npm_package_latest_version(self, package_name).await + } + + async fn npm_package_installed_version( + &mut self, + package_name: String, + ) -> wasmtime::Result, String>> { + latest::nodejs::Host::npm_package_installed_version(self, package_name).await + } + + async fn npm_install_package( + &mut self, + package_name: String, + version: String, + ) -> wasmtime::Result> { + latest::nodejs::Host::npm_install_package(self, package_name, version).await + } + + async fn latest_github_release( + &mut self, + repo: String, + options: GithubReleaseOptions, + ) -> wasmtime::Result> { + latest::zed::extension::github::Host::latest_github_release(self, repo, options).await + } + + async fn current_platform(&mut self) -> Result<(Os, Architecture)> { + latest::zed::extension::platform::Host::current_platform(self).await + } + + async fn set_language_server_installation_status( + &mut self, + server_name: String, + status: LanguageServerInstallationStatus, + ) -> wasmtime::Result<()> { + latest::ExtensionImports::set_language_server_installation_status( + self, + server_name, + status.into(), + ) + .await + } + + async fn download_file( + &mut self, + url: String, + path: String, + file_type: DownloadedFileType, + ) -> wasmtime::Result> { + latest::ExtensionImports::download_file(self, url, path, file_type.into()).await + } + + async fn make_file_executable(&mut self, path: String) -> wasmtime::Result> { + latest::ExtensionImports::make_file_executable(self, path).await + } +} diff --git a/crates/extension/src/wasm_host/wit/since_v0_0_6.rs b/crates/extension/src/wasm_host/wit/since_v0_0_6.rs new file mode 100644 index 0000000..3f3b118 --- /dev/null +++ b/crates/extension/src/wasm_host/wit/since_v0_0_6.rs @@ -0,0 +1,385 @@ +use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; +use ::settings::Settings; +use anyhow::{anyhow, bail, Result}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use async_trait::async_trait; +use futures::{io::BufReader, FutureExt as _}; +use language::{ + language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, +}; +use project::project_settings::ProjectSettings; +use semantic_version::SemanticVersion; +use std::{ + env, + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use util::maybe; +use wasmtime::component::{Linker, Resource}; + +pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 6); +pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 6); + +wasmtime::component::bindgen!({ + async: true, + path: "../extension_api/wit/since_v0.0.6", + with: { + "worktree": ExtensionWorktree, + }, +}); + +pub use self::zed::extension::*; + +mod settings { + include!("../../../../extension_api/wit/since_v0.0.6/settings.rs"); +} + +pub type ExtensionWorktree = Arc; + +pub fn linker() -> &'static Linker { + static LINKER: OnceLock> = OnceLock::new(); + LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) +} + +#[async_trait] +impl HostWorktree for WasmState { + async fn id( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.worktree_id()) + } + + async fn root_path( + &mut self, + delegate: 
Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.worktree_root_path().to_string_lossy().to_string()) + } + + async fn read_text_file( + &mut self, + delegate: Resource>, + path: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .read_text_file(path.into()) + .await + .map_err(|error| error.to_string())) + } + + async fn shell_env( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.shell_env().await.into_iter().collect()) + } + + async fn which( + &mut self, + delegate: Resource>, + binary_name: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .which(binary_name.as_ref()) + .await + .map(|path| path.to_string_lossy().to_string())) + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of worktrees. + Ok(()) + } +} + +#[async_trait] +impl nodejs::Host for WasmState { + async fn node_binary_path(&mut self) -> wasmtime::Result> { + self.host + .node_runtime + .binary_path() + .await + .map(|path| path.to_string_lossy().to_string()) + .to_wasmtime_result() + } + + async fn npm_package_latest_version( + &mut self, + package_name: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_package_latest_version(&package_name) + .await + .to_wasmtime_result() + } + + async fn npm_package_installed_version( + &mut self, + package_name: String, + ) -> wasmtime::Result, String>> { + self.host + .node_runtime + .npm_package_installed_version(&self.work_dir(), &package_name) + .await + .to_wasmtime_result() + } + + async fn npm_install_package( + &mut self, + package_name: String, + version: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl lsp::Host for WasmState {} + +#[async_trait] +impl github::Host for WasmState { + async fn latest_github_release( + &mut self, + repo: String, + options: github::GithubReleaseOptions, + ) -> wasmtime::Result> { + maybe!(async { + let release = http::github::latest_github_release( + &repo, + options.require_assets, + options.pre_release, + self.host.http_client.clone(), + ) + .await?; + Ok(github::GithubRelease { + version: release.tag_name, + assets: release + .assets + .into_iter() + .map(|asset| github::GithubReleaseAsset { + name: asset.name, + download_url: asset.browser_download_url, + }) + .collect(), + }) + }) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl platform::Host for WasmState { + async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { + Ok(( + match env::consts::OS { + "macos" => platform::Os::Mac, + "linux" => platform::Os::Linux, + "windows" => platform::Os::Windows, + _ => panic!("unsupported os"), + }, + match env::consts::ARCH { + "aarch64" => platform::Architecture::Aarch64, + "x86" => platform::Architecture::X86, + "x86_64" => platform::Architecture::X8664, + _ => panic!("unsupported architecture"), + }, + )) + } +} + +#[async_trait] +impl ExtensionImports for WasmState { + async fn get_settings( + &mut self, + location: Option, + category: String, + key: Option, + ) -> wasmtime::Result> { + self.on_main_thread(|cx| { + async move { + let location = location + .as_ref() + .map(|location| ::settings::SettingsLocation { + worktree_id: location.worktree_id as usize, + path: 
Path::new(&location.path), + }); + + cx.update(|cx| match category.as_str() { + "language" => { + let settings = + AllLanguageSettings::get(location, cx).language(key.as_deref()); + Ok(serde_json::to_string(&settings::LanguageSettings { + tab_size: settings.tab_size, + })?) + } + "lsp" => { + let settings = key + .and_then(|key| { + ProjectSettings::get(location, cx) + .lsp + .get(&Arc::::from(key)) + }) + .cloned() + .unwrap_or_default(); + Ok(serde_json::to_string(&settings::LspSettings { + binary: settings.binary.map(|binary| settings::BinarySettings { + path: binary.path, + arguments: binary.arguments, + }), + settings: settings.settings, + initialization_options: settings.initialization_options, + })?) + } + _ => { + bail!("Unknown settings category: {}", category); + } + }) + } + .boxed_local() + }) + .await? + .to_wasmtime_result() + } + + async fn set_language_server_installation_status( + &mut self, + server_name: String, + status: LanguageServerInstallationStatus, + ) -> wasmtime::Result<()> { + let status = match status { + LanguageServerInstallationStatus::CheckingForUpdate => { + LanguageServerBinaryStatus::CheckingForUpdate + } + LanguageServerInstallationStatus::Downloading => { + LanguageServerBinaryStatus::Downloading + } + LanguageServerInstallationStatus::None => LanguageServerBinaryStatus::None, + LanguageServerInstallationStatus::Failed(error) => { + LanguageServerBinaryStatus::Failed { error } + } + }; + + self.host + .language_registry + .update_lsp_status(language::LanguageServerName(server_name.into()), status); + Ok(()) + } + + async fn download_file( + &mut self, + url: String, + path: String, + file_type: DownloadedFileType, + ) -> wasmtime::Result> { + maybe!(async { + let path = PathBuf::from(path); + let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref()); + + self.host.fs.create_dir(&extension_work_dir).await?; + + let destination_path = self + .host + .writeable_path_from_extension(&self.manifest.id, &path)?; + + let mut response = self + .host + .http_client + .get(&url, Default::default(), true) + .await + .map_err(|err| anyhow!("error downloading release: {}", err))?; + + if !response.status().is_success() { + Err(anyhow!( + "download failed with status {}", + response.status().to_string() + ))?; + } + let body = BufReader::new(response.body_mut()); + + match file_type { + DownloadedFileType::Uncompressed => { + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::Gzip => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::GzipTar => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + .extract_tar_file(&destination_path, Archive::new(body)) + .await?; + } + DownloadedFileType::Zip => { + let file_name = destination_path + .file_name() + .ok_or_else(|| anyhow!("invalid download path"))? + .to_string_lossy(); + let zip_filename = format!("{file_name}.zip"); + let mut zip_path = destination_path.clone(); + zip_path.set_file_name(zip_filename); + + futures::pin_mut!(body); + self.host.fs.create_file_with(&zip_path, body).await?; + + let unzip_status = std::process::Command::new("unzip") + .current_dir(&extension_work_dir) + .arg("-d") + .arg(&destination_path) + .arg(&zip_path) + .output()? 
+ .status; + if !unzip_status.success() { + Err(anyhow!("failed to unzip {} archive", path.display()))?; + } + } + } + + Ok(()) + }) + .await + .to_wasmtime_result() + } + + async fn make_file_executable(&mut self, path: String) -> wasmtime::Result> { + #[allow(unused)] + let path = self + .host + .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?; + + #[cfg(unix)] + { + use std::fs::{self, Permissions}; + use std::os::unix::fs::PermissionsExt; + + return fs::set_permissions(&path, Permissions::from_mode(0o755)) + .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}")) + .to_wasmtime_result(); + } + + #[cfg(not(unix))] + Ok(Ok(())) + } +} diff --git a/crates/extension_api/Cargo.toml b/crates/extension_api/Cargo.toml new file mode 100644 index 0000000..1fcb8d0 --- /dev/null +++ b/crates/extension_api/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "zed_extension_api" +version = "0.0.6" +description = "APIs for creating Zed extensions in Rust" +repository = "https://github.com/zed-industries/zed" +documentation = "https://docs.rs/zed_extension_api" +keywords = ["zed", "extension"] +edition = "2021" +license = "Apache-2.0" + +[lints] +workspace = true + +[lib] +path = "src/extension_api.rs" + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +wit-bindgen = "0.22" + +[package.metadata.component] +target = { path = "wit" } diff --git a/crates/extension_api/LICENSE-APACHE b/crates/extension_api/LICENSE-APACHE new file mode 100644 index 0000000..1cd601d --- /dev/null +++ b/crates/extension_api/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/extension_api/README.md b/crates/extension_api/README.md new file mode 100644 index 0000000..7e55d5f --- /dev/null +++ b/crates/extension_api/README.md @@ -0,0 +1,69 @@ +# The Zed Rust Extension API + +This crate lets you write extensions for Zed in Rust. + +## Extension Manifest + +You'll need an `extension.toml` file at the root of your extension directory, with the following structure: + +```toml +id = "my-extension" +name = "My Extension" +description = "..." +version = "0.0.1" +schema_version = 1 +authors = ["Your Name "] +repository = "https://github.com/your/extension-repository" +``` + +## Cargo metadata + +Zed extensions are packaged as WebAssembly files. In your Cargo.toml, you'll +need to set your `crate-type` accordingly: + +```toml +[dependencies] +zed_extension_api = "0.0.6" + +[lib] +crate-type = ["cdylib"] +``` + +## Implementing an Extension + +To define your extension, create a type that implements the `Extension` trait, and register it. + +```rust +use zed_extension_api as zed; + +struct MyExtension { + // ... state +} + +impl zed::Extension for MyExtension { + // ... +} + +zed::register_extension!(MyExtension); +``` + +## Testing your extension + +To run your extension in Zed as you're developing it: + +- Open the extensions view using the `zed: extensions` action in the command palette. +- Click the `Install Dev Extension` button in the top right +- Choose the path to your extension directory. + +## Compatible Zed versions + +Extensions created using newer versions of the Zed extension API won't be compatible with older versions of Zed. 
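+
+Compatibility is enforced mechanically: the crate embeds its API version into the
+compiled Wasm as a 6-byte `zed:api-version` custom section (three big-endian `u16`s:
+major, minor, patch) via its build script and a `#[link_section]` static, and Zed
+checks that this stamped version falls within the range of API versions it supports
+before loading the extension. As a rough, illustrative sketch of that encoding (not
+part of the public API):
+
+```rust
+// Encode a version the way the build script does: three big-endian u16s.
+fn encode_api_version(major: u16, minor: u16, patch: u16) -> [u8; 6] {
+    let [a, b] = major.to_be_bytes();
+    let [c, d] = minor.to_be_bytes();
+    let [e, f] = patch.to_be_bytes();
+    [a, b, c, d, e, f]
+}
+
+// Decode the custom section back into (major, minor, patch), as the host does.
+fn decode_api_version(data: &[u8]) -> Option<(u16, u16, u16)> {
+    let data: &[u8; 6] = data.try_into().ok()?;
+    Some((
+        u16::from_be_bytes([data[0], data[1]]),
+        u16::from_be_bytes([data[2], data[3]]),
+        u16::from_be_bytes([data[4], data[5]]),
+    ))
+}
+
+fn main() {
+    assert_eq!(decode_api_version(&encode_api_version(0, 0, 6)), Some((0, 0, 6)));
+}
+```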
+ +Here is the compatibility of the `zed_extension_api` with versions of Zed: + +| Zed version | `zed_extension_api` version | +| ----------- | --------------------------- | +| `0.131.x` | `0.0.1` - `0.0.6` | +| `0.130.x` | `0.0.1` - `0.0.5` | +| `0.129.x` | `0.0.1` - `0.0.4` | +| `0.128.x` | `0.0.1` | diff --git a/crates/extension_api/build.rs b/crates/extension_api/build.rs new file mode 100644 index 0000000..4637257 --- /dev/null +++ b/crates/extension_api/build.rs @@ -0,0 +1,15 @@ +fn main() { + let version = std::env::var("CARGO_PKG_VERSION").unwrap(); + let out_dir = std::env::var("OUT_DIR").unwrap(); + + let mut parts = version.split(|c: char| !c.is_digit(10)); + let major = parts.next().unwrap().parse::().unwrap().to_be_bytes(); + let minor = parts.next().unwrap().parse::().unwrap().to_be_bytes(); + let patch = parts.next().unwrap().parse::().unwrap().to_be_bytes(); + + std::fs::write( + std::path::Path::new(&out_dir).join("version_bytes"), + [major[0], major[1], minor[0], minor[1], patch[0], patch[1]], + ) + .unwrap(); +} diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs new file mode 100644 index 0000000..be3dcfc --- /dev/null +++ b/crates/extension_api/src/extension_api.rs @@ -0,0 +1,260 @@ +//! The Zed Rust Extension API allows you write extensions for [Zed](https://zed.dev/) in Rust. + +/// Provides access to Zed settings. +pub mod settings; + +use core::fmt; + +use wit::*; + +pub use serde_json; + +// WIT re-exports. +// +// We explicitly enumerate the symbols we want to re-export, as there are some +// that we may want to shadow to provide a cleaner Rust API. +pub use wit::{ + download_file, make_file_executable, + zed::extension::github::{ + latest_github_release, GithubRelease, GithubReleaseAsset, GithubReleaseOptions, + }, + zed::extension::nodejs::{ + node_binary_path, npm_install_package, npm_package_installed_version, + npm_package_latest_version, + }, + zed::extension::platform::{current_platform, Architecture, Os}, + CodeLabel, CodeLabelSpan, CodeLabelSpanLiteral, Command, DownloadedFileType, EnvVars, + LanguageServerInstallationStatus, Range, Worktree, +}; + +// Undocumented WIT re-exports. +// +// These are symbols that need to be public for the purposes of implementing +// the extension host, but aren't relevant to extension authors. +#[doc(hidden)] +pub use wit::Guest; + +/// Constructs for interacting with language servers over the +/// Language Server Protocol (LSP). +pub mod lsp { + pub use crate::wit::zed::extension::lsp::{ + Completion, CompletionKind, InsertTextFormat, Symbol, SymbolKind, + }; +} + +/// A result returned from a Zed extension. +pub type Result = core::result::Result; + +/// Updates the installation status for the given language server. +pub fn set_language_server_installation_status( + language_server_id: &LanguageServerId, + status: &LanguageServerInstallationStatus, +) { + wit::set_language_server_installation_status(&language_server_id.0, status) +} + +/// A Zed extension. +pub trait Extension: Send + Sync { + /// Returns a new instance of the extension. + fn new() -> Self + where + Self: Sized; + + /// Returns the command used to start the language server for the specified + /// language. + fn language_server_command( + &mut self, + language_server_id: &LanguageServerId, + worktree: &Worktree, + ) -> Result; + + /// Returns the initialization options to pass to the specified language server. 
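+    ///
+    /// The returned value is serialized to JSON before being passed to the server.
+    /// A common pattern (illustrative; it assumes the server's key under `"lsp"` in
+    /// the user's settings matches the language server ID) is to forward the user's
+    /// configured options:
+    ///
+    /// ```ignore
+    /// fn language_server_initialization_options(
+    ///     &mut self,
+    ///     language_server_id: &LanguageServerId,
+    ///     worktree: &Worktree,
+    /// ) -> Result<Option<serde_json::Value>> {
+    ///     let settings = zed_extension_api::settings::LspSettings::for_worktree(
+    ///         language_server_id.as_ref(),
+    ///         worktree,
+    ///     )?;
+    ///     Ok(settings.initialization_options)
+    /// }
+    /// ```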
+ fn language_server_initialization_options( + &mut self, + _language_server_id: &LanguageServerId, + _worktree: &Worktree, + ) -> Result> { + Ok(None) + } + + /// Returns the workspace configuration options to pass to the language server. + fn language_server_workspace_configuration( + &mut self, + _language_server_id: &LanguageServerId, + _worktree: &Worktree, + ) -> Result> { + Ok(None) + } + + /// Returns the label for the given completion. + fn label_for_completion( + &self, + _language_server_id: &LanguageServerId, + _completion: Completion, + ) -> Option { + None + } + + /// Returns the label for the given symbol. + fn label_for_symbol( + &self, + _language_server_id: &LanguageServerId, + _symbol: Symbol, + ) -> Option { + None + } +} + +/// Registers the provided type as a Zed extension. +/// +/// The type must implement the [`Extension`] trait. +#[macro_export] +macro_rules! register_extension { + ($extension_type:ty) => { + #[export_name = "init-extension"] + pub extern "C" fn __init_extension() { + std::env::set_current_dir(std::env::var("PWD").unwrap()).unwrap(); + zed_extension_api::register_extension(|| { + Box::new(<$extension_type as zed_extension_api::Extension>::new()) + }); + } + }; +} + +#[doc(hidden)] +pub fn register_extension(build_extension: fn() -> Box) { + unsafe { EXTENSION = Some((build_extension)()) } +} + +fn extension() -> &'static mut dyn Extension { + unsafe { EXTENSION.as_deref_mut().unwrap() } +} + +static mut EXTENSION: Option> = None; + +#[cfg(target_arch = "wasm32")] +#[link_section = "zed:api-version"] +#[doc(hidden)] +pub static ZED_API_VERSION: [u8; 6] = *include_bytes!(concat!(env!("OUT_DIR"), "/version_bytes")); + +mod wit { + wit_bindgen::generate!({ + skip: ["init-extension"], + path: "./wit/since_v0.0.6", + }); +} + +wit::export!(Component); + +struct Component; + +impl wit::Guest for Component { + fn language_server_command( + language_server_id: String, + worktree: &wit::Worktree, + ) -> Result { + let language_server_id = LanguageServerId(language_server_id); + extension().language_server_command(&language_server_id, worktree) + } + + fn language_server_initialization_options( + language_server_id: String, + worktree: &Worktree, + ) -> Result, String> { + let language_server_id = LanguageServerId(language_server_id); + Ok(extension() + .language_server_initialization_options(&language_server_id, worktree)? + .and_then(|value| serde_json::to_string(&value).ok())) + } + + fn language_server_workspace_configuration( + language_server_id: String, + worktree: &Worktree, + ) -> Result, String> { + let language_server_id = LanguageServerId(language_server_id); + Ok(extension() + .language_server_workspace_configuration(&language_server_id, worktree)? 
+ .and_then(|value| serde_json::to_string(&value).ok())) + } + + fn labels_for_completions( + language_server_id: String, + completions: Vec, + ) -> Result>, String> { + let language_server_id = LanguageServerId(language_server_id); + let mut labels = Vec::new(); + for (ix, completion) in completions.into_iter().enumerate() { + let label = extension().label_for_completion(&language_server_id, completion); + if let Some(label) = label { + labels.resize(ix + 1, None); + *labels.last_mut().unwrap() = Some(label); + } + } + Ok(labels) + } + + fn labels_for_symbols( + language_server_id: String, + symbols: Vec, + ) -> Result>, String> { + let language_server_id = LanguageServerId(language_server_id); + let mut labels = Vec::new(); + for (ix, symbol) in symbols.into_iter().enumerate() { + let label = extension().label_for_symbol(&language_server_id, symbol); + if let Some(label) = label { + labels.resize(ix + 1, None); + *labels.last_mut().unwrap() = Some(label); + } + } + Ok(labels) + } +} + +/// The ID of a language server. +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)] +pub struct LanguageServerId(String); + +impl AsRef for LanguageServerId { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl fmt::Display for LanguageServerId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl CodeLabelSpan { + /// Returns a [`CodeLabelSpan::CodeRange`]. + pub fn code_range(range: impl Into) -> Self { + Self::CodeRange(range.into()) + } + + /// Returns a [`CodeLabelSpan::Literal`]. + pub fn literal(text: impl Into, highlight_name: Option) -> Self { + Self::Literal(CodeLabelSpanLiteral { + text: text.into(), + highlight_name, + }) + } +} + +impl From> for wit::Range { + fn from(value: std::ops::Range) -> Self { + Self { + start: value.start, + end: value.end, + } + } +} + +impl From> for wit::Range { + fn from(value: std::ops::Range) -> Self { + Self { + start: value.start as u32, + end: value.end as u32, + } + } +} diff --git a/crates/extension_api/src/settings.rs b/crates/extension_api/src/settings.rs new file mode 100644 index 0000000..ffed133 --- /dev/null +++ b/crates/extension_api/src/settings.rs @@ -0,0 +1,32 @@ +#[path = "../wit/since_v0.0.6/settings.rs"] +mod types; + +use crate::{wit, Result, SettingsLocation, Worktree}; +use serde_json; +pub use types::*; + +impl LanguageSettings { + /// Returns the [`LanguageSettings`] for the given language. + pub fn for_worktree(language: Option<&str>, worktree: &Worktree) -> Result { + let location = SettingsLocation { + worktree_id: worktree.id(), + path: worktree.root_path(), + }; + let settings_json = wit::get_settings(Some(&location), "language", language)?; + let settings: Self = serde_json::from_str(&settings_json).map_err(|err| err.to_string())?; + Ok(settings) + } +} + +impl LspSettings { + /// Returns the [`LspSettings`] for the given language server. 
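+    ///
+    /// For example, an extension can let users point at their own language server
+    /// binary through the `"lsp"` section of their settings (illustrative; the
+    /// server name is hypothetical):
+    ///
+    /// ```ignore
+    /// let settings = LspSettings::for_worktree("my-language-server", worktree)?;
+    /// if let Some(path) = settings.binary.and_then(|binary| binary.path) {
+    ///     // Prefer the user-specified binary over one the extension would download.
+    /// }
+    /// ```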
+ pub fn for_worktree(language_server_name: &str, worktree: &Worktree) -> Result { + let location = SettingsLocation { + worktree_id: worktree.id(), + path: worktree.root_path(), + }; + let settings_json = wit::get_settings(Some(&location), "lsp", Some(language_server_name))?; + let settings: Self = serde_json::from_str(&settings_json).map_err(|err| err.to_string())?; + Ok(settings) + } +} diff --git a/crates/extension_api/wit/since_v0.0.1/extension.wit b/crates/extension_api/wit/since_v0.0.1/extension.wit new file mode 100644 index 0000000..339a974 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.1/extension.wit @@ -0,0 +1,70 @@ +package zed:extension; + +world extension { + use github.{github-release, github-release-options}; + use platform.{os, architecture}; + + export init-extension: func(); + + enum downloaded-file-type { + gzip, + gzip-tar, + zip, + uncompressed, + } + + variant language-server-installation-status { + checking-for-update, + downloaded, + downloading, + cached, + failed(string), + } + + /// Gets the current operating system and architecture + import current-platform: func() -> tuple; + + /// Get the path to the node binary used by Zed. + import node-binary-path: func() -> result; + + /// Gets the latest version of the given NPM package. + import npm-package-latest-version: func(package-name: string) -> result; + + /// Returns the installed version of the given NPM package, if it exists. + import npm-package-installed-version: func(package-name: string) -> result, string>; + + /// Installs the specified NPM package. + import npm-install-package: func(package-name: string, version: string) -> result<_, string>; + + /// Gets the latest release for the given GitHub repository. + import latest-github-release: func(repo: string, options: github-release-options) -> result; + + /// Downloads a file from the given url, and saves it to the given filename within the extension's + /// working directory. Extracts the file according to the given file type. + import download-file: func(url: string, output-filename: string, file-type: downloaded-file-type) -> result<_, string>; + + /// Updates the installation status for the given language server. + import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status); + + type env-vars = list>; + + record command { + command: string, + args: list, + env: env-vars, + } + + resource worktree { + read-text-file: func(path: string) -> result; + which: func(binary-name: string) -> option; + shell-env: func() -> env-vars; + } + + record language-server-config { + name: string, + language-name: string, + } + + export language-server-command: func(config: language-server-config, worktree: borrow) -> result; + export language-server-initialization-options: func(config: language-server-config, worktree: borrow) -> result, string>; +} diff --git a/crates/extension_api/wit/since_v0.0.1/github.wit b/crates/extension_api/wit/since_v0.0.1/github.wit new file mode 100644 index 0000000..53ecacb --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.1/github.wit @@ -0,0 +1,28 @@ +interface github { + /// A GitHub release. + record github-release { + /// The version of the release. + version: string, + /// The list of assets attached to the release. + assets: list, + } + + /// An asset from a GitHub release. + record github-release-asset { + /// The name of the asset. + name: string, + /// The download URL for the asset. 
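+        /// (Corresponds to the `browser_download_url` reported by the GitHub API.)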
+ download-url: string, + } + + /// The options used to filter down GitHub releases. + record github-release-options { + /// Whether releases without assets should be included. + require-assets: bool, + /// Whether pre-releases should be included. + pre-release: bool, + } + + /// Returns the latest release for the given GitHub repository. + latest-github-release: func(repo: string, options: github-release-options) -> result; +} diff --git a/crates/extension_api/wit/since_v0.0.1/platform.wit b/crates/extension_api/wit/since_v0.0.1/platform.wit new file mode 100644 index 0000000..48472a9 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.1/platform.wit @@ -0,0 +1,24 @@ +interface platform { + /// An operating system. + enum os { + /// macOS. + mac, + /// Linux. + linux, + /// Windows. + windows, + } + + /// A platform architecture. + enum architecture { + /// AArch64 (e.g., Apple Silicon). + aarch64, + /// x86. + x86, + /// x86-64. + x8664, + } + + /// Gets the current operating system and architecture. + current-platform: func() -> tuple; +} diff --git a/crates/extension_api/wit/since_v0.0.4/extension.wit b/crates/extension_api/wit/since_v0.0.4/extension.wit new file mode 100644 index 0000000..c6f3e73 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.4/extension.wit @@ -0,0 +1,72 @@ +package zed:extension; + +world extension { + use github.{github-release, github-release-options}; + use platform.{os, architecture}; + + export init-extension: func(); + + enum downloaded-file-type { + gzip, + gzip-tar, + zip, + uncompressed, + } + + variant language-server-installation-status { + none, + downloading, + checking-for-update, + failed(string), + } + + /// Gets the current operating system and architecture + import current-platform: func() -> tuple; + + /// Get the path to the node binary used by Zed. + import node-binary-path: func() -> result; + + /// Gets the latest version of the given NPM package. + import npm-package-latest-version: func(package-name: string) -> result; + + /// Returns the installed version of the given NPM package, if it exists. + import npm-package-installed-version: func(package-name: string) -> result, string>; + + /// Installs the specified NPM package. + import npm-install-package: func(package-name: string, version: string) -> result<_, string>; + + /// Gets the latest release for the given GitHub repository. + import latest-github-release: func(repo: string, options: github-release-options) -> result; + + /// Downloads a file from the given url, and saves it to the given path within the extension's + /// working directory. Extracts the file according to the given file type. + import download-file: func(url: string, file-path: string, file-type: downloaded-file-type) -> result<_, string>; + + /// Makes the file at the given path executable. + import make-file-executable: func(filepath: string) -> result<_, string>; + + /// Updates the installation status for the given language server. 
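The `github`, `platform`, and `download-file`/`make-file-executable` imports above combine into the usual "fetch a prebuilt server from a GitHub release" flow. A hedged sketch of that flow, assuming the Rust wrappers generated from these WIT files (`latest_github_release`, `current_platform`, `download_file`, and so on); the repository and asset naming scheme are invented:

```rust
// Hypothetical download flow built from the host imports declared in the WIT worlds
// above. The repository name, asset naming scheme, and binary path are made up.
use zed_extension_api::{
    self as zed, DownloadedFileType, GithubReleaseOptions, LanguageServerId,
    LanguageServerInstallationStatus,
};

fn download_server(language_server_id: &LanguageServerId) -> zed::Result<String> {
    zed::set_language_server_installation_status(
        language_server_id,
        &LanguageServerInstallationStatus::CheckingForUpdate,
    );

    // `example-org/example-language-server` is an invented repository.
    let release = zed::latest_github_release(
        "example-org/example-language-server",
        GithubReleaseOptions {
            require_assets: true,
            pre_release: false,
        },
    )?;

    let (os, arch) = zed::current_platform();
    let asset_name = format!(
        "example-language-server-{arch}-{os}.tar.gz",
        arch = match arch {
            zed::Architecture::Aarch64 => "aarch64",
            zed::Architecture::X86 => "x86",
            zed::Architecture::X8664 => "x86_64",
        },
        os = match os {
            zed::Os::Mac => "macos",
            zed::Os::Linux => "linux",
            zed::Os::Windows => "windows",
        },
    );
    let asset = release
        .assets
        .iter()
        .find(|asset| asset.name == asset_name)
        .ok_or_else(|| format!("no release asset named {asset_name}"))?;

    // Extract into the extension's working directory and mark the binary executable.
    let version_dir = format!("example-language-server-{}", release.version);
    let binary_path = format!("{version_dir}/bin/server");
    zed::set_language_server_installation_status(
        language_server_id,
        &LanguageServerInstallationStatus::Downloading,
    );
    zed::download_file(&asset.download_url, &version_dir, DownloadedFileType::GzipTar)?;
    zed::make_file_executable(&binary_path)?;

    Ok(binary_path)
}
```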
+ import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status); + + type env-vars = list>; + + record command { + command: string, + args: list, + env: env-vars, + } + + resource worktree { + read-text-file: func(path: string) -> result; + which: func(binary-name: string) -> option; + shell-env: func() -> env-vars; + } + + record language-server-config { + name: string, + language-name: string, + } + + export language-server-command: func(config: language-server-config, worktree: borrow) -> result; + export language-server-initialization-options: func(config: language-server-config, worktree: borrow) -> result, string>; +} diff --git a/crates/extension_api/wit/since_v0.0.4/github.wit b/crates/extension_api/wit/since_v0.0.4/github.wit new file mode 100644 index 0000000..53ecacb --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.4/github.wit @@ -0,0 +1,28 @@ +interface github { + /// A GitHub release. + record github-release { + /// The version of the release. + version: string, + /// The list of assets attached to the release. + assets: list, + } + + /// An asset from a GitHub release. + record github-release-asset { + /// The name of the asset. + name: string, + /// The download URL for the asset. + download-url: string, + } + + /// The options used to filter down GitHub releases. + record github-release-options { + /// Whether releases without assets should be included. + require-assets: bool, + /// Whether pre-releases should be included. + pre-release: bool, + } + + /// Returns the latest release for the given GitHub repository. + latest-github-release: func(repo: string, options: github-release-options) -> result; +} diff --git a/crates/extension_api/wit/since_v0.0.4/platform.wit b/crates/extension_api/wit/since_v0.0.4/platform.wit new file mode 100644 index 0000000..48472a9 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.4/platform.wit @@ -0,0 +1,24 @@ +interface platform { + /// An operating system. + enum os { + /// macOS. + mac, + /// Linux. + linux, + /// Windows. + windows, + } + + /// A platform architecture. + enum architecture { + /// AArch64 (e.g., Apple Silicon). + aarch64, + /// x86. + x86, + /// x86-64. + x8664, + } + + /// Gets the current operating system and architecture. + current-platform: func() -> tuple; +} diff --git a/crates/extension_api/wit/since_v0.0.6/extension.wit b/crates/extension_api/wit/since_v0.0.6/extension.wit new file mode 100644 index 0000000..2f42cc0 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.6/extension.wit @@ -0,0 +1,130 @@ +package zed:extension; + +world extension { + import github; + import platform; + import nodejs; + + use lsp.{completion, symbol}; + + /// Initializes the extension. + export init-extension: func(); + + /// The type of a downloaded file. + enum downloaded-file-type { + /// A gzipped file (`.gz`). + gzip, + /// A gzipped tar archive (`.tar.gz`). + gzip-tar, + /// A ZIP file (`.zip`). + zip, + /// An uncompressed file. + uncompressed, + } + + /// The installation status for a language server. + variant language-server-installation-status { + /// The language server has no installation status. + none, + /// The language server is being downloaded. + downloading, + /// The language server is checking for updates. + checking-for-update, + /// The language server installation failed for specified reason. 
+ failed(string), + } + + record settings-location { + worktree-id: u64, + path: string, + } + + import get-settings: func(path: option, category: string, key: option) -> result; + + /// Downloads a file from the given URL and saves it to the given path within the extension's + /// working directory. + /// + /// The file will be extracted according to the given file type. + import download-file: func(url: string, file-path: string, file-type: downloaded-file-type) -> result<_, string>; + + /// Makes the file at the given path executable. + import make-file-executable: func(filepath: string) -> result<_, string>; + + /// Updates the installation status for the given language server. + import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status); + + /// A list of environment variables. + type env-vars = list>; + + /// A command. + record command { + /// The command to execute. + command: string, + /// The arguments to pass to the command. + args: list, + /// The environment variables to set for the command. + env: env-vars, + } + + /// A Zed worktree. + resource worktree { + /// Returns the ID of the worktree. + id: func() -> u64; + /// Returns the root path of the worktree. + root-path: func() -> string; + /// Returns the textual contents of the specified file in the worktree. + read-text-file: func(path: string) -> result; + /// Returns the path to the given binary name, if one is present on the `$PATH`. + which: func(binary-name: string) -> option; + /// Returns the current shell environment. + shell-env: func() -> env-vars; + } + + /// Returns the command used to start up the language server. + export language-server-command: func(language-server-id: string, worktree: borrow) -> result; + + /// Returns the initialization options to pass to the language server on startup. + /// + /// The initialization options are represented as a JSON string. + export language-server-initialization-options: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// Returns the workspace configuration options to pass to the language server. + export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// A label containing some code. + record code-label { + /// The source code to parse with Tree-sitter. + code: string, + /// The spans to display in the label. + spans: list, + /// The range of the displayed label to include when filtering. + filter-range: range, + } + + /// A span within a code label. + variant code-label-span { + /// A range into the parsed code. + code-range(range), + /// A span containing a code literal. + literal(code-label-span-literal), + } + + /// A span containing a code literal. + record code-label-span-literal { + /// The literal text. + text: string, + /// The name of the highlight to use for this literal. + highlight-name: option, + } + + /// A (half-open) range (`[start, end)`). + record range { + /// The start of the range (inclusive). + start: u32, + /// The end of the range (exclusive). 
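On the Rust side, the `language-server-initialization-options` and `language-server-workspace-configuration` exports above return an `Option<serde_json::Value>`, which the exports shim at the top of this hunk serializes with `serde_json::to_string` into the JSON string the WIT signature expects. A minimal sketch of a workspace-configuration implementation, assuming the trait surface from this diff and a `serde_json` dependency in the extension crate; the forwarded settings shape is illustrative:

```rust
// Hypothetical sketch: forward the user's per-server `settings` block as the
// language server's workspace configuration. Assumes the extension crate depends
// on `serde_json`; the fallback empty object is a choice made for illustration.
use zed_extension_api::{self as zed, settings::LspSettings, LanguageServerId, Result, Worktree};

struct MyExtension;

impl zed::Extension for MyExtension {
    fn new() -> Self {
        MyExtension
    }

    fn language_server_workspace_configuration(
        &mut self,
        language_server_id: &LanguageServerId,
        worktree: &Worktree,
    ) -> Result<Option<serde_json::Value>> {
        let settings = LspSettings::for_worktree(language_server_id.as_ref(), worktree)
            .ok()
            .and_then(|lsp_settings| lsp_settings.settings)
            .unwrap_or_else(|| serde_json::json!({}));
        Ok(Some(settings))
    }
}
```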
+ end: u32, + } + + export labels-for-completions: func(language-server-id: string, completions: list) -> result>, string>; + export labels-for-symbols: func(language-server-id: string, symbols: list) -> result>, string>; +} diff --git a/crates/extension_api/wit/since_v0.0.6/github.wit b/crates/extension_api/wit/since_v0.0.6/github.wit new file mode 100644 index 0000000..53ecacb --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.6/github.wit @@ -0,0 +1,28 @@ +interface github { + /// A GitHub release. + record github-release { + /// The version of the release. + version: string, + /// The list of assets attached to the release. + assets: list, + } + + /// An asset from a GitHub release. + record github-release-asset { + /// The name of the asset. + name: string, + /// The download URL for the asset. + download-url: string, + } + + /// The options used to filter down GitHub releases. + record github-release-options { + /// Whether releases without assets should be included. + require-assets: bool, + /// Whether pre-releases should be included. + pre-release: bool, + } + + /// Returns the latest release for the given GitHub repository. + latest-github-release: func(repo: string, options: github-release-options) -> result; +} diff --git a/crates/extension_api/wit/since_v0.0.6/lsp.wit b/crates/extension_api/wit/since_v0.0.6/lsp.wit new file mode 100644 index 0000000..19e81b6 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.6/lsp.wit @@ -0,0 +1,83 @@ +interface lsp { + /// An LSP completion. + record completion { + label: string, + detail: option, + kind: option, + insert-text-format: option, + } + + /// The kind of an LSP completion. + variant completion-kind { + text, + method, + function, + %constructor, + field, + variable, + class, + %interface, + module, + property, + unit, + value, + %enum, + keyword, + snippet, + color, + file, + reference, + folder, + enum-member, + constant, + struct, + event, + operator, + type-parameter, + other(s32), + } + + /// Defines how to interpret the insert text in a completion item. + variant insert-text-format { + plain-text, + snippet, + other(s32), + } + + /// An LSP symbol. + record symbol { + kind: symbol-kind, + name: string, + } + + /// The kind of an LSP symbol. + variant symbol-kind { + file, + module, + namespace, + %package, + class, + method, + property, + field, + %constructor, + %enum, + %interface, + function, + variable, + constant, + %string, + number, + boolean, + array, + object, + key, + null, + enum-member, + struct, + event, + operator, + type-parameter, + other(s32), + } +} diff --git a/crates/extension_api/wit/since_v0.0.6/nodejs.wit b/crates/extension_api/wit/since_v0.0.6/nodejs.wit new file mode 100644 index 0000000..c814548 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.6/nodejs.wit @@ -0,0 +1,13 @@ +interface nodejs { + /// Returns the path to the Node binary used by Zed. + node-binary-path: func() -> result; + + /// Returns the latest version of the given NPM package. + npm-package-latest-version: func(package-name: string) -> result; + + /// Returns the installed version of the given NPM package, if it exists. + npm-package-installed-version: func(package-name: string) -> result, string>; + + /// Installs the specified NPM package. 
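The `nodejs` interface above supports npm-distributed language servers: check the latest published version, install the package if the cached copy is stale, and launch it with Zed's bundled Node. A hedged sketch assuming the generated `zed_extension_api` wrappers for these imports; the package name and entry-point path are invented:

```rust
// Hypothetical npm-based install-and-launch flow built on the `nodejs` imports above.
// `example-language-server` and `bin/server.js` are made-up names.
use zed_extension_api::{self as zed, Command, LanguageServerId, Result, Worktree};

const PACKAGE_NAME: &str = "example-language-server";

fn ensure_server_installed() -> Result<()> {
    let latest = zed::npm_package_latest_version(PACKAGE_NAME)?;
    let installed = zed::npm_package_installed_version(PACKAGE_NAME)?;
    // Reinstall whenever the cached copy is missing or out of date.
    if installed.as_deref() != Some(latest.as_str()) {
        zed::npm_install_package(PACKAGE_NAME, &latest)?;
    }
    Ok(())
}

fn server_command(_language_server_id: &LanguageServerId, worktree: &Worktree) -> Result<Command> {
    ensure_server_installed()?;
    Ok(Command {
        // Run the package's entry point with Zed's own Node binary.
        command: zed::node_binary_path()?,
        args: vec![
            format!("node_modules/{PACKAGE_NAME}/bin/server.js"),
            "--stdio".to_string(),
        ],
        env: worktree.shell_env(),
    })
}
```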
+ npm-install-package: func(package-name: string, version: string) -> result<_, string>; +} diff --git a/crates/extension_api/wit/since_v0.0.6/platform.wit b/crates/extension_api/wit/since_v0.0.6/platform.wit new file mode 100644 index 0000000..48472a9 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.6/platform.wit @@ -0,0 +1,24 @@ +interface platform { + /// An operating system. + enum os { + /// macOS. + mac, + /// Linux. + linux, + /// Windows. + windows, + } + + /// A platform architecture. + enum architecture { + /// AArch64 (e.g., Apple Silicon). + aarch64, + /// x86. + x86, + /// x86-64. + x8664, + } + + /// Gets the current operating system and architecture. + current-platform: func() -> tuple; +} diff --git a/crates/extension_api/wit/since_v0.0.6/settings.rs b/crates/extension_api/wit/since_v0.0.6/settings.rs new file mode 100644 index 0000000..5c6cae7 --- /dev/null +++ b/crates/extension_api/wit/since_v0.0.6/settings.rs @@ -0,0 +1,29 @@ +use serde::{Deserialize, Serialize}; +use std::num::NonZeroU32; + +/// The settings for a particular language. +#[derive(Debug, Serialize, Deserialize)] +pub struct LanguageSettings { + /// How many columns a tab should occupy. + pub tab_size: NonZeroU32, +} + +/// The settings for a particular language server. +#[derive(Default, Debug, Serialize, Deserialize)] +pub struct LspSettings { + /// The settings for the language server binary. + pub binary: Option, + /// The initialization options to pass to the language server. + pub initialization_options: Option, + /// The settings to pass to language server. + pub settings: Option, +} + +/// The settings for a language server binary. +#[derive(Debug, Serialize, Deserialize)] +pub struct BinarySettings { + /// The path to the binary. + pub path: Option, + /// The arguments to pass to the binary. 
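`LspSettings` and `BinarySettings` above deserialize the per-server `lsp` block of the user's Zed settings, as fetched through `get-settings`. A standalone sketch of that JSON shape using local mirror structs (so it runs with only `serde` and `serde_json`); the field names follow the diff, the concrete values are made up:

```rust
// Local mirrors of the diff's `LspSettings`/`BinarySettings`, used only so this
// example is self-contained. The JSON below is an illustrative guess at what the
// `get-settings` host call might hand back for the `lsp` category.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct LspSettings {
    binary: Option<BinarySettings>,
    initialization_options: Option<serde_json::Value>,
    settings: Option<serde_json::Value>,
}

#[derive(Debug, Deserialize)]
struct BinarySettings {
    path: Option<String>,
    arguments: Option<Vec<String>>,
}

fn main() -> serde_json::Result<()> {
    let json = r#"{
        "binary": { "path": "/usr/local/bin/my-server", "arguments": ["--stdio"] },
        "initialization_options": { "formatting": true },
        "settings": { "diagnostics": { "enable": true } }
    }"#;
    let settings: LspSettings = serde_json::from_str(json)?;
    assert_eq!(
        settings.binary.as_ref().and_then(|binary| binary.path.as_deref()),
        Some("/usr/local/bin/my-server")
    );
    println!("{settings:?}");
    Ok(())
}
```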
+ pub arguments: Option>, +} diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml new file mode 100644 index 0000000..f459c93 --- /dev/null +++ b/crates/extension_cli/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "extension_cli" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[[bin]] +name = "zed-extension" +path = "src/main.rs" + +[dependencies] +anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } +env_logger.workspace = true +fs.workspace = true +extension.workspace = true +language.workspace = true +log.workspace = true +rpc.workspace = true +serde.workspace = true +serde_json.workspace = true +theme.workspace = true +tokio.workspace = true +toml.workspace = true +tree-sitter.workspace = true +wasmtime.workspace = true diff --git a/crates/extension_cli/LICENSE-GPL b/crates/extension_cli/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/extension_cli/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs new file mode 100644 index 0000000..509e451 --- /dev/null +++ b/crates/extension_cli/src/main.rs @@ -0,0 +1,265 @@ +use std::{ + collections::HashMap, + env, fs, + path::{Path, PathBuf}, + process::Command, + sync::Arc, +}; + +use ::fs::{copy_recursive, CopyOptions, Fs, RealFs}; +use anyhow::{anyhow, bail, Context, Result}; +use clap::Parser; +use extension::{ + extension_builder::{CompileExtensionOptions, ExtensionBuilder}, + ExtensionManifest, +}; +use language::LanguageConfig; +use theme::ThemeRegistry; +use tree_sitter::{Language, Query, WasmStore}; + +#[derive(Parser, Debug)] +#[command(name = "zed-extension")] +struct Args { + /// The path to the extension directory + #[arg(long)] + source_dir: PathBuf, + /// The output directory to place the packaged extension. 
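The `Args` struct above defines the whole CLI surface of `zed-extension`. A standalone sketch with a local copy of those flags, showing what a parsed invocation looks like; the paths are made up:

```rust
// Local copy of the CLI flags from the diff's `Args`, parsed from an illustrative
// invocation such as:
//   zed-extension --source-dir ./my-extension --output-dir ./build --scratch-dir ./scratch
use clap::Parser;
use std::path::PathBuf;

#[derive(Parser, Debug)]
#[command(name = "zed-extension")]
struct Args {
    /// The path to the extension directory
    #[arg(long)]
    source_dir: PathBuf,
    /// The output directory to place the packaged extension
    #[arg(long)]
    output_dir: PathBuf,
    /// The path to a directory where build dependencies are downloaded
    #[arg(long)]
    scratch_dir: PathBuf,
}

fn main() {
    let args = Args::parse_from([
        "zed-extension",
        "--source-dir", "./my-extension",
        "--output-dir", "./build",
        "--scratch-dir", "./scratch",
    ]);
    assert_eq!(args.output_dir, PathBuf::from("./build"));
    println!("{args:?}");
}
```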
+ #[arg(long)] + output_dir: PathBuf, + /// The path to a directory where build dependencies are downloaded + #[arg(long)] + scratch_dir: PathBuf, +} + +#[tokio::main] +async fn main() -> Result<()> { + env_logger::init(); + + let args = Args::parse(); + let fs = Arc::new(RealFs::default()); + let engine = wasmtime::Engine::default(); + let mut wasm_store = WasmStore::new(engine)?; + + let extension_path = args + .source_dir + .canonicalize() + .context("failed to canonicalize source_dir")?; + let scratch_dir = args + .scratch_dir + .canonicalize() + .context("failed to canonicalize scratch_dir")?; + let output_dir = if args.output_dir.is_relative() { + env::current_dir()?.join(&args.output_dir) + } else { + args.output_dir + }; + + log::info!("loading extension manifest"); + let mut manifest = ExtensionManifest::load(fs.clone(), &extension_path).await?; + + log::info!("compiling extension"); + let builder = ExtensionBuilder::new(scratch_dir); + builder + .compile_extension( + &extension_path, + &mut manifest, + CompileExtensionOptions { release: true }, + ) + .await + .context("failed to compile extension")?; + + let grammars = test_grammars(&manifest, &extension_path, &mut wasm_store)?; + test_languages(&manifest, &extension_path, &grammars)?; + test_themes(&manifest, &extension_path, fs.clone()).await?; + + let archive_dir = output_dir.join("archive"); + fs::remove_dir_all(&archive_dir).ok(); + copy_extension_resources(&manifest, &extension_path, &archive_dir, fs.clone()) + .await + .context("failed to copy extension resources")?; + + let tar_output = Command::new("tar") + .current_dir(&output_dir) + .args(&["-czvf", "archive.tar.gz", "-C", "archive", "."]) + .output() + .context("failed to run tar")?; + if !tar_output.status.success() { + bail!( + "failed to create archive.tar.gz: {}", + String::from_utf8_lossy(&tar_output.stderr) + ); + } + + let manifest_json = serde_json::to_string(&rpc::ExtensionApiManifest { + name: manifest.name, + version: manifest.version, + description: manifest.description, + authors: manifest.authors, + schema_version: Some(manifest.schema_version.0), + repository: manifest + .repository + .ok_or_else(|| anyhow!("missing repository in extension manifest"))?, + wasm_api_version: manifest.lib.version.map(|version| version.to_string()), + })?; + fs::remove_dir_all(&archive_dir)?; + fs::write(output_dir.join("manifest.json"), manifest_json.as_bytes())?; + + Ok(()) +} + +async fn copy_extension_resources( + manifest: &ExtensionManifest, + extension_path: &Path, + output_dir: &Path, + fs: Arc, +) -> Result<()> { + fs::create_dir_all(&output_dir).context("failed to create output dir")?; + + let manifest_toml = toml::to_string(&manifest).context("failed to serialize manifest")?; + fs::write(output_dir.join("extension.toml"), &manifest_toml) + .context("failed to write extension.toml")?; + + if manifest.lib.kind.is_some() { + fs::copy( + extension_path.join("extension.wasm"), + output_dir.join("extension.wasm"), + ) + .context("failed to copy extension.wasm")?; + } + + if !manifest.grammars.is_empty() { + let source_grammars_dir = extension_path.join("grammars"); + let output_grammars_dir = output_dir.join("grammars"); + fs::create_dir_all(&output_grammars_dir)?; + for grammar_name in manifest.grammars.keys() { + let mut grammar_filename = PathBuf::from(grammar_name.as_ref()); + grammar_filename.set_extension("wasm"); + fs::copy( + &source_grammars_dir.join(&grammar_filename), + &output_grammars_dir.join(&grammar_filename), + ) + .with_context(|| format!("failed to 
copy grammar '{}'", grammar_filename.display()))?; + } + } + + if !manifest.themes.is_empty() { + let output_themes_dir = output_dir.join("themes"); + fs::create_dir_all(&output_themes_dir)?; + for theme_path in &manifest.themes { + fs::copy( + extension_path.join(theme_path), + output_themes_dir.join( + theme_path + .file_name() + .ok_or_else(|| anyhow!("invalid theme path"))?, + ), + ) + .with_context(|| format!("failed to copy theme '{}'", theme_path.display()))?; + } + } + + if !manifest.languages.is_empty() { + let output_languages_dir = output_dir.join("languages"); + fs::create_dir_all(&output_languages_dir)?; + for language_path in &manifest.languages { + copy_recursive( + fs.as_ref(), + &extension_path.join(language_path), + &output_languages_dir.join( + language_path + .file_name() + .ok_or_else(|| anyhow!("invalid language path"))?, + ), + CopyOptions { + overwrite: true, + ignore_if_exists: false, + }, + ) + .await + .with_context(|| { + format!("failed to copy language dir '{}'", language_path.display()) + })?; + } + } + + Ok(()) +} + +fn test_grammars( + manifest: &ExtensionManifest, + extension_path: &Path, + wasm_store: &mut WasmStore, +) -> Result> { + let mut grammars = HashMap::default(); + let grammars_dir = extension_path.join("grammars"); + + for grammar_name in manifest.grammars.keys() { + let mut grammar_path = grammars_dir.join(grammar_name.as_ref()); + grammar_path.set_extension("wasm"); + + let wasm = fs::read(&grammar_path)?; + let language = wasm_store.load_language(grammar_name, &wasm)?; + log::info!("loaded grammar {grammar_name}"); + grammars.insert(grammar_name.to_string(), language); + } + + Ok(grammars) +} + +fn test_languages( + manifest: &ExtensionManifest, + extension_path: &Path, + grammars: &HashMap, +) -> Result<()> { + for relative_language_dir in &manifest.languages { + let language_dir = extension_path.join(relative_language_dir); + let config_path = language_dir.join("config.toml"); + let config_content = fs::read_to_string(&config_path)?; + let config: LanguageConfig = toml::from_str(&config_content)?; + let grammar = if let Some(name) = &config.grammar { + Some( + grammars + .get(name.as_ref()) + .ok_or_else(|| anyhow!("grammar not found: '{name}'"))?, + ) + } else { + None + }; + + let query_entries = fs::read_dir(&language_dir)?; + for entry in query_entries { + let entry = entry?; + let query_path = entry.path(); + if query_path.extension() == Some("scm".as_ref()) { + let grammar = grammar.ok_or_else(|| { + anyhow!( + "language {} provides query {} but no grammar", + config.name, + query_path.display() + ) + })?; + + let query_source = fs::read_to_string(&query_path)?; + let _query = Query::new(grammar, &query_source)?; + } + } + + log::info!("loaded language {}", config.name); + } + + Ok(()) +} + +async fn test_themes( + manifest: &ExtensionManifest, + extension_path: &Path, + fs: Arc, +) -> Result<()> { + for relative_theme_path in &manifest.themes { + let theme_path = extension_path.join(relative_theme_path); + let theme_family = ThemeRegistry::read_user_theme(&theme_path, fs.clone()).await?; + log::info!("loaded theme family {}", theme_family.name); + } + + Ok(()) +} diff --git a/crates/extensions_ui/Cargo.toml b/crates/extensions_ui/Cargo.toml new file mode 100644 index 0000000..586739f --- /dev/null +++ b/crates/extensions_ui/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "extensions_ui" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = 
"src/extensions_ui.rs" + +[features] +test-support = [] + +[dependencies] +anyhow.workspace = true +client.workspace = true +db.workspace = true +editor.workspace = true +extension.workspace = true +fs.workspace = true +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +picker.workspace = true +project.workspace = true +semantic_version.workspace = true +serde.workspace = true +settings.workspace = true +smallvec.workspace = true +theme.workspace = true +theme_selector.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[dev-dependencies] +editor = { workspace = true, features = ["test-support"] } diff --git a/crates/extensions_ui/LICENSE-GPL b/crates/extensions_ui/LICENSE-GPL new file mode 100644 index 0000000..89e542f --- /dev/null +++ b/crates/extensions_ui/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/extensions_ui/src/components.rs b/crates/extensions_ui/src/components.rs new file mode 100644 index 0000000..bf11abd --- /dev/null +++ b/crates/extensions_ui/src/components.rs @@ -0,0 +1,3 @@ +mod extension_card; + +pub use extension_card::*; diff --git a/crates/extensions_ui/src/components/extension_card.rs b/crates/extensions_ui/src/components/extension_card.rs new file mode 100644 index 0000000..2dc472f --- /dev/null +++ b/crates/extensions_ui/src/components/extension_card.rs @@ -0,0 +1,64 @@ +use gpui::{prelude::*, AnyElement}; +use smallvec::SmallVec; +use ui::prelude::*; + +#[derive(IntoElement)] +pub struct ExtensionCard { + overridden_by_dev_extension: bool, + children: SmallVec<[AnyElement; 2]>, +} + +impl ExtensionCard { + pub fn new() -> Self { + Self { + overridden_by_dev_extension: false, + children: SmallVec::new(), + } + } + + pub fn overridden_by_dev_extension(mut self, overridden: bool) -> Self { + self.overridden_by_dev_extension = overridden; + self + } +} + +impl ParentElement for ExtensionCard { + fn extend(&mut self, elements: impl IntoIterator) { + self.children.extend(elements) + } +} + +impl RenderOnce for ExtensionCard { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + div().w_full().child( + v_flex() + .w_full() + .h(rems(7.)) + .p_3() + .mt_4() + .gap_2() + .bg(cx.theme().colors().elevated_surface_background) + .border_1() + .border_color(cx.theme().colors().border) + .rounded_md() + .children(self.children) + .when(self.overridden_by_dev_extension, |card| { + card.child( + h_flex() + .absolute() + .top_0() + .left_0() + .occlude() + .size_full() + .items_center() + .justify_center() + .bg(theme::color_alpha( + cx.theme().colors().elevated_surface_background, + 0.8, + )) + .child(Label::new("Overridden by dev extension.")), + ) + }), + ) + } +} diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs new file mode 100644 index 0000000..6f5e808 --- /dev/null +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -0,0 +1,240 @@ +use std::collections::HashMap; +use std::path::Path; +use std::sync::{Arc, OnceLock}; + +use db::kvp::KEY_VALUE_STORE; +use editor::Editor; +use extension::ExtensionStore; +use gpui::{Model, VisualContext}; +use language::Buffer; +use ui::{SharedString, ViewContext}; +use workspace::{ + notifications::{simple_message_notification, NotificationId}, + Workspace, +}; + +const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[ + ("astro", &["astro"]), + ("beancount", &["beancount"]), + ("clojure", &["bb", "clj", "cljc", "cljs", "edn"]), + ("csharp", &["cs"]), + 
("dart", &["dart"]), + ("dockerfile", &["Dockerfile"]), + ("elisp", &["el"]), + ("elixir", &["ex", "exs", "heex"]), + ("elm", &["elm"]), + ("erlang", &["erl", "hrl"]), + ("fish", &["fish"]), + ( + "git-firefly", + &[ + ".gitconfig", + ".gitignore", + "COMMIT_EDITMSG", + "EDIT_DESCRIPTION", + "MERGE_MSG", + "NOTES_EDITMSG", + "TAG_EDITMSG", + "git-rebase-todo", + ], + ), + ("gleam", &["gleam"]), + ("glsl", &["vert", "frag"]), + ("graphql", &["gql", "graphql"]), + ("haskell", &["hs"]), + ("html", &["htm", "html", "shtml"]), + ("java", &["java"]), + ("kotlin", &["kt"]), + ("latex", &["tex"]), + ("log", &["log"]), + ("lua", &["lua"]), + ("make", &["Makefile"]), + ("nix", &["nix"]), + ("nu", &["nu"]), + ("ocaml", &["ml", "mli"]), + ("php", &["php"]), + ("prisma", &["prisma"]), + ("purescript", &["purs"]), + ("r", &["r", "R"]), + ("racket", &["rkt"]), + ("rescript", &["res", "resi"]), + ("ruby", &["rb", "erb"]), + ("scheme", &["scm"]), + ("scss", &["scss"]), + ("sql", &["sql"]), + ("svelte", &["svelte"]), + ("swift", &["swift"]), + ("templ", &["templ"]), + ("terraform", &["tf", "tfvars", "hcl"]), + ("toml", &["Cargo.lock", "toml"]), + ("vue", &["vue"]), + ("wgsl", &["wgsl"]), + ("wit", &["wit"]), + ("zig", &["zig"]), +]; + +fn suggested_extensions() -> &'static HashMap<&'static str, Arc> { + static SUGGESTIONS_BY_PATH_SUFFIX: OnceLock>> = OnceLock::new(); + SUGGESTIONS_BY_PATH_SUFFIX.get_or_init(|| { + SUGGESTIONS_BY_EXTENSION_ID + .into_iter() + .flat_map(|(name, path_suffixes)| { + let name = Arc::::from(*name); + path_suffixes + .into_iter() + .map(move |suffix| (*suffix, name.clone())) + }) + .collect() + }) +} + +#[derive(Debug, PartialEq, Eq, Clone)] +struct SuggestedExtension { + pub extension_id: Arc, + pub file_name_or_extension: Arc, +} + +/// Returns the suggested extension for the given [`Path`]. +fn suggested_extension(path: impl AsRef) -> Option { + let path = path.as_ref(); + + let file_extension: Option> = path + .extension() + .and_then(|extension| Some(extension.to_str()?.into())); + let file_name: Option> = path + .file_name() + .and_then(|file_name| Some(file_name.to_str()?.into())); + + let (file_name_or_extension, extension_id) = None + // We suggest against file names first, as these suggestions will be more + // specific than ones based on the file extension. 
+ .or_else(|| { + file_name.clone().zip( + file_name + .as_deref() + .and_then(|file_name| suggested_extensions().get(file_name)), + ) + }) + .or_else(|| { + file_extension.clone().zip( + file_extension + .as_deref() + .and_then(|file_extension| suggested_extensions().get(file_extension)), + ) + })?; + + Some(SuggestedExtension { + extension_id: extension_id.clone(), + file_name_or_extension, + }) +} + +fn language_extension_key(extension_id: &str) -> String { + format!("{}_extension_suggest", extension_id) +} + +pub(crate) fn suggest(buffer: Model, cx: &mut ViewContext) { + let Some(file) = buffer.read(cx).file().cloned() else { + return; + }; + + let Some(SuggestedExtension { + extension_id, + file_name_or_extension, + }) = suggested_extension(file.path()) + else { + return; + }; + + let key = language_extension_key(&extension_id); + let Ok(None) = KEY_VALUE_STORE.read_kvp(&key) else { + return; + }; + + cx.on_next_frame(move |workspace, cx| { + let Some(editor) = workspace.active_item_as::(cx) else { + return; + }; + + if editor.read(cx).buffer().read(cx).as_singleton().as_ref() != Some(&buffer) { + return; + } + + struct ExtensionSuggestionNotification; + + let notification_id = NotificationId::identified::( + SharedString::from(extension_id.clone()), + ); + + workspace.show_notification(notification_id, cx, |cx| { + cx.new_view(move |_cx| { + simple_message_notification::MessageNotification::new(format!( + "Do you want to install the recommended '{}' extension for '{}' files?", + extension_id, file_name_or_extension + )) + .with_click_message("Yes") + .on_click({ + let extension_id = extension_id.clone(); + move |cx| { + let extension_id = extension_id.clone(); + let extension_store = ExtensionStore::global(cx); + extension_store.update(cx, move |store, cx| { + store.install_latest_extension(extension_id, cx); + }); + } + }) + .with_secondary_click_message("No") + .on_secondary_click(move |cx| { + let key = language_extension_key(&extension_id); + db::write_and_log(cx, move || { + KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string()) + }); + }) + }) + }); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + pub fn test_suggested_extension() { + assert_eq!( + suggested_extension("Cargo.toml"), + Some(SuggestedExtension { + extension_id: "toml".into(), + file_name_or_extension: "toml".into() + }) + ); + assert_eq!( + suggested_extension("Cargo.lock"), + Some(SuggestedExtension { + extension_id: "toml".into(), + file_name_or_extension: "Cargo.lock".into() + }) + ); + assert_eq!( + suggested_extension("Dockerfile"), + Some(SuggestedExtension { + extension_id: "dockerfile".into(), + file_name_or_extension: "Dockerfile".into() + }) + ); + assert_eq!( + suggested_extension("a/b/c/d/.gitignore"), + Some(SuggestedExtension { + extension_id: "git-firefly".into(), + file_name_or_extension: ".gitignore".into() + }) + ); + assert_eq!( + suggested_extension("a/b/c/d/test.gleam"), + Some(SuggestedExtension { + extension_id: "gleam".into(), + file_name_or_extension: "gleam".into() + }) + ); + } +} diff --git a/crates/extensions_ui/src/extension_version_selector.rs b/crates/extensions_ui/src/extension_version_selector.rs new file mode 100644 index 0000000..986a71a --- /dev/null +++ b/crates/extensions_ui/src/extension_version_selector.rs @@ -0,0 +1,238 @@ +use std::str::FromStr; +use std::sync::Arc; + +use client::ExtensionMetadata; +use extension::{ExtensionSettings, ExtensionStore}; +use fs::Fs; +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{ + 
prelude::*, AppContext, DismissEvent, EventEmitter, FocusableView, Task, View, WeakView, +}; +use picker::{Picker, PickerDelegate}; +use semantic_version::SemanticVersion; +use settings::update_settings_file; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; +use util::ResultExt; +use workspace::ModalView; + +pub struct ExtensionVersionSelector { + picker: View>, +} + +impl ModalView for ExtensionVersionSelector {} + +impl EventEmitter for ExtensionVersionSelector {} + +impl FocusableView for ExtensionVersionSelector { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for ExtensionVersionSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +impl ExtensionVersionSelector { + pub fn new(delegate: ExtensionVersionSelectorDelegate, cx: &mut ViewContext) -> Self { + let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); + Self { picker } + } +} + +pub struct ExtensionVersionSelectorDelegate { + fs: Arc, + view: WeakView, + extension_versions: Vec, + selected_index: usize, + matches: Vec, +} + +impl ExtensionVersionSelectorDelegate { + pub fn new( + fs: Arc, + weak_view: WeakView, + mut extension_versions: Vec, + ) -> Self { + extension_versions.sort_unstable_by(|a, b| { + let a_version = SemanticVersion::from_str(&a.manifest.version); + let b_version = SemanticVersion::from_str(&b.manifest.version); + + match (a_version, b_version) { + (Ok(a_version), Ok(b_version)) => b_version.cmp(&a_version), + _ => b.published_at.cmp(&a.published_at), + } + }); + + let matches = extension_versions + .iter() + .map(|extension| StringMatch { + candidate_id: 0, + score: 0.0, + positions: Default::default(), + string: format!("v{}", extension.manifest.version), + }) + .collect(); + + Self { + fs, + view: weak_view, + extension_versions, + selected_index: 0, + matches, + } + } +} + +impl PickerDelegate for ExtensionVersionSelectorDelegate { + type ListItem = ui::ListItem; + + fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Select extension version...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches(&mut self, query: String, cx: &mut ViewContext>) -> Task<()> { + let background_executor = cx.background_executor().clone(); + let candidates = self + .extension_versions + .iter() + .enumerate() + .map(|(id, extension)| { + let text = format!("v{}", extension.manifest.version); + + StringMatchCandidate { + id, + char_bag: text.as_str().into(), + string: text, + } + }) + .collect::>(); + + cx.spawn(move |this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .into_iter() + .enumerate() + .map(|(index, candidate)| StringMatch { + candidate_id: index, + string: candidate.string, + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background_executor, + ) + .await + }; + + this.update(&mut cx, |this, _cx| { + this.delegate.matches = matches; + this.delegate.selected_index = this + .delegate + .selected_index + .min(this.delegate.matches.len().saturating_sub(1)); + }) + .log_err(); + }) + } + + fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext>) { + if self.matches.is_empty() { 
+ self.dismissed(cx); + return; + } + + let candidate_id = self.matches[self.selected_index].candidate_id; + let extension_version = &self.extension_versions[candidate_id]; + + if !extension::is_version_compatible(extension_version) { + return; + } + + let extension_store = ExtensionStore::global(cx); + extension_store.update(cx, |store, cx| { + let extension_id = extension_version.id.clone(); + let version = extension_version.manifest.version.clone(); + + update_settings_file::(self.fs.clone(), cx, { + let extension_id = extension_id.clone(); + move |settings| { + settings.auto_update_extensions.insert(extension_id, false); + } + }); + + store.install_extension(extension_id, version, cx); + }); + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.view + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _cx: &mut ViewContext>, + ) -> Option { + let version_match = &self.matches[ix]; + let extension_version = &self.extension_versions[version_match.candidate_id]; + + let is_version_compatible = extension::is_version_compatible(extension_version); + let disabled = !is_version_compatible; + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .disabled(disabled) + .child( + HighlightedLabel::new( + version_match.string.clone(), + version_match.positions.clone(), + ) + .when(disabled, |label| label.color(Color::Muted)), + ) + .end_slot( + h_flex() + .gap_2() + .when(!is_version_compatible, |this| { + this.child(Label::new("Incompatible").color(Color::Muted)) + }) + .child( + Label::new( + extension_version + .published_at + .format("%Y-%m-%d") + .to_string(), + ) + .when(disabled, |label| label.color(Color::Muted)), + ), + ), + ) + } +} diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs new file mode 100644 index 0000000..b2c2451 --- /dev/null +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -0,0 +1,1002 @@ +mod components; +mod extension_suggest; +mod extension_version_selector; + +use crate::components::ExtensionCard; +use crate::extension_version_selector::{ + ExtensionVersionSelector, ExtensionVersionSelectorDelegate, +}; +use client::telemetry::Telemetry; +use client::ExtensionMetadata; +use editor::{Editor, EditorElement, EditorStyle}; +use extension::{ExtensionManifest, ExtensionOperation, ExtensionStore}; +use fuzzy::{match_strings, StringMatchCandidate}; +use gpui::{ + actions, canvas, uniform_list, AnyElement, AppContext, EventEmitter, FocusableView, FontStyle, + FontWeight, InteractiveElement, KeyContext, ParentElement, Render, Styled, Task, TextStyle, + UniformListScrollHandle, View, ViewContext, VisualContext, WeakView, WhiteSpace, WindowContext, +}; +use settings::Settings; +use std::ops::DerefMut; +use std::time::Duration; +use std::{ops::Range, sync::Arc}; +use theme::ThemeSettings; +use ui::{popover_menu, prelude::*, ContextMenu, ToggleButton, Tooltip}; +use util::ResultExt as _; +use workspace::item::TabContentParams; +use workspace::{ + item::{Item, ItemEvent}, + Workspace, WorkspaceId, +}; + +actions!(zed, [Extensions, InstallDevExtension]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(move |workspace: &mut Workspace, cx| { + workspace + .register_action(move |workspace, _: &Extensions, cx| { + let existing = workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()); + + if let Some(existing) = existing { + workspace.activate_item(&existing, cx); + 
} else { + let extensions_page = ExtensionsPage::new(workspace, cx); + workspace.add_item_to_active_pane(Box::new(extensions_page), None, cx) + } + }) + .register_action(move |_, _: &InstallDevExtension, cx| { + let store = ExtensionStore::global(cx); + let prompt = cx.prompt_for_paths(gpui::PathPromptOptions { + files: false, + directories: true, + multiple: false, + }); + + cx.deref_mut() + .spawn(|mut cx| async move { + let extension_path = prompt.await.log_err()??.pop()?; + store + .update(&mut cx, |store, cx| { + store + .install_dev_extension(extension_path, cx) + .detach_and_log_err(cx) + }) + .ok()?; + Some(()) + }) + .detach(); + }); + + cx.subscribe(workspace.project(), |_, _, event, cx| match event { + project::Event::LanguageNotFound(buffer) => { + extension_suggest::suggest(buffer.clone(), cx); + } + _ => {} + }) + .detach(); + }) + .detach(); +} + +#[derive(Clone)] +pub enum ExtensionStatus { + NotInstalled, + Installing, + Upgrading, + Installed(Arc), + Removing, +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +enum ExtensionFilter { + All, + Installed, + NotInstalled, +} + +impl ExtensionFilter { + pub fn include_dev_extensions(&self) -> bool { + match self { + Self::All | Self::Installed => true, + Self::NotInstalled => false, + } + } +} + +pub struct ExtensionsPage { + workspace: WeakView, + list: UniformListScrollHandle, + telemetry: Arc, + is_fetching_extensions: bool, + filter: ExtensionFilter, + remote_extension_entries: Vec, + dev_extension_entries: Vec>, + filtered_remote_extension_indices: Vec, + query_editor: View, + query_contains_error: bool, + _subscriptions: [gpui::Subscription; 2], + extension_fetch_task: Option>, +} + +impl ExtensionsPage { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> View { + cx.new_view(|cx: &mut ViewContext| { + let store = ExtensionStore::global(cx); + let workspace_handle = workspace.weak_handle(); + let subscriptions = [ + cx.observe(&store, |_, _, cx| cx.notify()), + cx.subscribe(&store, move |this, _, event, cx| match event { + extension::Event::ExtensionsUpdated => this.fetch_extensions_debounced(cx), + extension::Event::ExtensionInstalled(extension_id) => { + this.on_extension_installed(workspace_handle.clone(), extension_id, cx) + } + _ => {} + }), + ]; + + let query_editor = cx.new_view(|cx| { + let mut input = Editor::single_line(cx); + input.set_placeholder_text("Search extensions...", cx); + input + }); + cx.subscribe(&query_editor, Self::on_query_change).detach(); + + let mut this = Self { + workspace: workspace.weak_handle(), + list: UniformListScrollHandle::new(), + telemetry: workspace.client().telemetry().clone(), + is_fetching_extensions: false, + filter: ExtensionFilter::All, + dev_extension_entries: Vec::new(), + filtered_remote_extension_indices: Vec::new(), + remote_extension_entries: Vec::new(), + query_contains_error: false, + extension_fetch_task: None, + _subscriptions: subscriptions, + query_editor, + }; + this.fetch_extensions(None, cx); + this + }) + } + + fn on_extension_installed( + &mut self, + workspace: WeakView, + extension_id: &str, + cx: &mut ViewContext, + ) { + let extension_store = ExtensionStore::global(cx).read(cx); + let themes = extension_store + .extension_themes(extension_id) + .map(|name| name.to_string()) + .collect::>(); + if !themes.is_empty() { + workspace + .update(cx, |workspace, cx| { + theme_selector::toggle( + workspace, + &theme_selector::Toggle { + themes_filter: Some(themes), + }, + cx, + ) + }) + .ok(); + } + } + + /// Returns whether a 
dev extension currently exists for the extension with the given ID. + fn dev_extension_exists(extension_id: &str, cx: &mut ViewContext) -> bool { + let extension_store = ExtensionStore::global(cx).read(cx); + + extension_store + .dev_extensions() + .any(|dev_extension| dev_extension.id.as_ref() == extension_id) + } + + fn extension_status(extension_id: &str, cx: &mut ViewContext) -> ExtensionStatus { + let extension_store = ExtensionStore::global(cx).read(cx); + + match extension_store.outstanding_operations().get(extension_id) { + Some(ExtensionOperation::Install) => ExtensionStatus::Installing, + Some(ExtensionOperation::Remove) => ExtensionStatus::Removing, + Some(ExtensionOperation::Upgrade) => ExtensionStatus::Upgrading, + None => match extension_store.installed_extensions().get(extension_id) { + Some(extension) => ExtensionStatus::Installed(extension.manifest.version.clone()), + None => ExtensionStatus::NotInstalled, + }, + } + } + + fn filter_extension_entries(&mut self, cx: &mut ViewContext) { + self.filtered_remote_extension_indices.clear(); + self.filtered_remote_extension_indices.extend( + self.remote_extension_entries + .iter() + .enumerate() + .filter(|(_, extension)| match self.filter { + ExtensionFilter::All => true, + ExtensionFilter::Installed => { + let status = Self::extension_status(&extension.id, cx); + matches!(status, ExtensionStatus::Installed(_)) + } + ExtensionFilter::NotInstalled => { + let status = Self::extension_status(&extension.id, cx); + + matches!(status, ExtensionStatus::NotInstalled) + } + }) + .map(|(ix, _)| ix), + ); + cx.notify(); + } + + fn fetch_extensions(&mut self, search: Option, cx: &mut ViewContext) { + self.is_fetching_extensions = true; + cx.notify(); + + let extension_store = ExtensionStore::global(cx); + + let dev_extensions = extension_store.update(cx, |store, _| { + store.dev_extensions().cloned().collect::>() + }); + + let remote_extensions = extension_store.update(cx, |store, cx| { + store.fetch_extensions(search.as_deref(), cx) + }); + + cx.spawn(move |this, mut cx| async move { + let dev_extensions = if let Some(search) = search { + let match_candidates = dev_extensions + .iter() + .enumerate() + .map(|(ix, manifest)| StringMatchCandidate { + id: ix, + string: manifest.name.clone(), + char_bag: manifest.name.as_str().into(), + }) + .collect::>(); + + let matches = match_strings( + &match_candidates, + &search, + false, + match_candidates.len(), + &Default::default(), + cx.background_executor().clone(), + ) + .await; + matches + .into_iter() + .map(|mat| dev_extensions[mat.candidate_id].clone()) + .collect() + } else { + dev_extensions + }; + + let fetch_result = remote_extensions.await; + this.update(&mut cx, |this, cx| { + cx.notify(); + this.dev_extension_entries = dev_extensions; + this.is_fetching_extensions = false; + this.remote_extension_entries = fetch_result?; + this.filter_extension_entries(cx); + anyhow::Ok(()) + })? 
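`filter_extension_entries` above keeps the remote entries whose install status matches the active `ExtensionFilter` (dev extensions are shown for `All` and `Installed` via `include_dev_extensions`, hidden for `NotInstalled`). A standalone sketch of the remote-entry filtering rule, using local mirror enums so it runs without Zed's crates:

```rust
// Local mirrors of the filter/status enums, reduced to the remote-entry rule:
// `All` keeps everything, `Installed`/`NotInstalled` keep matching statuses only.
#[derive(Clone, Copy, PartialEq)]
enum Filter {
    All,
    Installed,
    NotInstalled,
}

#[derive(Clone, Copy, PartialEq)]
enum Status {
    NotInstalled,
    Installed,
}

fn filtered_indices(statuses: &[Status], filter: Filter) -> Vec<usize> {
    statuses
        .iter()
        .enumerate()
        .filter(|(_, status)| match filter {
            Filter::All => true,
            Filter::Installed => **status == Status::Installed,
            Filter::NotInstalled => **status == Status::NotInstalled,
        })
        .map(|(ix, _)| ix)
        .collect()
}

fn main() {
    let statuses = [Status::Installed, Status::NotInstalled, Status::Installed];
    assert_eq!(filtered_indices(&statuses, Filter::Installed), vec![0, 2]);
    assert_eq!(filtered_indices(&statuses, Filter::NotInstalled), vec![1]);
    assert_eq!(filtered_indices(&statuses, Filter::All), vec![0, 1, 2]);
}
```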
+ }) + .detach_and_log_err(cx); + } + + fn render_extensions( + &mut self, + range: Range, + cx: &mut ViewContext, + ) -> Vec { + let dev_extension_entries_len = if self.filter.include_dev_extensions() { + self.dev_extension_entries.len() + } else { + 0 + }; + range + .map(|ix| { + if ix < dev_extension_entries_len { + let extension = &self.dev_extension_entries[ix]; + self.render_dev_extension(extension, cx) + } else { + let extension_ix = + self.filtered_remote_extension_indices[ix - dev_extension_entries_len]; + let extension = &self.remote_extension_entries[extension_ix]; + self.render_remote_extension(extension, cx) + } + }) + .collect() + } + + fn render_dev_extension( + &self, + extension: &ExtensionManifest, + cx: &mut ViewContext, + ) -> ExtensionCard { + let status = Self::extension_status(&extension.id, cx); + + let repository_url = extension.repository.clone(); + + ExtensionCard::new() + .child( + h_flex() + .justify_between() + .child( + h_flex() + .gap_2() + .items_end() + .child(Headline::new(extension.name.clone()).size(HeadlineSize::Medium)) + .child( + Headline::new(format!("v{}", extension.version)) + .size(HeadlineSize::XSmall), + ), + ) + .child( + h_flex() + .gap_2() + .justify_between() + .child( + Button::new( + SharedString::from(format!("rebuild-{}", extension.id)), + "Rebuild", + ) + .on_click({ + let extension_id = extension.id.clone(); + move |_, cx| { + ExtensionStore::global(cx).update(cx, |store, cx| { + store.rebuild_dev_extension(extension_id.clone(), cx) + }); + } + }) + .color(Color::Accent) + .disabled(matches!(status, ExtensionStatus::Upgrading)), + ) + .child( + Button::new(SharedString::from(extension.id.clone()), "Uninstall") + .on_click({ + let extension_id = extension.id.clone(); + move |_, cx| { + ExtensionStore::global(cx).update(cx, |store, cx| { + store.uninstall_extension(extension_id.clone(), cx) + }); + } + }) + .color(Color::Accent) + .disabled(matches!(status, ExtensionStatus::Removing)), + ), + ), + ) + .child( + h_flex() + .justify_between() + .child( + Label::new(format!( + "{}: {}", + if extension.authors.len() > 1 { + "Authors" + } else { + "Author" + }, + extension.authors.join(", ") + )) + .size(LabelSize::Small), + ) + .child(Label::new("<>").size(LabelSize::Small)), + ) + .child( + h_flex() + .justify_between() + .children(extension.description.as_ref().map(|description| { + Label::new(description.clone()) + .size(LabelSize::Small) + .color(Color::Default) + })) + .children(repository_url.map(|repository_url| { + IconButton::new( + SharedString::from(format!("repository-{}", extension.id)), + IconName::Github, + ) + .icon_color(Color::Accent) + .icon_size(IconSize::Small) + .style(ButtonStyle::Filled) + .on_click(cx.listener({ + let repository_url = repository_url.clone(); + move |_, _, cx| { + cx.open_url(&repository_url); + } + })) + .tooltip(move |cx| Tooltip::text(repository_url.clone(), cx)) + })), + ) + } + + fn render_remote_extension( + &self, + extension: &ExtensionMetadata, + cx: &mut ViewContext, + ) -> ExtensionCard { + let this = cx.view().clone(); + let status = Self::extension_status(&extension.id, cx); + let has_dev_extension = Self::dev_extension_exists(&extension.id, cx); + + let extension_id = extension.id.clone(); + let (install_or_uninstall_button, upgrade_button) = + self.buttons_for_entry(extension, &status, has_dev_extension, cx); + let version = extension.manifest.version.clone(); + let repository_url = extension.manifest.repository.clone(); + + let installed_version = match status { + 
ExtensionStatus::Installed(installed_version) => Some(installed_version), + _ => None, + }; + + ExtensionCard::new() + .overridden_by_dev_extension(has_dev_extension) + .child( + h_flex() + .justify_between() + .child( + h_flex() + .gap_2() + .items_end() + .child( + Headline::new(extension.manifest.name.clone()) + .size(HeadlineSize::Medium), + ) + .child(Headline::new(format!("v{version}")).size(HeadlineSize::XSmall)) + .children( + installed_version + .filter(|installed_version| *installed_version != version) + .map(|installed_version| { + Headline::new(format!("(v{installed_version} installed)",)) + .size(HeadlineSize::XSmall) + }), + ), + ) + .child( + h_flex() + .gap_2() + .justify_between() + .children(upgrade_button) + .child(install_or_uninstall_button), + ), + ) + .child( + h_flex() + .justify_between() + .child( + Label::new(format!( + "{}: {}", + if extension.manifest.authors.len() > 1 { + "Authors" + } else { + "Author" + }, + extension.manifest.authors.join(", ") + )) + .size(LabelSize::Small), + ) + .child( + Label::new(format!("Downloads: {}", extension.download_count)) + .size(LabelSize::Small), + ), + ) + .child( + h_flex() + .gap_2() + .justify_between() + .children(extension.manifest.description.as_ref().map(|description| { + h_flex().overflow_x_hidden().child( + Label::new(description.clone()) + .size(LabelSize::Small) + .color(Color::Default), + ) + })) + .child( + h_flex() + .gap_2() + .child( + IconButton::new( + SharedString::from(format!("repository-{}", extension.id)), + IconName::Github, + ) + .icon_color(Color::Accent) + .icon_size(IconSize::Small) + .style(ButtonStyle::Filled) + .on_click(cx.listener({ + let repository_url = repository_url.clone(); + move |_, _, cx| { + cx.open_url(&repository_url); + } + })) + .tooltip(move |cx| Tooltip::text(repository_url.clone(), cx)), + ) + .child( + popover_menu(SharedString::from(format!("more-{}", extension.id))) + .trigger( + IconButton::new( + SharedString::from(format!("more-{}", extension.id)), + IconName::Ellipsis, + ) + .icon_color(Color::Accent) + .icon_size(IconSize::Small) + .style(ButtonStyle::Filled), + ) + .menu(move |cx| { + Some(Self::render_remote_extension_context_menu( + &this, + extension_id.clone(), + cx, + )) + }), + ), + ), + ) + } + + fn render_remote_extension_context_menu( + this: &View, + extension_id: Arc, + cx: &mut WindowContext, + ) -> View { + let context_menu = ContextMenu::build(cx, |context_menu, cx| { + context_menu.entry( + "Install Another Version...", + None, + cx.handler_for(&this, move |this, cx| { + this.show_extension_version_list(extension_id.clone(), cx) + }), + ) + }); + + context_menu + } + + fn show_extension_version_list(&mut self, extension_id: Arc, cx: &mut ViewContext) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + + cx.spawn(move |this, mut cx| async move { + let extension_versions_task = this.update(&mut cx, |_, cx| { + let extension_store = ExtensionStore::global(cx); + + extension_store.update(cx, |store, cx| { + store.fetch_extension_versions(&extension_id, cx) + }) + })?; + + let extension_versions = extension_versions_task.await?; + + workspace.update(&mut cx, |workspace, cx| { + let fs = workspace.project().read(cx).fs().clone(); + workspace.toggle_modal(cx, |cx| { + let delegate = ExtensionVersionSelectorDelegate::new( + fs, + cx.view().downgrade(), + extension_versions, + ); + + ExtensionVersionSelector::new(delegate, cx) + }); + })?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn buttons_for_entry( + &self, + 
extension: &ExtensionMetadata, + status: &ExtensionStatus, + has_dev_extension: bool, + cx: &mut ViewContext, + ) -> (Button, Option