diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3843a3343b4a79..84ed0dd5d44ab8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -25,6 +25,7 @@ env:
   CARGO_TERM_COLOR: always
   CARGO_INCREMENTAL: 0
   RUST_BACKTRACE: 1
+  RUSTFLAGS: "-D warnings"

 jobs:
   migration_checks:
@@ -91,6 +92,7 @@ jobs:
   macos_tests:
     timeout-minutes: 60
     name: (macOS) Run Clippy and tests
+    if: github.repository_owner == 'zed-industries'
     runs-on:
       - self-hosted
       - test
@@ -115,17 +117,18 @@ jobs:
         uses: ./.github/actions/run_tests

       - name: Build collab
-        run: RUSTFLAGS="-D warnings" cargo build -p collab
+        run: cargo build -p collab

       - name: Build other binaries and features
         run: |
-          RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features
+          cargo build --workspace --bins --all-features
           cargo check -p gpui --features "macos-blade"
-          RUSTFLAGS="-D warnings" cargo build -p remote_server
+          cargo build -p remote_server

   linux_tests:
     timeout-minutes: 60
     name: (Linux) Run Clippy and tests
+    if: github.repository_owner == 'zed-industries'
     runs-on:
       - buildjet-16vcpu-ubuntu-2204
     steps:
@@ -153,11 +156,12 @@
         uses: ./.github/actions/run_tests

       - name: Build Zed
-        run: RUSTFLAGS="-D warnings" cargo build -p zed
+        run: cargo build -p zed

   build_remote_server:
     timeout-minutes: 60
     name: (Linux) Build Remote Server
+    if: github.repository_owner == 'zed-industries'
     runs-on:
       - buildjet-16vcpu-ubuntu-2204
     steps:
@@ -179,14 +183,18 @@
         run: ./script/remote-server && ./script/install-mold 2.34.0

       - name: Build Remote Server
-        run: RUSTFLAGS="-D warnings" cargo build -p remote_server
+        run: cargo build -p remote_server

   # todo(windows): Actually run the tests
   windows_tests:
     timeout-minutes: 60
     name: (Windows) Run Clippy and tests
+    if: github.repository_owner == 'zed-industries'
     runs-on: hosted-windows-1
     steps:
+      # more info here:- https://github.com/rust-lang/cargo/issues/13020
+      - name: Enable longer pathnames for git
+        run: git config --system core.longpaths true
       - name: Checkout repo
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
@@ -203,7 +211,7 @@
         run: cargo xtask clippy

       - name: Build Zed
-        run: $env:RUSTFLAGS="-D warnings"; cargo build
+        run: cargo build

   bundle-mac:
     timeout-minutes: 60
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 57e3cc7c59ff77..8d064b64f5bcac 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,3 +1,3 @@
 # Code of Conduct

-The Code of Conduct for this repository can be found online at [zed.dev/docs/code-of-conduct](https://zed.dev/docs/code-of-conduct).
+The Code of Conduct for this repository can be found online at [zed.dev/code-of-conduct](https://zed.dev/code-of-conduct).
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f7657b9ccd4603..4a0a632413911f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -2,7 +2,7 @@

 Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor!

-All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.
+All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.
## Contribution ideas diff --git a/Cargo.lock b/Cargo.lock index d5f0c7cad927a5..b4a1682bc6ed44 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16,6 +16,7 @@ dependencies = [ "project", "smallvec", "ui", + "util", "workspace", ] @@ -291,6 +292,12 @@ dependencies = [ "syn 2.0.76", ] +[[package]] +name = "arraydeque" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" + [[package]] name = "arrayref" version = "0.3.8" @@ -385,7 +392,7 @@ dependencies = [ "ctor", "db", "editor", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "futures 0.3.30", @@ -847,7 +854,7 @@ dependencies = [ "chrono", "futures-util", "http-types", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls 0.24.2", "serde", "serde_json", @@ -1343,7 +1350,7 @@ dependencies = [ "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", @@ -1434,7 +1441,7 @@ dependencies = [ "headers", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "itoa", "matchit", "memchr", @@ -1580,7 +1587,7 @@ dependencies = [ "bitflags 2.6.0", "cexpr", "clang-sys", - "itertools 0.10.5", + "itertools 0.12.1", "lazy_static", "lazycell", "proc-macro2", @@ -2359,7 +2366,7 @@ dependencies = [ "clickhouse-derive", "clickhouse-rs-cityhash-sys", "futures 0.3.30", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-tls", "lz4", "sealed", @@ -2551,7 +2558,7 @@ dependencies = [ "dashmap 6.0.1", "derive_more", "editor", - "env_logger", + "env_logger 0.11.5", "envy", "file_finder", "fs", @@ -2562,7 +2569,7 @@ dependencies = [ "gpui", "hex", "http_client", - "hyper 0.14.30", + "hyper 0.14.31", "indoc", "jsonwebtoken", "language", @@ -2706,7 +2713,7 @@ dependencies = [ "command_palette_hooks", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "fuzzy", "go_to_line", "gpui", @@ -3572,7 +3579,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -3760,7 +3767,7 @@ dependencies = [ "ctor", "db", "emojis", - "env_logger", + "env_logger 0.11.5", "file_icons", "futures 0.3.30", "fuzzy", @@ -3801,6 +3808,7 @@ dependencies = [ "tree-sitter-rust", "tree-sitter-typescript", "ui", + "unicode-segmentation", "unindent", "url", "util", @@ -3967,6 +3975,19 @@ dependencies = [ "regex", ] +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + [[package]] name = "env_logger" version = "0.11.5" @@ -4075,7 +4096,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "git", @@ -4170,7 +4191,7 @@ dependencies = [ "client", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "gpui", @@ -4212,7 +4233,7 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", - "env_logger", + "env_logger 0.11.5", "extension", "fs", "language", @@ -4371,7 +4392,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "file_icons", "futures 0.3.30", "fuzzy", @@ -4979,12 +5000,13 @@ dependencies = [ "git", "gpui", "http_client", + "indoc", "pretty_assertions", "regex", "serde", "serde_json", - "unindent", "url", + "util", ] [[package]] @@ -5126,7 
+5148,7 @@ dependencies = [ "ctor", "derive_more", "embed-resource", - "env_logger", + "env_logger 0.11.5", "etagere", "filedescriptor", "flume", @@ -5316,6 +5338,15 @@ dependencies = [ "serde", ] +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "hashlink" version = "0.9.1" @@ -5629,9 +5660,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.30" +version = "0.14.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" dependencies = [ "bytes 1.7.2", "futures-channel", @@ -5679,7 +5710,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "log", "rustls 0.21.12", "rustls-native-certs 0.6.3", @@ -5712,7 +5743,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.7.2", - "hyper 0.14.30", + "hyper 0.14.31", "native-tls", "tokio", "tokio-native-tls", @@ -6214,6 +6245,20 @@ dependencies = [ "simple_asn1", ] +[[package]] +name = "jupyter-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a444fb3f87ee6885eb316028cc998c7d84811663ef95d78c419419423d5a054" +dependencies = [ + "anyhow", + "chrono", + "serde", + "serde_json", + "thiserror", + "uuid", +] + [[package]] name = "khronos-egl" version = "6.0.0" @@ -6274,7 +6319,7 @@ dependencies = [ "collections", "ctor", "ec4rs", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "fuzzy", "git", @@ -6331,7 +6376,7 @@ dependencies = [ "copilot", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "feature_flags", "futures 0.3.30", "google_ai", @@ -6388,9 +6433,10 @@ dependencies = [ "collections", "copilot", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", + "itertools 0.13.0", "language", "lsp", "project", @@ -6402,6 +6448,7 @@ dependencies = [ "ui", "util", "workspace", + "zed_actions", ] [[package]] @@ -6422,6 +6469,11 @@ dependencies = [ "lsp", "node_runtime", "paths", + "pet", + "pet-conda", + "pet-core", + "pet-poetry", + "pet-reporter", "project", "regex", "rope", @@ -6718,7 +6770,7 @@ dependencies = [ "async-pipe", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "log", @@ -6801,7 +6853,7 @@ version = "0.1.0" dependencies = [ "anyhow", "assets", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -6914,7 +6966,7 @@ dependencies = [ "clap", "clap_complete", "elasticlunr-rs", - "env_logger", + "env_logger 0.11.5", "futures-util", "handlebars 5.1.2", "ignore", @@ -7096,6 +7148,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "msvc_spectre_libs" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8661ace213a0a130c7c5b9542df5023aedf092a02008ccf477b39ff108990305" +dependencies = [ + "cc", +] + [[package]] name = "multi_buffer" version = "0.1.0" @@ -7104,7 +7165,7 @@ dependencies = [ "clock", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", 
"futures 0.3.30", "gpui", "itertools 0.13.0", @@ -7183,6 +7244,21 @@ dependencies = [ "tempfile", ] +[[package]] +name = "nbformat" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146074ad45cab20f5d98ccded164826158471f21d04f96e40b9872529e10979d" +dependencies = [ + "anyhow", + "chrono", + "jupyter-serde", + "serde", + "serde_json", + "thiserror", + "uuid", +] + [[package]] name = "ndk" version = "0.8.0" @@ -7818,8 +7894,10 @@ dependencies = [ "serde", "serde_json", "settings", + "smallvec", "smol", "theme", + "ui", "util", "workspace", "worktree", @@ -8062,6 +8140,366 @@ dependencies = [ "sha2", ] +[[package]] +name = "pet" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "clap", + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-env-var-path", + "pet-fs", + "pet-global-virtualenvs", + "pet-homebrew", + "pet-jsonrpc", + "pet-linux-global-python", + "pet-mac-commandlinetools", + "pet-mac-python-org", + "pet-mac-xcode", + "pet-pipenv", + "pet-poetry", + "pet-pyenv", + "pet-python-utils", + "pet-reporter", + "pet-telemetry", + "pet-venv", + "pet-virtualenv", + "pet-virtualenvwrapper", + "pet-windows-registry", + "pet-windows-store", + "serde", + "serde_json", +] + +[[package]] +name = "pet-conda" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "regex", + "serde", + "serde_json", + "yaml-rust2", +] + +[[package]] +name = "pet-core" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "clap", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-fs", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-env-var-path" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", +] + +[[package]] +name = "pet-fs" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", +] + +[[package]] +name = "pet-global-virtualenvs" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-virtualenv", +] + +[[package]] +name = "pet-homebrew" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + 
"pet-virtualenv", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-jsonrpc" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "log", + "msvc_spectre_libs", + "pet-core", + "serde", + "serde_json", +] + +[[package]] +name = "pet-linux-global-python" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-commandlinetools" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-python-org" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-xcode" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-pipenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-poetry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "base64 0.22.1", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "pet-virtualenv", + "regex", + "serde", + "serde_json", + "sha2", + "toml 0.8.19", +] + +[[package]] +name = "pet-pyenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-python-utils" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "regex", + "serde", + "serde_json", + "sha2", +] + +[[package]] +name = "pet-reporter" +version = "0.1.0" +source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-jsonrpc", + "serde", + "serde_json", +] + +[[package]] +name = "pet-telemetry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "regex", +] + +[[package]] +name = "pet-venv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-virtualenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", +] + +[[package]] +name = "pet-virtualenvwrapper" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-windows-registry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "pet-windows-store", + "regex", + "winreg 0.52.0", +] + +[[package]] +name = "pet-windows-store" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", + "winreg 0.52.0", +] + [[package]] name = "petgraph" version = "0.6.5" @@ -8150,7 +8588,7 @@ dependencies = [ "anyhow", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "gpui", "menu", "serde", @@ -8498,7 +8936,7 @@ dependencies = [ "collections", "dap", "dap_adapters", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "fuzzy", @@ -9211,10 +9649,11 @@ dependencies = [ "async-watch", "backtrace", "cargo_toml", + "chrono", "clap", "client", "clock", - "env_logger", + "env_logger 0.11.5", "fork", "fs", "futures 0.3.30", @@ -9230,6 +9669,8 @@ dependencies = [ "node_runtime", "paths", "project", + "proto", + "release_channel", "remote", "reqwest_client", "rpc", @@ -9239,6 +9680,7 @@ dependencies = [ "settings", "shellexpand 2.1.2", "smol", + "telemetry_events", "toml 0.8.19", "util", "worktree", @@ -9265,7 +9707,8 @@ dependencies = [ "collections", "command_palette_hooks", "editor", - "env_logger", + "env_logger 0.11.5", + "feature_flags", "futures 0.3.30", "gpui", "http_client", @@ -9275,7 +9718,9 @@ dependencies = [ "languages", "log", "markdown_preview", + "menu", 
"multi_buffer", + "nbformat", "project", "runtimelib", "schemars", @@ -9310,7 +9755,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-tls", "ipnet", "js-sys", @@ -9545,10 +9990,11 @@ dependencies = [ "arrayvec", "criterion", "ctor", - "env_logger", + "env_logger 0.11.5", "gpui", "log", "rand 0.8.5", + "rayon", "smallvec", "sum_tree", "unicode-segmentation", @@ -9576,7 +10022,7 @@ dependencies = [ "base64 0.22.1", "chrono", "collections", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "parking_lot", @@ -9614,9 +10060,9 @@ dependencies = [ [[package]] name = "runtimelib" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7d76d28b882a7b889ebb04e79bc2b160b3061821ea596ff0f4a838fc7a76db0" +checksum = "263588fe9593333c4bfde258c9021fc64e766ea434e070c6b67c7100536d6499" dependencies = [ "anyhow", "async-dispatcher", @@ -9628,6 +10074,7 @@ dependencies = [ "dirs 5.0.1", "futures 0.3.30", "glob", + "jupyter-serde", "rand 0.8.5", "ring 0.17.8", "serde", @@ -10165,7 +10612,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "futures 0.3.30", @@ -10859,7 +11306,7 @@ dependencies = [ "futures-io", "futures-util", "hashbrown 0.14.5", - "hashlink", + "hashlink 0.9.1", "hex", "indexmap 2.4.0", "log", @@ -11183,7 +11630,7 @@ version = "0.1.0" dependencies = [ "arrayvec", "ctor", - "env_logger", + "env_logger 0.11.5", "log", "rand 0.8.5", "rayon", @@ -11197,7 +11644,7 @@ dependencies = [ "client", "collections", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "http_client", @@ -11496,7 +11943,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "gpui", "language", "menu", @@ -11704,7 +12151,7 @@ dependencies = [ "clock", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "gpui", "http_client", "log", @@ -12193,6 +12640,21 @@ dependencies = [ "winnow 0.6.18", ] +[[package]] +name = "toolchain_selector" +version = "0.1.0" +dependencies = [ + "editor", + "fuzzy", + "gpui", + "language", + "picker", + "project", + "ui", + "util", + "workspace", +] + [[package]] name = "topological-sort" version = "0.2.2" @@ -12938,6 +13400,7 @@ dependencies = [ "git", "gpui", "picker", + "project", "ui", "util", "workspace", @@ -13086,7 +13549,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "log", "mime", "mime_guess", @@ -13799,7 +14262,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -14361,7 +14824,7 @@ dependencies = [ "collections", "db", "derive_more", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "git", @@ -14398,12 +14861,13 @@ dependencies = [ "anyhow", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "fuzzy", "git", "git2", + "git_hosting_providers", "gpui", "http_client", "ignore", @@ -14568,6 +15032,17 @@ dependencies = [ "clap", ] +[[package]] +name = "yaml-rust2" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8902160c4e6f2fb145dbe9d6760a75e3c9522d8bf796ed7047c85919ac7115f8" +dependencies = [ + "arraydeque", + "encoding_rs", + "hashlink 0.8.4", +] + [[package]] name = "yansi" 
version = "1.0.1" @@ -14655,7 +15130,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.160.0" +version = "0.161.0" dependencies = [ "activity_indicator", "anyhow", @@ -14683,7 +15158,7 @@ dependencies = [ "debugger_ui", "diagnostics", "editor", - "env_logger", + "env_logger 0.11.5", "extension", "extensions_ui", "feature_flags", @@ -14722,6 +15197,7 @@ dependencies = [ "project", "project_panel", "project_symbols", + "proto", "quick_action_bar", "recent_projects", "release_channel", @@ -14750,6 +15226,7 @@ dependencies = [ "theme", "theme_selector", "time", + "toolchain_selector", "tree-sitter-md", "tree-sitter-rust", "ui", @@ -14795,13 +15272,6 @@ dependencies = [ "zed_extension_api 0.1.0", ] -[[package]] -name = "zed_dart" -version = "0.1.1" -dependencies = [ - "zed_extension_api 0.1.0", -] - [[package]] name = "zed_deno" version = "0.0.2" diff --git a/Cargo.toml b/Cargo.toml index 3faa3da8c90ece..4d583f10ca64a6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -120,6 +120,7 @@ members = [ "crates/theme_selector", "crates/time_format", "crates/title_bar", + "crates/toolchain_selector", "crates/ui", "crates/ui_input", "crates/ui_macros", @@ -140,7 +141,6 @@ members = [ "extensions/astro", "extensions/clojure", "extensions/csharp", - "extensions/dart", "extensions/deno", "extensions/elixir", "extensions/elm", @@ -298,6 +298,7 @@ theme_importer = { path = "crates/theme_importer" } theme_selector = { path = "crates/theme_selector" } time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } +toolchain_selector = { path = "crates/toolchain_selector" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } @@ -377,6 +378,7 @@ linkify = "0.10.0" log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } markup5ever_rcdom = "0.3.0" nanoid = "0.4" +nbformat = "0.3.1" nix = "0.29" num-format = "0.4.4" once_cell = "1.19.0" @@ -384,6 +386,11 @@ ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" pathdiff = "0.2" +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = "1.3.0" profiling = "1" @@ -392,6 +399,7 @@ prost-build = "0.9" prost-types = "0.9" pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" +rayon = "1.8" regex = "1.5" repair_json = "0.1.0" reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = [ @@ -403,7 +411,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f "stream", ] } rsa = "0.9.6" -runtimelib = { version = "0.15", default-features = false, features = [ +runtimelib = { version = "0.16.0", default-features = false, features = [ "async-dispatcher-runtime", ] } rustc-demangle = 
"0.1.23" diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json index a9fe4a2eff59b6..fe293256b393cc 100644 --- a/assets/icons/file_icons/file_types.json +++ b/assets/icons/file_icons/file_types.json @@ -58,6 +58,7 @@ "gitignore": "vcs", "gitkeep": "vcs", "gitmodules": "vcs", + "gleam": "gleam", "go": "go", "gql": "graphql", "graphql": "graphql", @@ -83,6 +84,7 @@ "j2k": "image", "java": "java", "jfif": "image", + "jl": "julia", "jp2": "image", "jpeg": "image", "jpg": "image", @@ -90,7 +92,6 @@ "json": "storage", "jsonc": "storage", "jsx": "react", - "julia": "julia", "jxl": "image", "kt": "kotlin", "ldf": "storage", @@ -264,6 +265,9 @@ "fsharp": { "icon": "icons/file_icons/fsharp.svg" }, + "gleam": { + "icon": "icons/file_icons/gleam.svg" + }, "go": { "icon": "icons/file_icons/go.svg" }, diff --git a/assets/icons/file_icons/gleam.svg b/assets/icons/file_icons/gleam.svg new file mode 100644 index 00000000000000..6a3dc2c96fe76b --- /dev/null +++ b/assets/icons/file_icons/gleam.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/list_x.svg b/assets/icons/list_x.svg new file mode 100644 index 00000000000000..683f38ab5dfe5b --- /dev/null +++ b/assets/icons/list_x.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 4f55fa9772b4db..0ba76fba3f6265 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -532,6 +532,7 @@ "context": "ContextEditor > Editor", "bindings": { "ctrl-enter": "assistant::Assist", + "ctrl-shift-enter": "assistant::Edit", "ctrl-s": "workspace::Save", "ctrl->": "assistant::QuoteSelection", "ctrl-<": "assistant::InsertIntoEditor", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index ade3ece1eda930..964af3ce3d3c06 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -201,6 +201,7 @@ "context": "ContextEditor > Editor", "bindings": { "cmd-enter": "assistant::Assist", + "cmd-shift-enter": "assistant::Edit", "cmd-s": "workspace::Save", "cmd->": "assistant::QuoteSelection", "cmd-<": "assistant::InsertIntoEditor", @@ -349,6 +350,7 @@ "alt-cmd-]": "editor::UnfoldLines", "cmd-k cmd-l": "editor::ToggleFold", "cmd-k cmd-[": "editor::FoldRecursive", + "cmd-k cmd-]": "editor::UnfoldRecursive", "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }], "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }], "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }], diff --git a/assets/prompts/edit_workflow.hbs b/assets/prompts/suggest_edits.hbs similarity index 100% rename from assets/prompts/edit_workflow.hbs rename to assets/prompts/suggest_edits.hbs diff --git a/assets/settings/default.json b/assets/settings/default.json index 04bd67643436c7..565e959cf4bb5f 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -346,8 +346,6 @@ "git_status": true, // Amount of indentation for nested items. "indent_size": 20, - // Whether to show indent guides in the project panel. - "indent_guides": true, // Whether to reveal it in the project panel automatically, // when a corresponding project entry becomes active. // Gitignored entries are never auto revealed. @@ -371,6 +369,17 @@ /// 5. Never show the scrollbar: /// "never" "show": null + }, + // Settings related to indent guides in the project panel. + "indent_guides": { + // When to show indent guides in the project panel. + // This setting can take two values: + // + // 1. 
Always show indent guides: + // "always" + // 2. Never show indent guides: + // "never" + "show": "always" } }, "outline_panel": { @@ -394,7 +403,35 @@ "auto_reveal_entries": true, /// Whether to fold directories automatically /// when a directory has only one directory inside. - "auto_fold_dirs": true + "auto_fold_dirs": true, + // Settings related to indent guides in the outline panel. + "indent_guides": { + // When to show indent guides in the outline panel. + // This setting can take two values: + // + // 1. Always show indent guides: + // "always" + // 2. Never show indent guides: + // "never" + "show": "always" + }, + /// Scrollbar-related settings + "scrollbar": { + /// When to show the scrollbar in the project panel. + /// This setting can take four values: + /// + /// 1. null (default): Inherit editor settings + /// 2. Show the scrollbar if there's important information or + /// follow the system's configured behavior (default): + /// "auto" + /// 3. Match the system's configured behavior: + /// "system" + /// 4. Always show the scrollbar: + /// "always" + /// 5. Never show the scrollbar: + /// "never" + "show": null + } }, "collaboration_panel": { // Whether to show the collaboration panel button in the status bar. @@ -777,6 +814,7 @@ "tasks": { "variables": {} }, + "toolchain": { "name": "default", "path": "default" }, // An object whose keys are language names, and whose values // are arrays of filenames or extensions of files that should // use those languages. diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 9761a082385ac7..b4fb2ec5b089ae 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -23,6 +23,7 @@ language.workspace = true project.workspace = true smallvec.workspace = true ui.workspace = true +util.workspace = true workspace.workspace = true [dev-dependencies] diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index e2fb516f88f7b6..4959b1192dead5 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -13,7 +13,8 @@ use language::{ use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId}; use smallvec::SmallVec; use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration}; -use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle}; +use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip}; +use util::truncate_and_trailoff; use workspace::{item::ItemHandle, StatusItemView, Workspace}; actions!(activity_indicator, [ShowErrorMessage]); @@ -463,6 +464,8 @@ impl ActivityIndicator { impl EventEmitter for ActivityIndicator {} +const MAX_MESSAGE_LEN: usize = 50; + impl Render for ActivityIndicator { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let result = h_flex() @@ -473,6 +476,7 @@ impl Render for ActivityIndicator { return result; }; let this = cx.view().downgrade(); + let truncate_content = content.message.len() > MAX_MESSAGE_LEN; result.gap_2().child( PopoverMenu::new("activity-indicator-popover") .trigger( @@ -481,7 +485,21 @@ impl Render for ActivityIndicator { .id("activity-indicator-status") .gap_2() .children(content.icon) - .child(Label::new(content.message).size(LabelSize::Small)) + .map(|button| { + if truncate_content { + button + .child( + Label::new(truncate_and_trailoff( + &content.message, + MAX_MESSAGE_LEN, + )) + 
.size(LabelSize::Small), + ) + .tooltip(move |cx| Tooltip::text(&content.message, cx)) + } else { + button.child(Label::new(content.message).size(LabelSize::Small)) + } + }) .when_some(content.on_click, |this, handler| { this.on_click(cx.listener(move |this, _, cx| { handler(this, cx); diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index e1e574744fff61..c2857d06d437f7 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,12 +41,10 @@ use prompts::PromptLoadingParams; use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; -use slash_command::workflow_command::WorkflowSlashCommand; use slash_command::{ auto_command, cargo_workspace_command, context_server_command, default_command, delta_command, diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command, prompt_command, search_command, symbols_command, tab_command, terminal_command, - workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -59,6 +57,7 @@ actions!( assistant, [ Assist, + Edit, Split, CopyCode, CycleMessageRole, @@ -298,25 +297,64 @@ fn register_context_server_handlers(cx: &mut AppContext) { return; }; - if let Some(prompts) = protocol.list_prompts().await.log_err() { - for prompt in prompts - .into_iter() - .filter(context_server_command::acceptable_prompt) - { - log::info!( - "registering context server command: {:?}", - prompt.name - ); - context_server_registry.register_command( - server.id.clone(), - prompt.name.as_str(), - ); - slash_command_registry.register_command( - context_server_command::ContextServerSlashCommand::new( - &server, prompt, - ), - true, - ); + if protocol.capable(context_servers::protocol::ServerCapability::Prompts) { + if let Some(prompts) = protocol.list_prompts().await.log_err() { + for prompt in prompts + .into_iter() + .filter(context_server_command::acceptable_prompt) + { + log::info!( + "registering context server command: {:?}", + prompt.name + ); + context_server_registry.register_command( + server.id.clone(), + prompt.name.as_str(), + ); + slash_command_registry.register_command( + context_server_command::ContextServerSlashCommand::new( + &server, prompt, + ), + true, + ); + } + } + } + }) + .detach(); + } + }, + ); + + cx.update_model( + &manager, + |manager: &mut context_servers::manager::ContextServerManager, cx| { + let tool_registry = ToolRegistry::global(cx); + let context_server_registry = ContextServerRegistry::global(cx); + if let Some(server) = manager.get_server(server_id) { + cx.spawn(|_, _| async move { + let Some(protocol) = server.client.read().clone() else { + return; + }; + + if protocol.capable(context_servers::protocol::ServerCapability::Tools) { + if let Some(tools) = protocol.list_tools().await.log_err() { + for tool in tools.tools { + log::info!( + "registering context server tool: {:?}", + tool.name + ); + context_server_registry.register_tool( + server.id.clone(), + tool.name.as_str(), + ); + tool_registry.register_tool( + tools::context_server_tool::ContextServerTool::new( + server.id.clone(), + tool + ), + ); + } } } }) @@ -334,6 +372,14 @@ fn register_context_server_handlers(cx: &mut AppContext) { context_server_registry.unregister_command(&server_id, &command_name); } } + + if let Some(tools) = context_server_registry.get_tools(server_id) { + let tool_registry = ToolRegistry::global(cx); + for tool_name in tools { + 
tool_registry.unregister_tool_by_name(&tool_name); + context_server_registry.unregister_tool(&server_id, &tool_name); + } + } } }, ) @@ -397,22 +443,6 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); if let Some(prompt_builder) = prompt_builder { - cx.observe_global::({ - let slash_command_registry = slash_command_registry.clone(); - let prompt_builder = prompt_builder.clone(); - move |cx| { - if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) { - slash_command_registry.register_command( - workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), - true, - ); - } else { - slash_command_registry.unregister_command_by_name(WorkflowSlashCommand::NAME); - } - } - }) - .detach(); - cx.observe_flag::({ let slash_command_registry = slash_command_registry.clone(); move |is_enabled, _cx| { diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 42857406976c2b..eef82c610681f0 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -13,10 +13,11 @@ use crate::{ terminal_inline_assistant::TerminalInlineAssistant, Assist, AssistantPatch, AssistantPatchStatus, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, ContextStoreEvent, CopyCode, CycleMessageRole, - DeployHistory, DeployPromptLibrary, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, - Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, - NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, - RemoteContextMetadata, SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, + DeployHistory, DeployPromptLibrary, Edit, InlineAssistant, InsertDraggedFiles, + InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, + ModelSelector, NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, + RemoteContextMetadata, RequestType, SavedContextMetadata, Split, ToggleFocus, + ToggleModelSelector, }; use anyhow::Result; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection}; @@ -1461,6 +1462,7 @@ type MessageHeader = MessageMetadata; #[derive(Clone)] enum AssistError { + FileRequired, PaymentRequired, MaxMonthlySpendReached, Message(SharedString), @@ -1588,23 +1590,11 @@ impl ContextEditor { } fn assist(&mut self, _: &Assist, cx: &mut ViewContext) { - let provider = LanguageModelRegistry::read_global(cx).active_provider(); - if provider - .as_ref() - .map_or(false, |provider| provider.must_accept_terms(cx)) - { - self.show_accept_terms = true; - cx.notify(); - return; - } - - if self.focus_active_patch(cx) { - return; - } + self.send_to_model(RequestType::Chat, cx); + } - self.last_error = None; - self.send_to_model(cx); - cx.notify(); + fn edit(&mut self, _: &Edit, cx: &mut ViewContext) { + self.send_to_model(RequestType::SuggestEdits, cx); } fn focus_active_patch(&mut self, cx: &mut ViewContext) -> bool { @@ -1622,8 +1612,30 @@ impl ContextEditor { false } - fn send_to_model(&mut self, cx: &mut ViewContext) { - if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) { + fn send_to_model(&mut self, request_type: RequestType, cx: &mut ViewContext) { + let provider = LanguageModelRegistry::read_global(cx).active_provider(); + if provider + .as_ref() + .map_or(false, |provider| provider.must_accept_terms(cx)) + { + self.show_accept_terms = true; + 
cx.notify(); + return; + } + + if self.focus_active_patch(cx) { + return; + } + + self.last_error = None; + + if request_type == RequestType::SuggestEdits && !self.context.read(cx).contains_files(cx) { + self.last_error = Some(AssistError::FileRequired); + cx.notify(); + } else if let Some(user_message) = self + .context + .update(cx, |context, cx| context.assist(request_type, cx)) + { let new_selection = { let cursor = user_message .start @@ -1640,6 +1652,8 @@ impl ContextEditor { // Avoid scrolling to the new cursor position so the assistant's output is stable. cx.defer(|this, _| this.scroll_position = None); } + + cx.notify(); } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { @@ -1667,8 +1681,10 @@ impl ContextEditor { }); } - fn cursors(&self, cx: &AppContext) -> Vec { - let selections = self.editor.read(cx).selections.all::(cx); + fn cursors(&self, cx: &mut WindowContext) -> Vec { + let selections = self + .editor + .update(cx, |editor, cx| editor.selections.all::(cx)); selections .into_iter() .map(|selection| selection.head()) @@ -2375,7 +2391,9 @@ impl ContextEditor { } fn update_active_patch(&mut self, cx: &mut ViewContext) { - let newest_cursor = self.editor.read(cx).selections.newest::(cx).head(); + let newest_cursor = self.editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); let context = self.context.read(cx); let new_patch = context.patch_containing(newest_cursor, cx).cloned(); @@ -2782,39 +2800,40 @@ impl ContextEditor { ) -> Option<(String, bool)> { const CODE_FENCE_DELIMITER: &'static str = "```"; - let context_editor = context_editor_view.read(cx).editor.read(cx); - - if context_editor.selections.newest::(cx).is_empty() { - let snapshot = context_editor.buffer().read(cx).snapshot(cx); - let (_, _, snapshot) = snapshot.as_singleton()?; - - let head = context_editor.selections.newest::(cx).head(); - let offset = snapshot.point_to_offset(head); + let context_editor = context_editor_view.read(cx).editor.clone(); + context_editor.update(cx, |context_editor, cx| { + if context_editor.selections.newest::(cx).is_empty() { + let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let (_, _, snapshot) = snapshot.as_singleton()?; + + let head = context_editor.selections.newest::(cx).head(); + let offset = snapshot.point_to_offset(head); + + let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; + let mut text = snapshot + .text_for_range(surrounding_code_block_range) + .collect::(); + + // If there is no newline trailing the closing three-backticks, then + // tree-sitter-md extends the range of the content node to include + // the backticks. + if text.ends_with(CODE_FENCE_DELIMITER) { + text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + } - let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; - let mut text = snapshot - .text_for_range(surrounding_code_block_range) - .collect::(); + (!text.is_empty()).then_some((text, true)) + } else { + let anchor = context_editor.selections.newest_anchor(); + let text = context_editor + .buffer() + .read(cx) + .read(cx) + .text_for_range(anchor.range()) + .collect::(); - // If there is no newline trailing the closing three-backticks, then - // tree-sitter-md extends the range of the content node to include - // the backticks. 
- if text.ends_with(CODE_FENCE_DELIMITER) { - text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + (!text.is_empty()).then_some((text, false)) } - - (!text.is_empty()).then_some((text, true)) - } else { - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); - - (!text.is_empty()).then_some((text, false)) - } + }) } fn insert_selection( @@ -3644,7 +3663,13 @@ impl ContextEditor { button.tooltip(move |_| tooltip.clone()) }) .layer(ElevationIndex::ModalSurface) - .child(Label::new("Send")) + .child(Label::new( + if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) { + "Chat" + } else { + "Send" + }, + )) .children( KeyBinding::for_action_in(&Assist, &focus_handle, cx) .map(|binding| binding.into_any_element()), @@ -3654,6 +3679,57 @@ impl ContextEditor { }) } + fn render_edit_button(&self, cx: &mut ViewContext) -> impl IntoElement { + let focus_handle = self.focus_handle(cx).clone(); + + let (style, tooltip) = match token_state(&self.context, cx) { + Some(TokenState::NoTokensLeft { .. }) => ( + ButtonStyle::Tinted(TintColor::Negative), + Some(Tooltip::text("Token limit reached", cx)), + ), + Some(TokenState::HasMoreTokens { + over_warn_threshold, + .. + }) => { + let (style, tooltip) = if over_warn_threshold { + ( + ButtonStyle::Tinted(TintColor::Warning), + Some(Tooltip::text("Token limit is close to exhaustion", cx)), + ) + } else { + (ButtonStyle::Filled, None) + }; + (style, tooltip) + } + None => (ButtonStyle::Filled, None), + }; + + let provider = LanguageModelRegistry::read_global(cx).active_provider(); + + let has_configuration_error = configuration_error(cx).is_some(); + let needs_to_accept_terms = self.show_accept_terms + && provider + .as_ref() + .map_or(false, |provider| provider.must_accept_terms(cx)); + let disabled = has_configuration_error || needs_to_accept_terms; + + ButtonLike::new("edit_button") + .disabled(disabled) + .style(style) + .when_some(tooltip, |button, tooltip| { + button.tooltip(move |_| tooltip.clone()) + }) + .layer(ElevationIndex::ModalSurface) + .child(Label::new("Suggest Edits")) + .children( + KeyBinding::for_action_in(&Edit, &focus_handle, cx) + .map(|binding| binding.into_any_element()), + ) + .on_click(move |_event, cx| { + focus_handle.dispatch_action(&Edit, cx); + }) + } + fn render_last_error(&self, cx: &mut ViewContext) -> Option { let last_error = self.last_error.as_ref()?; @@ -3668,6 +3744,7 @@ impl ContextEditor { .elevation_2(cx) .occlude() .child(match last_error { + AssistError::FileRequired => self.render_file_required_error(cx), AssistError::PaymentRequired => self.render_payment_required_error(cx), AssistError::MaxMonthlySpendReached => { self.render_max_monthly_spend_reached_error(cx) @@ -3680,6 +3757,41 @@ impl ContextEditor { ) } + fn render_file_required_error(&self, cx: &mut ViewContext) -> AnyElement { + v_flex() + .gap_0p5() + .child( + h_flex() + .gap_1p5() + .items_center() + .child(Icon::new(IconName::Warning).color(Color::Warning)) + .child( + Label::new("Suggest Edits needs a file to edit").weight(FontWeight::MEDIUM), + ), + ) + .child( + div() + .id("error-message") + .max_h_24() + .overflow_y_scroll() + .child(Label::new( + "To include files, type /file or /tab in your prompt.", + )), + ) + .child( + h_flex() + .justify_end() + .mt_1() + .child(Button::new("dismiss", "Dismiss").on_click(cx.listener( + |this, _, cx| { + this.last_error = None; + cx.notify(); + }, + ))), + ) + .into_any() + } + 
fn render_payment_required_error(&self, cx: &mut ViewContext) -> AnyElement { const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used."; @@ -3910,6 +4022,7 @@ impl Render for ContextEditor { .capture_action(cx.listener(ContextEditor::paste)) .capture_action(cx.listener(ContextEditor::cycle_message_role)) .capture_action(cx.listener(ContextEditor::confirm_command)) + .on_action(cx.listener(ContextEditor::edit)) .on_action(cx.listener(ContextEditor::assist)) .on_action(cx.listener(ContextEditor::split)) .size_full() @@ -3974,7 +4087,21 @@ impl Render for ContextEditor { h_flex() .w_full() .justify_end() - .child(div().child(self.render_send_button(cx))), + .when( + AssistantSettings::get_global(cx).are_live_diffs_enabled(cx), + |buttons| { + buttons + .items_center() + .gap_1p5() + .child(self.render_edit_button(cx)) + .child( + Label::new("or") + .size(LabelSize::Small) + .color(Color::Muted), + ) + }, + ) + .child(self.render_send_button(cx)), ), ), ) @@ -4707,7 +4834,7 @@ impl Render for ConfigurationView { let mut element = v_flex() .id("assistant-configuration-view") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .bg(cx.theme().colors().editor_background) .size_full() .overflow_y_scroll() diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 78237e51b21656..a1de9d3b4069a1 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -2,8 +2,9 @@ mod context_tests; use crate::{ - prompts::PromptBuilder, slash_command::SlashCommandLine, AssistantEdit, AssistantPatch, - AssistantPatchStatus, MessageId, MessageStatus, + prompts::PromptBuilder, + slash_command::{file_command::FileCommandMetadata, SlashCommandLine}, + AssistantEdit, AssistantPatch, AssistantPatchStatus, MessageId, MessageStatus, }; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ @@ -66,6 +67,14 @@ impl ContextId { } } +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum RequestType { + /// Request a normal chat response from the model. + Chat, + /// Add a preamble to the message, which tells the model to return a structured response that suggests edits. + SuggestEdits, +} + #[derive(Clone, Debug)] pub enum ContextOperation { InsertMessage { @@ -981,6 +990,20 @@ impl Context { &self.slash_command_output_sections } + pub fn contains_files(&self, cx: &AppContext) -> bool { + let buffer = self.buffer.read(cx); + self.slash_command_output_sections.iter().any(|section| { + section.is_valid(buffer) + && section + .metadata + .as_ref() + .and_then(|metadata| { + serde_json::from_value::(metadata.clone()).ok() + }) + .is_some() + }) + } + pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> { self.pending_tool_uses_by_id.values().collect() } @@ -1028,7 +1051,7 @@ impl Context { } pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext) { - let request = self.to_completion_request(cx); + let request = self.to_completion_request(RequestType::SuggestEdits, cx); // Conservatively assume SuggestEdits, since it takes more tokens. let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else { return; }; @@ -1171,7 +1194,7 @@ impl Context { } let request = { - let mut req = self.to_completion_request(cx); + let mut req = self.to_completion_request(RequestType::Chat, cx); // Skip the last message because it's likely to change and // therefore would be a waste to cache. 
req.messages.pop(); @@ -1859,7 +1882,11 @@ impl Context { }) } - pub fn assist(&mut self, cx: &mut ModelContext) -> Option { + pub fn assist( + &mut self, + request_type: RequestType, + cx: &mut ModelContext, + ) -> Option { let model_registry = LanguageModelRegistry::read_global(cx); let provider = model_registry.active_provider()?; let model = model_registry.active_model()?; @@ -1872,7 +1899,7 @@ impl Context { // Compute which messages to cache, including the last one. self.mark_cache_anchors(&model.cache_configuration(), false, cx); - let mut request = self.to_completion_request(cx); + let mut request = self.to_completion_request(request_type, cx); if cx.has_flag::() { let tool_registry = ToolRegistry::global(cx); @@ -2074,7 +2101,11 @@ impl Context { Some(user_message) } - pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest { + pub fn to_completion_request( + &self, + request_type: RequestType, + cx: &AppContext, + ) -> LanguageModelRequest { let buffer = self.buffer.read(cx); let mut contents = self.contents(cx).peekable(); @@ -2163,6 +2194,25 @@ impl Context { completion_request.messages.push(request_message); } + if let RequestType::SuggestEdits = request_type { + if let Ok(preamble) = self.prompt_builder.generate_workflow_prompt() { + let last_elem_index = completion_request.messages.len(); + + completion_request + .messages + .push(LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::Text(preamble)], + cache: false, + }); + + // The preamble message should be sent right before the last actual user message. + completion_request + .messages + .swap(last_elem_index, last_elem_index.saturating_sub(1)); + } + } + completion_request } @@ -2477,7 +2527,7 @@ impl Context { return; } - let mut request = self.to_completion_request(cx); + let mut request = self.to_completion_request(RequestType::Chat, cx); request.messages.push(LanguageModelRequestMessage { role: Role::User, content: vec![ diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 9af8193605f00f..fdf00c8b044484 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1,7 +1,7 @@ use crate::{ assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist, - CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff, + CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, RequestType, StreamingDiff, }; use anyhow::{anyhow, Context as _, Result}; use client::{telemetry::Telemetry, ErrorExt}; @@ -189,11 +189,16 @@ impl InlineAssistant { initial_prompt: Option, cx: &mut WindowContext, ) { - let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); + let (snapshot, initial_selections) = editor.update(cx, |editor, cx| { + ( + editor.buffer().read(cx).snapshot(cx), + editor.selections.all::(cx), + ) + }); let mut selections = Vec::>::new(); let mut newest_selection = None; - for mut selection in editor.read(cx).selections.all::(cx) { + for mut selection in initial_selections { if selection.end > selection.start { selection.start.column = 0; // If the selection ends at the start of the line, we don't want to include it. 
@@ -566,10 +571,13 @@ impl InlineAssistant { return; }; - let editor = editor.read(cx); - if editor.selections.count() == 1 { - let selection = editor.selections.newest::(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); + if editor.read(cx).selections.count() == 1 { + let (selection, buffer) = editor.update(cx, |editor, cx| { + ( + editor.selections.newest::(cx), + editor.buffer().read(cx).snapshot(cx), + ) + }); for assist_id in &editor_assists.assist_ids { let assist = &self.assists[assist_id]; let assist_range = assist.range.to_offset(&buffer); @@ -594,10 +602,13 @@ impl InlineAssistant { return; }; - let editor = editor.read(cx); - if editor.selections.count() == 1 { - let selection = editor.selections.newest::(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); + if editor.read(cx).selections.count() == 1 { + let (selection, buffer) = editor.update(cx, |editor, cx| { + ( + editor.selections.newest::(cx), + editor.buffer().read(cx).snapshot(cx), + ) + }); let mut closest_assist_fallback = None; for assist_id in &editor_assists.assist_ids { let assist = &self.assists[assist_id]; @@ -2234,7 +2245,7 @@ impl InlineAssist { .read(cx) .active_context(cx)? .read(cx) - .to_completion_request(cx), + .to_completion_request(RequestType::Chat, cx), ) } else { None diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 2d0829086c8bfc..50fee242eab42d 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -311,7 +311,7 @@ impl PromptBuilder { } pub fn generate_workflow_prompt(&self) -> Result { - self.handlebars.lock().render("edit_workflow", &()) + self.handlebars.lock().render("suggest_edits", &()) } pub fn generate_project_slash_command_prompt( diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index e430e35622a222..ed20791d9560ed 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -34,7 +34,6 @@ pub mod search_command; pub mod symbols_command; pub mod tab_command; pub mod terminal_command; -pub mod workflow_command; pub(crate) struct SlashCommandCompletionProvider { cancel_flag: Mutex>, diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs deleted file mode 100644 index ca6ccde92ee0c7..00000000000000 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ /dev/null @@ -1,82 +0,0 @@ -use std::sync::atomic::AtomicBool; -use std::sync::Arc; - -use anyhow::Result; -use assistant_slash_command::{ - ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, - SlashCommandResult, -}; -use gpui::{Task, WeakView}; -use language::{BufferSnapshot, LspAdapterDelegate}; -use ui::prelude::*; -use workspace::Workspace; - -use crate::prompts::PromptBuilder; - -pub(crate) struct WorkflowSlashCommand { - prompt_builder: Arc, -} - -impl WorkflowSlashCommand { - pub const NAME: &'static str = "workflow"; - - pub fn new(prompt_builder: Arc) -> Self { - Self { prompt_builder } - } -} - -impl SlashCommand for WorkflowSlashCommand { - fn name(&self) -> String { - Self::NAME.into() - } - - fn description(&self) -> String { - "Insert prompt to opt into the edit workflow".into() - } - - fn menu_text(&self) -> String { - self.description() - } - - fn requires_argument(&self) -> bool { - false - } - - fn complete_argument( - self: Arc, - _arguments: &[String], - _cancel: Arc, - _workspace: Option>, - _cx: &mut WindowContext, - ) -> Task>> { - 
Task::ready(Ok(Vec::new())) - } - - fn run( - self: Arc, - _arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, - _workspace: WeakView, - _delegate: Option>, - cx: &mut WindowContext, - ) -> Task { - let prompt_builder = self.prompt_builder.clone(); - cx.spawn(|_cx| async move { - let text = prompt_builder.generate_workflow_prompt()?; - let range = 0..text.len(); - - Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::Route, - label: "Workflow".into(), - metadata: None, - }], - run_commands_in_text: false, - } - .to_event_stream()) - }) - } -} diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 41b8d9eb88ac25..3e472ae4a97fb4 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -1,6 +1,6 @@ use crate::{ humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent, - ModelSelector, DEFAULT_CONTEXT_LINES, + ModelSelector, RequestType, DEFAULT_CONTEXT_LINES, }; use anyhow::{Context as _, Result}; use client::telemetry::Telemetry; @@ -251,7 +251,7 @@ impl TerminalInlineAssistant { .read(cx) .active_context(cx)? .read(cx) - .to_completion_request(cx), + .to_completion_request(RequestType::Chat, cx), ) }) } else { diff --git a/crates/assistant/src/tools.rs b/crates/assistant/src/tools.rs index abde04e760e3ee..83a396c0203cb2 100644 --- a/crates/assistant/src/tools.rs +++ b/crates/assistant/src/tools.rs @@ -1 +1,2 @@ +pub mod context_server_tool; pub mod now_tool; diff --git a/crates/assistant/src/tools/context_server_tool.rs b/crates/assistant/src/tools/context_server_tool.rs new file mode 100644 index 00000000000000..93edb32b75b725 --- /dev/null +++ b/crates/assistant/src/tools/context_server_tool.rs @@ -0,0 +1,82 @@ +use anyhow::{anyhow, bail}; +use assistant_tool::Tool; +use context_servers::manager::ContextServerManager; +use context_servers::types; +use gpui::Task; + +pub struct ContextServerTool { + server_id: String, + tool: types::Tool, +} + +impl ContextServerTool { + pub fn new(server_id: impl Into, tool: types::Tool) -> Self { + Self { + server_id: server_id.into(), + tool, + } + } +} + +impl Tool for ContextServerTool { + fn name(&self) -> String { + self.tool.name.clone() + } + + fn description(&self) -> String { + self.tool.description.clone().unwrap_or_default() + } + + fn input_schema(&self) -> serde_json::Value { + match &self.tool.input_schema { + serde_json::Value::Null => { + serde_json::json!({ "type": "object", "properties": [] }) + } + serde_json::Value::Object(map) if map.is_empty() => { + serde_json::json!({ "type": "object", "properties": [] }) + } + _ => self.tool.input_schema.clone(), + } + } + + fn run( + self: std::sync::Arc, + input: serde_json::Value, + _workspace: gpui::WeakView, + cx: &mut ui::WindowContext, + ) -> gpui::Task> { + let manager = ContextServerManager::global(cx); + let manager = manager.read(cx); + if let Some(server) = manager.get_server(&self.server_id) { + cx.foreground_executor().spawn({ + let tool_name = self.tool.name.clone(); + async move { + let Some(protocol) = server.client.read().clone() else { + bail!("Context server not initialized"); + }; + + let arguments = if let serde_json::Value::Object(map) = input { + Some(map.into_iter().collect()) + } else { + None + }; + + log::trace!( + "Running tool: {} with arguments: {:?}", + tool_name, + arguments + ); + 
let response = protocol.run_tool(tool_name, arguments).await?; + + let tool_result = match response.tool_result { + serde_json::Value::String(s) => s, + _ => serde_json::to_string(&response.tool_result)?, + }; + Ok(tool_result) + } + }) + } else { + Task::ready(Err(anyhow!("Context server not found"))) + } + } +} diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 61154cb5043eb8..fbbd23907a7153 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -84,9 +84,9 @@ pub struct AutoUpdater { } #[derive(Deserialize)] -struct JsonRelease { - version: String, - url: String, +pub struct JsonRelease { + pub version: String, + pub url: String, } struct MacOsUnmounter { @@ -482,7 +482,7 @@ impl AutoUpdater { release_channel: ReleaseChannel, version: Option, cx: &mut AsyncAppContext, - ) -> Result<(String, String)> { + ) -> Result<(JsonRelease, String)> { let this = cx.update(|cx| { cx.default_global::() .0 @@ -504,7 +504,7 @@ impl AutoUpdater { let update_request_body = build_remote_server_update_request_body(cx)?; let body = serde_json::to_string(&update_request_body)?; - Ok((release.url, body)) + Ok((release, body)) } async fn get_release( diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index fc5b12cfae1c39..d627d8fe15a988 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -3,7 +3,7 @@ mod channel_index; use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage}; use anyhow::{anyhow, Result}; use channel_index::ChannelIndex; -use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore}; +use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore}; use collections::{hash_map, HashMap, HashSet}; use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt}; use gpui::{ @@ -33,30 +33,11 @@ struct NotesVersion { version: clock::Global, } -#[derive(Debug, Clone)] -pub struct HostedProject { - project_id: ProjectId, - channel_id: ChannelId, - name: SharedString, - _visibility: proto::ChannelVisibility, -} -impl From for HostedProject { - fn from(project: proto::HostedProject) -> Self { - Self { - project_id: ProjectId(project.project_id), - channel_id: ChannelId(project.channel_id), - _visibility: project.visibility(), - name: project.name.into(), - } - } -} pub struct ChannelStore { pub channel_index: ChannelIndex, channel_invitations: Vec>, channel_participants: HashMap>>, channel_states: HashMap, - hosted_projects: HashMap, - outgoing_invites: HashSet<(ChannelId, UserId)>, update_channels_tx: mpsc::UnboundedSender, opened_buffers: HashMap>, @@ -85,7 +66,6 @@ pub struct ChannelState { observed_notes_version: NotesVersion, observed_chat_message: Option, role: Option, - projects: HashSet, } impl Channel { @@ -216,7 +196,6 @@ impl ChannelStore { channel_invitations: Vec::default(), channel_index: ChannelIndex::default(), channel_participants: Default::default(), - hosted_projects: Default::default(), outgoing_invites: Default::default(), opened_buffers: Default::default(), opened_chats: Default::default(), @@ -316,19 +295,6 @@ impl ChannelStore { self.channel_index.by_id().get(&channel_id) } - pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> { - let mut projects: Vec<(SharedString, ProjectId)> = self - .channel_states - .get(&channel_id) - .map(|state| state.projects.clone()) - .unwrap_or_default() 
- .into_iter() - .flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id))) - .collect(); - projects.sort(); - projects - } - pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool { if let Some(buffer) = self.opened_buffers.get(&channel_id) { if let OpenedModelHandle::Open(buffer) = buffer { @@ -1102,9 +1068,7 @@ impl ChannelStore { let channels_changed = !payload.channels.is_empty() || !payload.delete_channels.is_empty() || !payload.latest_channel_message_ids.is_empty() - || !payload.latest_channel_buffer_versions.is_empty() - || !payload.hosted_projects.is_empty() - || !payload.deleted_hosted_projects.is_empty(); + || !payload.latest_channel_buffer_versions.is_empty(); if channels_changed { if !payload.delete_channels.is_empty() { @@ -1161,34 +1125,6 @@ impl ChannelStore { .or_default() .update_latest_message_id(latest_channel_message.message_id); } - - for hosted_project in payload.hosted_projects { - let hosted_project: HostedProject = hosted_project.into(); - if let Some(old_project) = self - .hosted_projects - .insert(hosted_project.project_id, hosted_project.clone()) - { - self.channel_states - .entry(old_project.channel_id) - .or_default() - .remove_hosted_project(old_project.project_id); - } - self.channel_states - .entry(hosted_project.channel_id) - .or_default() - .add_hosted_project(hosted_project.project_id); - } - - for hosted_project_id in payload.deleted_hosted_projects { - let hosted_project_id = ProjectId(hosted_project_id); - - if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) { - self.channel_states - .entry(old_project.channel_id) - .or_default() - .remove_hosted_project(old_project.project_id); - } - } } cx.notify(); @@ -1295,12 +1231,4 @@ impl ChannelState { }; } } - - fn add_hosted_project(&mut self, project_id: ProjectId) { - self.projects.insert(project_id); - } - - fn remove_hosted_project(&mut self, project_id: ProjectId) { - self.projects.remove(&project_id); - } } diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index f6ee279dc83220..fab5687c418cf9 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -48,6 +48,7 @@ pub struct Collaborator { pub peer_id: proto::PeerId, pub replica_id: ReplicaId, pub user_id: UserId, + pub is_host: bool, } impl PartialOrd for User { @@ -824,6 +825,7 @@ impl Collaborator { peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?, replica_id: message.replica_id as ReplicaId, user_id: message.user_id as UserId, + is_host: message.is_host, }) } } diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index c6bd87a8a57156..c59091d66d0e27 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -52,9 +52,7 @@ CREATE TABLE "projects" ( "host_user_id" INTEGER REFERENCES users (id), "host_connection_id" INTEGER, "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, - "unregistered" BOOLEAN NOT NULL DEFAULT FALSE, - "hosted_project_id" INTEGER REFERENCES hosted_projects (id), - "dev_server_project_id" INTEGER REFERENCES dev_server_projects(id) + "unregistered" BOOLEAN NOT NULL DEFAULT FALSE ); CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", 
"host_connection_server_id"); @@ -399,30 +397,6 @@ CREATE TABLE rate_buckets ( ); CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); -CREATE TABLE hosted_projects ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - channel_id INTEGER NOT NULL REFERENCES channels(id), - name TEXT NOT NULL, - visibility TEXT NOT NULL, - deleted_at TIMESTAMP NULL -); -CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id); -CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL); - -CREATE TABLE dev_servers ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL REFERENCES users(id), - name TEXT NOT NULL, - ssh_connection_string TEXT, - hashed_token TEXT NOT NULL -); - -CREATE TABLE dev_server_projects ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id), - paths TEXT NOT NULL -); - CREATE TABLE IF NOT EXISTS billing_preferences ( id INTEGER PRIMARY KEY AUTOINCREMENT, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, diff --git a/crates/collab/migrations/20241023201725_remove_dev_servers.sql b/crates/collab/migrations/20241023201725_remove_dev_servers.sql new file mode 100644 index 00000000000000..c5da673a29b1e0 --- /dev/null +++ b/crates/collab/migrations/20241023201725_remove_dev_servers.sql @@ -0,0 +1,6 @@ +ALTER TABLE projects DROP COLUMN dev_server_project_id; +ALTER TABLE projects DROP COLUMN hosted_project_id; + +DROP TABLE hosted_projects; +DROP TABLE dev_server_projects; +DROP TABLE dev_servers; diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 9c02e0c801c826..81db7158e83ab7 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -617,7 +617,6 @@ pub struct ChannelsForUser { pub channels: Vec, pub channel_memberships: Vec, pub channel_participants: HashMap>, - pub hosted_projects: Vec, pub invited_channels: Vec, pub observed_buffer_versions: Vec, @@ -741,6 +740,7 @@ impl ProjectCollaborator { peer_id: Some(self.connection_id.into()), replica_id: self.replica_id.0 as u32, user_id: self.user_id.to_proto(), + is_host: self.is_host, } } } diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 79523444ab2760..bfcd111e3f4861 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -10,7 +10,6 @@ pub mod contacts; pub mod contributors; pub mod embeddings; pub mod extensions; -pub mod hosted_projects; pub mod messages; pub mod notifications; pub mod processed_stripe_events; diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 06ad2b45946511..dee4d820e86ff7 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -116,6 +116,7 @@ impl Database { peer_id: Some(collaborator.connection().into()), user_id: collaborator.user_id.to_proto(), replica_id: collaborator.replica_id.0 as u32, + is_host: false, }) .collect(), }) @@ -222,6 +223,7 @@ impl Database { peer_id: Some(collaborator.connection().into()), user_id: collaborator.user_id.to_proto(), replica_id: collaborator.replica_id.0 as u32, + is_host: false, }) .collect(), }, @@ -257,6 +259,7 @@ impl Database { peer_id: Some(db_collaborator.connection().into()), replica_id: db_collaborator.replica_id.0 as u32, user_id: db_collaborator.user_id.to_proto(), + is_host: false, }) } else { collaborator_ids_to_remove.push(db_collaborator.id); @@ -385,6 +388,7 @@ impl Database { peer_id: 
Some(connection.into()), replica_id: row.replica_id.0 as u32, user_id: row.user_id.to_proto(), + is_host: false, }); } diff --git a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs index f9da0187fec7a0..10120ea8143010 100644 --- a/crates/collab/src/db/queries/channels.rs +++ b/crates/collab/src/db/queries/channels.rs @@ -615,15 +615,10 @@ impl Database { .observed_channel_messages(&channel_ids, user_id, tx) .await?; - let hosted_projects = self - .get_hosted_projects(&channel_ids, &roles_by_channel_id, tx) - .await?; - Ok(ChannelsForUser { channel_memberships, channels, invited_channels, - hosted_projects, channel_participants, latest_buffer_versions, latest_channel_messages, diff --git a/crates/collab/src/db/queries/hosted_projects.rs b/crates/collab/src/db/queries/hosted_projects.rs deleted file mode 100644 index eb38eaa9ccac9b..00000000000000 --- a/crates/collab/src/db/queries/hosted_projects.rs +++ /dev/null @@ -1,85 +0,0 @@ -use rpc::{proto, ErrorCode}; - -use super::*; - -impl Database { - pub async fn get_hosted_projects( - &self, - channel_ids: &[ChannelId], - roles: &HashMap, - tx: &DatabaseTransaction, - ) -> Result> { - let projects = hosted_project::Entity::find() - .find_also_related(project::Entity) - .filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0))) - .all(tx) - .await? - .into_iter() - .flat_map(|(hosted_project, project)| { - if hosted_project.deleted_at.is_some() { - return None; - } - match hosted_project.visibility { - ChannelVisibility::Public => {} - ChannelVisibility::Members => { - let is_visible = roles - .get(&hosted_project.channel_id) - .map(|role| role.can_see_all_descendants()) - .unwrap_or(false); - if !is_visible { - return None; - } - } - }; - Some(proto::HostedProject { - project_id: project?.id.to_proto(), - channel_id: hosted_project.channel_id.to_proto(), - name: hosted_project.name.clone(), - visibility: hosted_project.visibility.into(), - }) - }) - .collect(); - - Ok(projects) - } - - pub async fn get_hosted_project( - &self, - hosted_project_id: HostedProjectId, - user_id: UserId, - tx: &DatabaseTransaction, - ) -> Result<(hosted_project::Model, ChannelRole)> { - let project = hosted_project::Entity::find_by_id(hosted_project_id) - .one(tx) - .await? - .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?; - let channel = channel::Entity::find_by_id(project.channel_id) - .one(tx) - .await? - .ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?; - - let role = match project.visibility { - ChannelVisibility::Public => { - self.check_user_is_channel_participant(&channel, user_id, tx) - .await? - } - ChannelVisibility::Members => { - self.check_user_is_channel_member(&channel, user_id, tx) - .await? - } - }; - - Ok((project, role)) - } - - pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result { - self.transaction(|tx| async move { - Ok(project::Entity::find_by_id(project_id) - .one(&*tx) - .await? - .map(|project| project.hosted_project_id.is_some()) - .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?) 
- }) - .await - } -} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 27bec21ca1cddd..7ff8aa7a9fbb1f 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -68,7 +68,6 @@ impl Database { connection.owner_id as i32, ))), id: ActiveValue::NotSet, - hosted_project_id: ActiveValue::Set(None), } .insert(&*tx) .await?; @@ -536,39 +535,6 @@ impl Database { .await } - /// Adds the given connection to the specified hosted project - pub async fn join_hosted_project( - &self, - id: ProjectId, - user_id: UserId, - connection: ConnectionId, - ) -> Result<(Project, ReplicaId)> { - self.transaction(|tx| async move { - let (project, hosted_project) = project::Entity::find_by_id(id) - .find_also_related(hosted_project::Entity) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("hosted project is no longer shared"))?; - - let Some(hosted_project) = hosted_project else { - return Err(anyhow!("project is not hosted"))?; - }; - - let channel = channel::Entity::find_by_id(hosted_project.channel_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no such channel"))?; - - let role = self - .check_user_is_channel_participant(&channel, user_id, &tx) - .await?; - - self.join_project_internal(project, user_id, connection, role, &tx) - .await - }) - .await - } - pub async fn get_project(&self, id: ProjectId) -> Result { self.transaction(|tx| async move { Ok(project::Entity::find_by_id(id) @@ -784,49 +750,6 @@ impl Database { Ok((project, replica_id as ReplicaId)) } - pub async fn leave_hosted_project( - &self, - project_id: ProjectId, - connection: ConnectionId, - ) -> Result { - self.transaction(|tx| async move { - let result = project_collaborator::Entity::delete_many() - .filter( - Condition::all() - .add(project_collaborator::Column::ProjectId.eq(project_id)) - .add(project_collaborator::Column::ConnectionId.eq(connection.id as i32)) - .add( - project_collaborator::Column::ConnectionServerId - .eq(connection.owner_id as i32), - ), - ) - .exec(&*tx) - .await?; - if result.rows_affected == 0 { - return Err(anyhow!("not in the project"))?; - } - - let project = project::Entity::find_by_id(project_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no such project"))?; - let collaborators = project - .find_related(project_collaborator::Entity) - .all(&*tx) - .await?; - let connection_ids = collaborators - .into_iter() - .map(|collaborator| collaborator.connection()) - .collect(); - Ok(LeftProject { - id: project.id, - connection_ids, - should_unshare: false, - }) - }) - .await - } - /// Removes the given connection from the specified project. 
pub async fn leave_project( &self, diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index 23dced800b56ba..8a4ec29998ac86 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -18,7 +18,6 @@ pub mod extension; pub mod extension_version; pub mod feature_flag; pub mod follower; -pub mod hosted_project; pub mod language_server; pub mod notification; pub mod notification_kind; diff --git a/crates/collab/src/db/tables/hosted_project.rs b/crates/collab/src/db/tables/hosted_project.rs deleted file mode 100644 index dd7cb1b5b107f9..00000000000000 --- a/crates/collab/src/db/tables/hosted_project.rs +++ /dev/null @@ -1,27 +0,0 @@ -use crate::db::{ChannelId, ChannelVisibility, HostedProjectId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "hosted_projects")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: HostedProjectId, - pub channel_id: ChannelId, - pub name: String, - pub visibility: ChannelVisibility, - pub deleted_at: Option, -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_one = "super::project::Entity")] - Project, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Project.def() - } -} diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index a357634aff614c..10e3da50e1dd09 100644 --- a/crates/collab/src/db/tables/project.rs +++ b/crates/collab/src/db/tables/project.rs @@ -1,4 +1,4 @@ -use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId}; +use crate::db::{ProjectId, Result, RoomId, ServerId, UserId}; use anyhow::anyhow; use rpc::ConnectionId; use sea_orm::entity::prelude::*; @@ -12,7 +12,6 @@ pub struct Model { pub host_user_id: Option, pub host_connection_id: Option, pub host_connection_server_id: Option, - pub hosted_project_id: Option, } impl Model { @@ -50,12 +49,6 @@ pub enum Relation { Collaborators, #[sea_orm(has_many = "super::language_server::Entity")] LanguageServers, - #[sea_orm( - belongs_to = "super::hosted_project::Entity", - from = "Column::HostedProjectId", - to = "super::hosted_project::Column::Id" - )] - HostedProject, } impl Related for Entity { @@ -88,10 +81,4 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::HostedProject.def() - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index adc571580a0724..9575ed505b5b11 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -121,11 +121,13 @@ async fn test_channel_buffers(db: &Arc) { user_id: a_id.to_proto(), peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }), replica_id: 0, + is_host: false, }, rpc::proto::Collaborator { user_id: b_id.to_proto(), peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }), replica_id: 1, + is_host: false, } ] ); diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index cb3478879e1490..654327c4637ad2 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -449,6 +449,10 @@ async fn check_usage_limit( model_name: &str, claims: &LlmTokenClaims, ) -> Result<()> { + if claims.is_staff { + return Ok(()); + } + let model = state.db.model(provider, model_name)?; let usage = state .db @@ -513,11 +517,6 @@ async fn check_usage_limit( ]; for (used, limit, 
usage_measure) in checks { - // Temporarily bypass rate-limiting for staff members. - if claims.is_staff { - continue; - } - if used > limit { let resource = match usage_measure { UsageMeasure::RequestsPerMinute => "requests_per_minute", diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 90277242f1b1c6..f83bebbbb1f566 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -287,7 +287,6 @@ impl Server { .add_request_handler(share_project) .add_message_handler(unshare_project) .add_request_handler(join_project) - .add_request_handler(join_hosted_project) .add_message_handler(leave_project) .add_request_handler(update_project) .add_request_handler(update_worktree) @@ -308,6 +307,8 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler( forward_mutating_project_request::, @@ -1793,11 +1794,6 @@ impl JoinProjectInternalResponse for Response { Response::::send(self, result) } } -impl JoinProjectInternalResponse for Response { - fn send(self, result: proto::JoinProjectResponse) -> Result<()> { - Response::::send(self, result) - } -} fn join_project_internal( response: impl JoinProjectInternalResponse, @@ -1831,6 +1827,7 @@ fn join_project_internal( peer_id: Some(session.connection_id.into()), replica_id: replica_id.0 as u32, user_id: guest_user_id.to_proto(), + is_host: false, }), }; @@ -1921,11 +1918,6 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result let sender_id = session.connection_id; let project_id = ProjectId::from_proto(request.project_id); let db = session.db().await; - if db.is_hosted_project(project_id).await? 
{ - let project = db.leave_hosted_project(project_id, sender_id).await?; - project_left(&project, &session); - return Ok(()); - } let (room, project) = &*db.leave_project(project_id, sender_id).await?; tracing::info!( @@ -1941,24 +1933,6 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result Ok(()) } -async fn join_hosted_project( - request: proto::JoinHostedProject, - response: Response, - session: Session, -) -> Result<()> { - let (mut project, replica_id) = session - .db() - .await - .join_hosted_project( - ProjectId(request.project_id as i32), - session.user_id(), - session.connection_id, - ) - .await?; - - join_project_internal(response, session, &mut project, &replica_id) -} - /// Updates other participants with changes to the project async fn update_project( request: proto::UpdateProject, @@ -4200,7 +4174,6 @@ fn build_channels_update(channels: ChannelsForUser) -> proto::UpdateChannels { update.channel_invitations.push(channel.to_proto()); } - update.hosted_projects = channels.hosted_projects; update } diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 2a3c643f6deeb5..beb1ef61ef9886 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -1978,6 +1978,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA enabled: false, delay_ms: None, min_column: None, + show_commit_summary: false, }); cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 1367bf49c008e1..d708194f58396b 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1957,9 +1957,10 @@ async fn test_following_to_channel_notes_without_a_shared_project( }); channel_notes_1_b.update(cx_b, |notes, cx| { assert_eq!(notes.channel(cx).unwrap().name, "channel-1"); - let editor = notes.editor.read(cx); - assert_eq!(editor.text(cx), "Hello from A."); - assert_eq!(editor.selections.ranges::(cx), &[3..4]); + notes.editor.update(cx, |editor, cx| { + assert_eq!(editor.text(cx), "Hello from A."); + assert_eq!(editor.selections.ranges::(cx), &[3..4]); + }) }); // Client A opens the notes for channel 2. 
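
Note: the change to check_usage_limit above swaps the per-measure "continue" for a single early return, so staff requests skip the usage queries entirely instead of iterating over limits they can never hit. A simplified, self-contained sketch of that guard-clause shape (the real function also takes the LLM state, provider, and model name, and reads usage from the database):

    struct LlmTokenClaims {
        is_staff: bool,
    }

    fn check_usage_limit(claims: &LlmTokenClaims, checks: &[(u64, u64)]) -> Result<(), String> {
        // Bail out before any limits are fetched.
        if claims.is_staff {
            return Ok(());
        }
        for &(used, limit) in checks {
            if used > limit {
                return Err(format!("rate limit exceeded: {used} > {limit}"));
            }
        }
        Ok(())
    }

    fn main() {
        let staff = LlmTokenClaims { is_staff: true };
        let user = LlmTokenClaims { is_staff: false };
        assert!(check_usage_limit(&staff, &[(101, 100)]).is_ok());
        assert!(check_usage_limit(&user, &[(101, 100)]).is_err());
    }
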
diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 80cc2500f5f4ca..b1e8e5686109a9 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -21,8 +21,8 @@ use language::{ language_settings::{ AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter, }, - tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig, - LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, + tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter, + Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, }; use live_kit_client::MacOSDisplay; use lsp::LanguageServerId; @@ -4461,7 +4461,7 @@ async fn test_prettier_formatting_buffer( }, ..Default::default() }, - Some(tree_sitter_rust::LANGUAGE.into()), + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), ))); let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "TypeScript", @@ -6575,3 +6575,95 @@ async fn test_context_collaboration_with_reconnect( assert!(context.buffer().read(cx).read_only()); }); } + +#[gpui::test] +async fn test_remote_git_branches( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/project", serde_json::json!({ ".git":{} })) + .await; + let branches = ["main", "dev", "feature-1"]; + client_a + .fs() + .insert_branches(Path::new("/project/.git"), &branches); + + let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let root_path = ProjectPath::root_path(worktree_id); + // Client A sees that a guest has joined. 
+ executor.run_until_parked(); + + let branches_b = cx_b + .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx))) + .await + .unwrap(); + + let new_branch = branches[2]; + + let branches_b = branches_b + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&branches_b, &branches); + + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let host_branch = cx_a.update(|cx| { + project_a.update(cx, |project, cx| { + project.worktree_store().update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(host_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let host_branch = cx_a.update(|cx| { + project_a.update(cx, |project, cx| { + project.worktree_store().update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(host_branch.as_ref(), "totally-new-branch"); +} diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 0e13c88d9464ea..0e29bd5ef3c912 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -1,14 +1,27 @@ use crate::tests::TestServer; use call::ActiveCall; +use collections::HashSet; use fs::{FakeFs, Fs as _}; -use gpui::{Context as _, TestAppContext}; +use futures::StreamExt as _; +use gpui::{BackgroundExecutor, Context as _, TestAppContext, UpdateGlobal as _}; use http_client::BlockedHttpClient; -use language::{language_settings::language_settings, LanguageRegistry}; +use language::{ + language_settings::{ + language_settings, AllLanguageSettings, Formatter, FormatterList, PrettierSettings, + SelectedFormatter, + }, + tree_sitter_typescript, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, + LanguageRegistry, +}; use node_runtime::NodeRuntime; -use project::ProjectPath; +use project::{ + lsp_store::{FormatTarget, FormatTrigger}, + ProjectPath, +}; use remote::SshRemoteClient; use remote_server::{HeadlessAppState, HeadlessProject}; use serde_json::json; +use settings::SettingsStore; use std::{path::Path, sync::Arc}; #[gpui::test(iterations = 10)] @@ -174,3 +187,311 @@ async fn test_sharing_an_ssh_remote_project( ); }); } + +#[gpui::test] +async fn test_ssh_collaboration_git_branches( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + // Set up project on remote FS + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree("/project", serde_json::json!({ ".git":{} })) + .await; + + let branches = ["main", "dev", 
"feature-1"]; + remote_fs.insert_branches(Path::new("/project/.git"), &branches); + + // User A connects to the remote project via SSH. + server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let node = NodeRuntime::unavailable(); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let headless_project = server_cx.new_model(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: node, + languages, + }, + cx, + ) + }); + + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; + let (project_a, worktree_id) = client_a + .build_ssh_project("/project", client_ssh, cx_a) + .await; + + // While the SSH worktree is being scanned, user A shares the remote project. + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // User B joins the project. + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + // Give client A sometime to see that B has joined, and that the headless server + // has some git repositories + executor.run_until_parked(); + + let root_path = ProjectPath::root_path(worktree_id); + + let branches_b = cx_b + .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx))) + .await + .unwrap(); + + let new_branch = branches[2]; + + let branches_b = branches_b + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&branches_b, &branches); + + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), "totally-new-branch"); +} + +#[gpui::test] +async fn test_ssh_collaboration_formatting_with_prettier( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + let buffer_text = "let one = \"two\""; + let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; + remote_fs + 
.insert_tree("/project", serde_json::json!({ "a.ts": buffer_text })) + .await; + + let test_plugin = "test_plugin"; + let ts_lang = Arc::new(Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..LanguageMatcher::default() + }, + ..LanguageConfig::default() + }, + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + )); + client_a.language_registry().add(ts_lang.clone()); + client_b.language_registry().add(ts_lang.clone()); + + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let mut fake_language_servers = languages.register_fake_lsp( + "TypeScript", + FakeLspAdapter { + prettier_plugins: vec![test_plugin], + ..Default::default() + }, + ); + + // User A connects to the remote project via SSH. + server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let _headless_project = server_cx.new_model(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: NodeRuntime::unavailable(), + languages, + }, + cx, + ) + }); + + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; + let (project_a, worktree_id) = client_a + .build_ssh_project("/project", client_ssh, cx_a) + .await; + + // While the SSH worktree is being scanned, user A shares the remote project. + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // User B joins the project. + let project_b = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + // Opens the buffer and formats it + let buffer_b = project_b + .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)) + .await + .expect("user B opens buffer for formatting"); + + cx_a.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(SelectedFormatter::Auto); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + cx_b.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(SelectedFormatter::List(FormatterList( + vec![Formatter::LanguageServer { name: None }].into(), + ))); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.handle_request::(|_, _| async move { + panic!( + "Unexpected: prettier should be preferred since it's enabled and language supports it" + ) + }); + + project_b + .update(cx_b, |project, cx| { + project.format( + HashSet::from_iter([buffer_b.clone()]), + true, + FormatTrigger::Save, + FormatTarget::Buffer, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + buffer_text.to_string() + "\n" + prettier_format_suffix, + "Prettier formatting was not applied to client buffer after client's request" + ); + + // User A opens and formats the same buffer too + let buffer_a = project_a + .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)) + .await + .expect("user A opens buffer for formatting"); + + cx_a.update(|cx| { + 
SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |file| { + file.defaults.formatter = Some(SelectedFormatter::Auto); + file.defaults.prettier = Some(PrettierSettings { + allowed: true, + ..PrettierSettings::default() + }); + }); + }); + }); + project_a + .update(cx_a, |project, cx| { + project.format( + HashSet::from_iter([buffer_a.clone()]), + true, + FormatTrigger::Manual, + FormatTarget::Buffer, + cx, + ) + }) + .await + .unwrap(); + + executor.run_until_parked(); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.text()), + buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix, + "Prettier formatting was not applied to client buffer after host's request" + ); +} diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 59f83e06548a6b..14cab63f636deb 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal; use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings}; use call::ActiveCall; use channel::{Channel, ChannelEvent, ChannelStore}; -use client::{ChannelId, Client, Contact, ProjectId, User, UserStore}; +use client::{ChannelId, Client, Contact, User, UserStore}; use contact_finder::ContactFinder; use db::kvp::KEY_VALUE_STORE; use editor::{Editor, EditorElement, EditorStyle}; @@ -182,10 +182,6 @@ enum ListEntry { ChannelEditor { depth: usize, }, - HostedProject { - id: ProjectId, - name: SharedString, - }, Contact { contact: Arc, calling: bool, @@ -566,7 +562,6 @@ impl CollabPanel { } } - let hosted_projects = channel_store.projects_for_id(channel.id); let has_children = channel_store .channel_at_index(mat.candidate_id + 1) .map_or(false, |next_channel| { @@ -600,10 +595,6 @@ impl CollabPanel { }); } } - - for (name, id) in hosted_projects { - self.entries.push(ListEntry::HostedProject { id, name }); - } } } @@ -1029,40 +1020,6 @@ impl CollabPanel { .tooltip(move |cx| Tooltip::text("Open Chat", cx)) } - fn render_channel_project( - &self, - id: ProjectId, - name: &SharedString, - is_selected: bool, - cx: &mut ViewContext, - ) -> impl IntoElement { - ListItem::new(ElementId::NamedInteger( - "channel-project".into(), - id.0 as usize, - )) - .indent_level(2) - .indent_step_size(px(20.)) - .selected(is_selected) - .on_click(cx.listener(move |this, _, cx| { - if let Some(workspace) = this.workspace.upgrade() { - let app_state = workspace.read(cx).app_state().clone(); - workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err( - "Failed to open project", - cx, - |_, _| None, - ) - } - })) - .start_slot( - h_flex() - .relative() - .gap_1() - .child(IconButton::new(0, IconName::FileTree)), - ) - .child(Label::new(name.clone())) - .tooltip(move |cx| Tooltip::text("Open Project", cx)) - } - fn has_subchannels(&self, ix: usize) -> bool { self.entries.get(ix).map_or(false, |entry| { if let ListEntry::Channel { has_children, .. } = entry { @@ -1538,12 +1495,6 @@ impl CollabPanel { ListEntry::ChannelChat { channel_id } => { self.join_channel_chat(*channel_id, cx) } - ListEntry::HostedProject { - id: _id, - name: _name, - } => { - // todo() - } ListEntry::OutgoingRequest(_) => {} ListEntry::ChannelEditor { .. 
} => {} } @@ -2157,10 +2108,6 @@ impl CollabPanel { ListEntry::ChannelChat { channel_id } => self .render_channel_chat(*channel_id, is_selected, cx) .into_any_element(), - - ListEntry::HostedProject { id, name } => self - .render_channel_project(*id, name, is_selected, cx) - .into_any_element(), } } @@ -2779,7 +2726,7 @@ impl Render for CollabPanel { .on_action(cx.listener(CollabPanel::collapse_selected_channel)) .on_action(cx.listener(CollabPanel::expand_selected_channel)) .on_action(cx.listener(CollabPanel::start_move_selected_channel)) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .child(if self.user_store.read(cx).current_user().is_none() { self.render_signed_out(cx) @@ -2898,11 +2845,6 @@ impl PartialEq for ListEntry { return channel_1.id == channel_2.id; } } - ListEntry::HostedProject { id, .. } => { - if let ListEntry::HostedProject { id: other_id, .. } = other { - return id == other_id; - } - } ListEntry::ChannelNotes { channel_id } => { if let ListEntry::ChannelNotes { channel_id: other_id, diff --git a/crates/context_servers/src/protocol.rs b/crates/context_servers/src/protocol.rs index 80a7a7f991a23f..996fc34f462c5f 100644 --- a/crates/context_servers/src/protocol.rs +++ b/crates/context_servers/src/protocol.rs @@ -180,6 +180,39 @@ impl InitializedContextServerProtocol { Ok(completion) } + + /// List MCP tools. + pub async fn list_tools(&self) -> Result { + self.check_capability(ServerCapability::Tools)?; + + let response = self + .inner + .request::(types::RequestType::ListTools.as_str(), ()) + .await?; + + Ok(response) + } + + /// Executes a tool with the given arguments + pub async fn run_tool>( + &self, + tool: P, + arguments: Option>, + ) -> Result { + self.check_capability(ServerCapability::Tools)?; + + let params = types::CallToolParams { + name: tool.as_ref().to_string(), + arguments, + }; + + let response: types::CallToolResponse = self + .inner + .request(types::RequestType::CallTool.as_str(), params) + .await?; + + Ok(response) + } } impl InitializedContextServerProtocol { diff --git a/crates/context_servers/src/registry.rs b/crates/context_servers/src/registry.rs index 625f308c15228f..54901870349724 100644 --- a/crates/context_servers/src/registry.rs +++ b/crates/context_servers/src/registry.rs @@ -9,7 +9,8 @@ struct GlobalContextServerRegistry(Arc); impl Global for GlobalContextServerRegistry {} pub struct ContextServerRegistry { - registry: RwLock>>>, + command_registry: RwLock>>>, + tool_registry: RwLock>>>, } impl ContextServerRegistry { @@ -20,13 +21,14 @@ impl ContextServerRegistry { pub fn register(cx: &mut AppContext) { cx.set_global(GlobalContextServerRegistry(Arc::new( ContextServerRegistry { - registry: RwLock::new(HashMap::default()), + command_registry: RwLock::new(HashMap::default()), + tool_registry: RwLock::new(HashMap::default()), }, ))) } pub fn register_command(&self, server_id: String, command_name: &str) { - let mut registry = self.registry.write(); + let mut registry = self.command_registry.write(); registry .entry(server_id) .or_default() @@ -34,14 +36,34 @@ impl ContextServerRegistry { } pub fn unregister_command(&self, server_id: &str, command_name: &str) { - let mut registry = self.registry.write(); + let mut registry = self.command_registry.write(); if let Some(commands) = registry.get_mut(server_id) { commands.retain(|name| name.as_ref() != command_name); } } pub fn get_commands(&self, server_id: &str) -> Option>> { - let registry = self.registry.read(); + let registry = 
self.command_registry.read(); + registry.get(server_id).cloned() + } + + pub fn register_tool(&self, server_id: String, tool_name: &str) { + let mut registry = self.tool_registry.write(); + registry + .entry(server_id) + .or_default() + .push(tool_name.into()); + } + + pub fn unregister_tool(&self, server_id: &str, tool_name: &str) { + let mut registry = self.tool_registry.write(); + if let Some(tools) = registry.get_mut(server_id) { + tools.retain(|name| name.as_ref() != tool_name); + } + } + + pub fn get_tools(&self, server_id: &str) -> Option>> { + let registry = self.tool_registry.read(); registry.get(server_id).cloned() } } diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index 2bca0a021a1290..b6d8a958bb1264 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -16,6 +16,8 @@ pub enum RequestType { PromptsList, CompletionComplete, Ping, + ListTools, + ListResourceTemplates, } impl RequestType { @@ -32,6 +34,8 @@ impl RequestType { RequestType::PromptsList => "prompts/list", RequestType::CompletionComplete => "completion/complete", RequestType::Ping => "ping", + RequestType::ListTools => "tools/list", + RequestType::ListResourceTemplates => "resources/templates/list", } } } @@ -402,3 +406,17 @@ pub struct Completion { pub values: Vec, pub total: CompletionTotal, } + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CallToolResponse { + pub tool_result: serde_json::Value, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ListToolsResponse { + pub tools: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub next_cursor: Option, +} diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index da6b969b7222bb..d63710983b5a00 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -185,7 +185,7 @@ impl Render for CopilotCodeVerification { v_flex() .id("copilot code verification") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .elevation_3(cx) .w_96() .items_center() diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index cb6d07e9064610..cef634a41c8241 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -101,7 +101,7 @@ impl Render for ProjectDiagnosticsEditor { }; div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .when(self.path_states.is_empty(), |el| { el.key_context("EmptyPane") }) diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 72a4ac9bcfb01e..2c580c44def3f7 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -136,11 +136,12 @@ impl DiagnosticIndicator { } fn update(&mut self, editor: View, cx: &mut ViewContext) { - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx); - let cursor_position = editor.selections.newest::(cx).head(); + let (buffer, cursor_position) = editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let cursor_position = editor.selections.newest::(cx).head(); + (buffer, cursor_position) + }); let new_diagnostic = buffer - .snapshot(cx) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position, false) .filter(|entry| !entry.range.is_empty()) .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index ca9f264789b8e7..3d3a7834f2ac15 
100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -77,6 +77,7 @@ theme.workspace = true tree-sitter-html = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } +unicode-segmentation.workspace = true unindent = { workspace = true, optional = true } ui.workspace = true url.workspace = true diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index af8133fc78a904..18ea4db8384ed6 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -21,6 +21,7 @@ mod block_map; mod crease_map; mod fold_map; mod inlay_map; +pub(crate) mod invisibles; mod tab_map; mod wrap_map; @@ -42,6 +43,7 @@ use gpui::{ pub(crate) use inlay_map::Inlay; use inlay_map::{InlayMap, InlaySnapshot}; pub use inlay_map::{InlayOffset, InlayPoint}; +use invisibles::{is_invisible, replacement}; use language::{ language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point, Subscription as BufferSubscription, @@ -56,6 +58,7 @@ use std::{ any::TypeId, borrow::Cow, fmt::Debug, + iter, num::NonZeroU32, ops::{Add, Range, Sub}, sync::Arc, @@ -63,7 +66,8 @@ use std::{ use sum_tree::{Bias, TreeMap}; use tab_map::{TabMap, TabSnapshot}; use text::LineIndent; -use ui::WindowContext; +use ui::{div, px, IntoElement, ParentElement, SharedString, Styled, WindowContext}; +use unicode_segmentation::UnicodeSegmentation; use wrap_map::{WrapMap, WrapSnapshot}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -461,6 +465,98 @@ pub struct HighlightedChunk<'a> { pub renderer: Option, } +impl<'a> HighlightedChunk<'a> { + fn highlight_invisibles( + self, + editor_style: &'a EditorStyle, + ) -> impl Iterator + 'a { + let mut chars = self.text.chars().peekable(); + let mut text = self.text; + let style = self.style; + let is_tab = self.is_tab; + let renderer = self.renderer; + iter::from_fn(move || { + let mut prefix_len = 0; + while let Some(&ch) = chars.peek() { + if !is_invisible(ch) { + prefix_len += ch.len_utf8(); + chars.next(); + continue; + } + if prefix_len > 0 { + let (prefix, suffix) = text.split_at(prefix_len); + text = suffix; + return Some(HighlightedChunk { + text: prefix, + style, + is_tab, + renderer: renderer.clone(), + }); + } + chars.next(); + let (prefix, suffix) = text.split_at(ch.len_utf8()); + text = suffix; + if let Some(replacement) = replacement(ch) { + let background = editor_style.status.hint_background; + let underline = editor_style.status.hint; + return Some(HighlightedChunk { + text: prefix, + style: None, + is_tab: false, + renderer: Some(ChunkRenderer { + render: Arc::new(move |_| { + div() + .child(replacement) + .bg(background) + .text_decoration_1() + .text_decoration_color(underline) + .into_any_element() + }), + constrain_width: false, + }), + }); + } else { + let invisible_highlight = HighlightStyle { + background_color: Some(editor_style.status.hint_background), + underline: Some(UnderlineStyle { + color: Some(editor_style.status.hint), + thickness: px(1.), + wavy: false, + }), + ..Default::default() + }; + let invisible_style = if let Some(mut style) = style { + style.highlight(invisible_highlight); + style + } else { + invisible_highlight + }; + + return Some(HighlightedChunk { + text: prefix, + style: Some(invisible_style), + is_tab: false, + renderer: renderer.clone(), + }); + } + } + + if !text.is_empty() { + let remainder = text; + text = ""; + Some(HighlightedChunk { + text: remainder, + style, + is_tab, + renderer: 
renderer.clone(), + }) + } else { + None + } + }) + } +} + #[derive(Clone)] pub struct DisplaySnapshot { pub buffer_snapshot: MultiBufferSnapshot, @@ -686,7 +782,7 @@ impl DisplaySnapshot { suggestion: Some(editor_style.suggestions_style), }, ) - .map(|chunk| { + .flat_map(|chunk| { let mut highlight_style = chunk .syntax_highlight_id .and_then(|id| id.style(&editor_style.syntax)); @@ -729,6 +825,7 @@ impl DisplaySnapshot { is_tab: chunk.is_tab, renderer: chunk.renderer, } + .highlight_invisibles(editor_style) }) } @@ -795,12 +892,10 @@ impl DisplaySnapshot { layout_line.closest_index_for_x(x) as u32 } - pub fn display_chars_at( - &self, - mut point: DisplayPoint, - ) -> impl Iterator + '_ { + pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option { point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left)); - self.text_chunks(point.row()) + let chars = self + .text_chunks(point.row()) .flat_map(str::chars) .skip_while({ let mut column = 0; @@ -810,16 +905,24 @@ impl DisplaySnapshot { !at_point } }) - .map(move |ch| { - let result = (ch, point); - if ch == '\n' { - *point.row_mut() += 1; - *point.column_mut() = 0; - } else { - *point.column_mut() += ch.len_utf8() as u32; + .take_while({ + let mut prev = false; + move |char| { + let now = char.is_ascii(); + let end = char.is_ascii() && (char.is_ascii_whitespace() || prev); + prev = now; + !end } - result - }) + }); + chars.collect::().graphemes(true).next().map(|s| { + if let Some(invisible) = s.chars().next().filter(|&c| is_invisible(c)) { + replacement(invisible).unwrap_or(s).to_owned().into() + } else if s == "\n" { + " ".into() + } else { + s.to_owned().into() + } + }) } pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator + '_ { @@ -1168,16 +1271,21 @@ pub mod tests { use super::*; use crate::{movement, test::marked_display_snapshot}; use block_map::BlockPlacement; - use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla}; + use gpui::{ + div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla, Rgba, + }; use language::{ language_settings::{AllLanguageSettings, AllLanguageSettingsContent}, - Buffer, Language, LanguageConfig, LanguageMatcher, + Buffer, Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageConfig, + LanguageMatcher, }; + use lsp::LanguageServerId; use project::Project; use rand::{prelude::*, Rng}; use settings::SettingsStore; use smol::stream::StreamExt; use std::{env, sync::Arc}; + use text::PointUtf16; use theme::{LoadThemes, SyntaxTheme}; use unindent::Unindent as _; use util::test::{marked_text_ranges, sample_text}; @@ -1832,6 +1940,125 @@ pub mod tests { ); } + #[gpui::test] + async fn test_chunks_with_diagnostics_across_blocks(cx: &mut gpui::TestAppContext) { + cx.background_executor + .set_block_on_ticks(usize::MAX..=usize::MAX); + + let text = r#" + struct A { + b: usize; + } + const c: usize = 1; + "# + .unindent(); + + cx.update(|cx| init_test(cx, |_| {})); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx)); + + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics( + LanguageServerId(0), + DiagnosticSet::new( + [DiagnosticEntry { + range: PointUtf16::new(0, 0)..PointUtf16::new(2, 1), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + group_id: 1, + message: "hi".into(), + ..Default::default() + }, + }], + buffer, + ), + cx, + ) + }); + + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); 
+ + let map = cx.new_model(|cx| { + DisplayMap::new( + buffer, + font("Courier"), + px(16.0), + None, + true, + 1, + 1, + 0, + FoldPlaceholder::test(), + cx, + ) + }); + + let black = gpui::black().to_rgb(); + let red = gpui::red().to_rgb(); + + // Insert a block in the middle of a multi-line diagnostic. + map.update(cx, |map, cx| { + map.highlight_text( + TypeId::of::(), + vec![ + buffer_snapshot.anchor_before(Point::new(3, 9)) + ..buffer_snapshot.anchor_after(Point::new(3, 14)), + buffer_snapshot.anchor_before(Point::new(3, 17)) + ..buffer_snapshot.anchor_after(Point::new(3, 18)), + ], + red.into(), + ); + map.insert_blocks( + [BlockProperties { + placement: BlockPlacement::Below( + buffer_snapshot.anchor_before(Point::new(1, 0)), + ), + height: 1, + style: BlockStyle::Sticky, + render: Box::new(|_| div().into_any()), + priority: 0, + }], + cx, + ) + }); + + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + let mut chunks = Vec::<(String, Option, Rgba)>::new(); + for chunk in snapshot.chunks(DisplayRow(0)..DisplayRow(5), true, Default::default()) { + let color = chunk + .highlight_style + .and_then(|style| style.color) + .map_or(black, |color| color.to_rgb()); + if let Some((last_chunk, last_severity, last_color)) = chunks.last_mut() { + if *last_severity == chunk.diagnostic_severity && *last_color == color { + last_chunk.push_str(chunk.text); + continue; + } + } + + chunks.push((chunk.text.to_string(), chunk.diagnostic_severity, color)); + } + + assert_eq!( + chunks, + [ + ( + "struct A {\n b: usize;\n".into(), + Some(DiagnosticSeverity::ERROR), + black + ), + ("\n".into(), None, black), + ("}".into(), Some(DiagnosticSeverity::ERROR), black), + ("\nconst c: ".into(), None, black), + ("usize".into(), None, red), + (" = ".into(), None, black), + ("1".into(), None, red), + (";\n".into(), None, black), + ] + ); + } + // todo(linux) fails due to pixel differences in text rendering #[cfg(target_os = "macos")] #[gpui::test] diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index d4e39f2df9270e..673b9383bc58f3 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -255,6 +255,22 @@ impl<'a> InlayChunks<'a> { self.buffer_chunk = None; self.output_offset = new_range.start; self.max_output_offset = new_range.end; + + let mut highlight_endpoints = Vec::new(); + if let Some(text_highlights) = self.highlights.text_highlights { + if !text_highlights.is_empty() { + self.snapshot.apply_text_highlights( + &mut self.transforms, + &new_range, + text_highlights, + &mut highlight_endpoints, + ); + self.transforms.seek(&new_range.start, Bias::Right, &()); + highlight_endpoints.sort(); + } + } + self.highlight_endpoints = highlight_endpoints.into_iter().peekable(); + self.active_highlights.clear(); } pub fn offset(&self) -> InlayOffset { diff --git a/crates/editor/src/display_map/invisibles.rs b/crates/editor/src/display_map/invisibles.rs new file mode 100644 index 00000000000000..794b897603bd66 --- /dev/null +++ b/crates/editor/src/display_map/invisibles.rs @@ -0,0 +1,129 @@ +// Invisibility in a Unicode context is not well defined, so we have to guess. +// +// We highlight all ASCII control codes, and unicode whitespace because they are likely +// confused with an ASCII space in a programming context (U+0020). 
+// +// We also highlight the handful of blank non-space characters: +// U+2800 BRAILLE PATTERN BLANK - Category: So +// U+115F HANGUL CHOSEONG FILLER - Category: Lo +// U+1160 HANGUL CHOSEONG FILLER - Category: Lo +// U+3164 HANGUL FILLER - Category: Lo +// U+FFA0 HALFWIDTH HANGUL FILLER - Category: Lo +// U+FFFC OBJECT REPLACEMENT CHARACTER - Category: So +// +// For the rest of Unicode, invisibility happens for two reasons: +// * A Format character (like a byte order mark or right-to-left override) +// * An invisible Nonspacing Mark character (like U+034F, or variation selectors) +// +// We don't consider unassigned codepoints invisible as the font renderer already shows +// a replacement character in that case (and there are a *lot* of them) +// +// Control characters are mostly fine to highlight; except: +// * U+E0020..=U+E007F are used in emoji flags. We don't highlight them right now, but we could if we tightened our heuristics. +// * U+200D is used to join characters. We highlight this but don't replace it. As our font system ignores mid-glyph highlights this mostly works to highlight unexpected uses. +// +// Nonspacing marks are handled like U+200D. This means that mid-glyph we ignore them, but +// probably causes issues with end-of-glyph usage. +// +// ref: https://invisible-characters.com +// ref: https://www.compart.com/en/unicode/category/Cf +// ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1 +// ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt +// https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt +pub fn is_invisible(c: char) -> bool { + if c <= '\u{1f}' { + c != '\t' && c != '\n' && c != '\r' + } else if c >= '\u{7f}' { + c <= '\u{9f}' + || (c.is_whitespace() && c != IDEOGRAPHIC_SPACE) + || contains(c, &FORMAT) + || contains(c, &OTHER) + } else { + false + } +} +// ASCII control characters have fancy unicode glyphs, everything else +// is replaced by a space - unless it is used in combining characters in +// which case we need to leave it in the string. +pub(crate) fn replacement(c: char) -> Option<&'static str> { + if c <= '\x1f' { + Some(C0_SYMBOLS[c as usize]) + } else if c == '\x7f' { + Some(DEL) + } else if contains(c, &PRESERVE) { + None + } else { + Some("\u{2007}") // fixed width space + } +} +// IDEOGRAPHIC SPACE is common alongside Chinese and other wide character sets. +// We don't highlight this for now (as it already shows up wide in the editor), +// but could if we tracked state in the classifier. 
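// A minimal sketch of the intended behavior, written against the helpers and tables in this module (illustrative only, not part of the patch hunks):
fn _invisible_examples() {
    assert!(is_invisible('\u{200b}')); // ZERO WIDTH SPACE is flagged as invisible
    assert!(!is_invisible('\t')); // tabs are already rendered by the editor, so they are skipped
    assert_eq!(replacement('\u{0007}'), Some("␇")); // C0 controls map to their control-picture glyphs
    assert_eq!(replacement('\u{200d}'), None); // ZERO WIDTH JOINER is preserved for emoji sequences
    assert_eq!(replacement('\u{00ad}'), Some("\u{2007}")); // a soft hyphen is replaced with a fixed-width space
}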
+const IDEOGRAPHIC_SPACE: char = '\u{3000}'; + +const C0_SYMBOLS: &'static [&'static str] = &[ + "␀", "␁", "␂", "␃", "␄", "␅", "␆", "␇", "␈", "␉", "␊", "␋", "␌", "␍", "␎", "␏", "␐", "␑", "␒", + "␓", "␔", "␕", "␖", "␗", "␘", "␙", "␚", "␛", "␜", "␝", "␞", "␟", +]; +const DEL: &'static str = "␡"; + +// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0 +pub const FORMAT: &'static [(char, char)] = &[ + ('\u{ad}', '\u{ad}'), + ('\u{600}', '\u{605}'), + ('\u{61c}', '\u{61c}'), + ('\u{6dd}', '\u{6dd}'), + ('\u{70f}', '\u{70f}'), + ('\u{890}', '\u{891}'), + ('\u{8e2}', '\u{8e2}'), + ('\u{180e}', '\u{180e}'), + ('\u{200b}', '\u{200f}'), + ('\u{202a}', '\u{202e}'), + ('\u{2060}', '\u{2064}'), + ('\u{2066}', '\u{206f}'), + ('\u{feff}', '\u{feff}'), + ('\u{fff9}', '\u{fffb}'), + ('\u{110bd}', '\u{110bd}'), + ('\u{110cd}', '\u{110cd}'), + ('\u{13430}', '\u{1343f}'), + ('\u{1bca0}', '\u{1bca3}'), + ('\u{1d173}', '\u{1d17a}'), + ('\u{e0001}', '\u{e0001}'), + ('\u{e0020}', '\u{e007f}'), +]; + +// hand-made, based on https://invisible-characters.com (excluding Cf) +pub const OTHER: &'static [(char, char)] = &[ + ('\u{034f}', '\u{034f}'), + ('\u{115F}', '\u{1160}'), + ('\u{17b4}', '\u{17b5}'), + ('\u{180b}', '\u{180d}'), + ('\u{2800}', '\u{2800}'), + ('\u{3164}', '\u{3164}'), + ('\u{fe00}', '\u{fe0d}'), + ('\u{ffa0}', '\u{ffa0}'), + ('\u{fffc}', '\u{fffc}'), + ('\u{e0100}', '\u{e01ef}'), +]; + +// a subset of FORMAT/OTHER that may appear within glyphs (kept in ascending order, which `contains` relies on) +const PRESERVE: &'static [(char, char)] = &[ + ('\u{034f}', '\u{034f}'), + ('\u{17b4}', '\u{17b5}'), + ('\u{180b}', '\u{180d}'), + ('\u{200d}', '\u{200d}'), + ('\u{e0061}', '\u{e007a}'), + ('\u{e007f}', '\u{e007f}'), +]; + +fn contains(c: char, list: &[(char, char)]) -> bool { + for (start, end) in list { + if c < *start { + return false; + } + if c <= *end { + return true; + } + } + false +} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 95126436488201..1962cf3fbde64c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -225,7 +225,6 @@ pub fn render_parsed_markdown( } }), ); - // hello let mut links = Vec::new(); let mut link_ranges = Vec::new(); @@ -3263,9 +3262,21 @@ impl Editor { } if enabled && pair.start.ends_with(text.as_ref()) { - bracket_pair = Some(pair.clone()); - is_bracket_pair_start = true; - break; + let prefix_len = pair.start.len() - text.len(); + let preceding_text_matches_prefix = prefix_len == 0 + || (selection.start.column >= (prefix_len as u32) + && snapshot.contains_str_at( + Point::new( + selection.start.row, + selection.start.column - (prefix_len as u32), + ), + &pair.start[..prefix_len], + )); + if preceding_text_matches_prefix { + bracket_pair = Some(pair.clone()); + is_bracket_pair_start = true; + break; + } } if pair.end.as_str() == text.as_ref() { bracket_pair = Some(pair.clone()); @@ -3282,8 +3293,6 @@ impl Editor { self.use_auto_surround && snapshot_settings.use_auto_surround; if selection.is_empty() { if is_bracket_pair_start { - let prefix_len = bracket_pair.start.len() - text.len(); - // If the inserted text is a suffix of an opening bracket and the // selection is preceded by the rest of the opening bracket, then // insert the closing bracket.
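// A minimal standalone sketch of the prefix check that this change hoists into bracket-pair selection (illustrative only; `line_before_cursor` is a hypothetical stand-in for the snapshot's `contains_str_at` lookup):
fn preceding_text_matches_prefix(line_before_cursor: &str, pair_start: &str, typed: &str) -> bool {
    // The typed text is a suffix of the opening bracket; the rest of the bracket
    // must already sit immediately before the cursor for this to count as a pair start.
    let prefix_len = pair_start.len() - typed.len();
    prefix_len == 0 || line_before_cursor.ends_with(&pair_start[..prefix_len])
}
// For a Python triple-quoted string, pair_start = "\"\"\"" and typed = "\"":
//   preceding_text_matches_prefix("print(\"\"", "\"\"\"", "\"") == true   // completes the pair
//   preceding_text_matches_prefix("print(", "\"\"\"", "\"") == false      // just a lone quote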
@@ -3291,15 +3300,6 @@ impl Editor { .chars_at(selection.start) .next() .map_or(true, |c| scope.should_autoclose_before(c)); - let preceding_text_matches_prefix = prefix_len == 0 - || (selection.start.column >= (prefix_len as u32) - && snapshot.contains_str_at( - Point::new( - selection.start.row, - selection.start.column - (prefix_len as u32), - ), - &bracket_pair.start[..prefix_len], - )); let is_closing_quote = if bracket_pair.end == bracket_pair.start && bracket_pair.start.len() == 1 @@ -3318,7 +3318,6 @@ impl Editor { if autoclose && bracket_pair.close && following_text_allows_autoclose - && preceding_text_matches_prefix && !is_closing_quote { let anchor = snapshot.anchor_before(selection.end); @@ -3803,9 +3802,6 @@ impl Editor { pub fn newline_below(&mut self, _: &NewlineBelow, cx: &mut ViewContext) { let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); - // - // - // let mut edits = Vec::new(); let mut rows = Vec::new(); @@ -10006,8 +10002,8 @@ impl Editor { let Some(provider) = self.semantics_provider.clone() else { return Task::ready(Ok(Navigated::No)); }; - let buffer = self.buffer.read(cx); let head = self.selections.newest::(cx).head(); + let buffer = self.buffer.read(cx); let (buffer, head) = if let Some(text_anchor) = buffer.text_anchor_for_position(head, cx) { text_anchor } else { @@ -10314,8 +10310,8 @@ impl Editor { _: &FindAllReferences, cx: &mut ViewContext, ) -> Option>> { - let multi_buffer = self.buffer.read(cx); let selection = self.selections.newest::(cx); + let multi_buffer = self.buffer.read(cx); let head = selection.head(); let multi_buffer_snapshot = multi_buffer.snapshot(cx); @@ -10722,8 +10718,9 @@ impl Editor { self.show_local_selections = true; if moving_cursor { - let rename_editor = rename.editor.read(cx); - let cursor_in_rename_editor = rename_editor.selections.newest::(cx).head(); + let cursor_in_rename_editor = rename.editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); // Update the selection to match the position of the selection inside // the rename editor. 
@@ -10837,7 +10834,7 @@ impl Editor { fn cancel_language_server_work( &mut self, - _: &CancelLanguageServerWork, + _: &actions::CancelLanguageServerWork, cx: &mut ViewContext, ) { if let Some(project) = self.project.clone() { @@ -11133,12 +11130,10 @@ impl Editor { let nested_start_row = foldable_range.0.start.row + 1; let nested_end_row = foldable_range.0.end.row; - if current_level == fold_at_level { - fold_ranges.push(foldable_range); - } - - if current_level <= fold_at_level { + if current_level < fold_at_level { stack.push((nested_start_row, nested_end_row, current_level + 1)); + } else if current_level == fold_at_level { + fold_ranges.push(foldable_range); } start_row = nested_end_row + 1; @@ -11996,9 +11991,9 @@ impl Editor { } pub fn copy_file_location(&mut self, _: &CopyFileLocation, cx: &mut ViewContext) { + let selection = self.selections.newest::(cx).start.row + 1; if let Some(file) = self.target_file(cx) { if let Some(path) = file.path().to_str() { - let selection = self.selections.newest::(cx).start.row + 1; cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); } } @@ -12774,9 +12769,10 @@ impl Editor { return; }; + let selections = self.selections.all::(cx); let buffer = self.buffer.read(cx); let mut new_selections_by_buffer = HashMap::default(); - for selection in self.selections.all::(cx) { + for selection in selections { for (buffer, range, _) in buffer.range_to_buffer_ranges(selection.start..selection.end, cx) { @@ -12821,6 +12817,7 @@ impl Editor { } fn open_excerpts_common(&mut self, split: bool, cx: &mut ViewContext) { + let selections = self.selections.all::(cx); let buffer = self.buffer.read(cx); if buffer.is_singleton() { cx.propagate(); @@ -12833,7 +12830,7 @@ impl Editor { }; let mut new_selections_by_buffer = HashMap::default(); - for selection in self.selections.all::(cx) { + for selection in selections { for (mut buffer_handle, mut range, _) in buffer.range_to_buffer_ranges(selection.range(), cx) { @@ -12949,7 +12946,7 @@ impl Editor { fn selection_replacement_ranges( &self, range: Range, - cx: &AppContext, + cx: &mut AppContext, ) -> Vec> { let selections = self.selections.all::(cx); let newest_selection = selections @@ -14592,7 +14589,7 @@ pub fn diagnostic_block_renderer( .relative() .size_full() .pl(cx.gutter_dimensions.width) - .w(cx.max_width + cx.gutter_dimensions.width) + .w(cx.max_width - cx.gutter_dimensions.full_width()) .child( div() .flex() diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 99b5cb663789b2..d56b22b4542085 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -1080,6 +1080,112 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_fold_at_level(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple( + &" + class Foo: + # Hello! + + def a(): + print(1) + + def b(): + print(2) + + + class Bar: + # World! + + def a(): + print(1) + + def b(): + print(2) + + + " + .unindent(), + cx, + ); + build_editor(buffer.clone(), cx) + }); + + _ = view.update(cx, |view, cx| { + view.fold_at_level(&FoldAtLevel { level: 2 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo: + # Hello! + + def a():⋯ + + def b():⋯ + + + class Bar: + # World! 
+ + def a():⋯ + + def b():⋯ + + + " + .unindent(), + ); + + view.fold_at_level(&FoldAtLevel { level: 1 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo:⋯ + + + class Bar:⋯ + + + " + .unindent(), + ); + + view.unfold_all(&UnfoldAll, cx); + view.fold_at_level(&FoldAtLevel { level: 0 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo: + # Hello! + + def a(): + print(1) + + def b(): + print(2) + + + class Bar: + # World! + + def a(): + print(1) + + def b(): + print(2) + + + " + .unindent(), + ); + + assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text()); + }); +} + #[gpui::test] fn test_move_cursor(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 03c93c92358606..2d87cd4a3a7794 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -69,6 +69,7 @@ use sum_tree::Bias; use theme::{ActiveTheme, Appearance, PlayerColor}; use ui::prelude::*; use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip}; +use unicode_segmentation::UnicodeSegmentation; use util::RangeExt; use util::ResultExt; use workspace::{item::Item, Workspace}; @@ -836,129 +837,131 @@ impl EditorElement { let mut selections: Vec<(PlayerColor, Vec)> = Vec::new(); let mut active_rows = BTreeMap::new(); let mut newest_selection_head = None; - let editor = self.editor.read(cx); - - if editor.show_local_selections { - let mut local_selections: Vec> = editor - .selections - .disjoint_in_range(start_anchor..end_anchor, cx); - local_selections.extend(editor.selections.pending(cx)); - let mut layouts = Vec::new(); - let newest = editor.selections.newest(cx); - for selection in local_selections.drain(..) { - let is_empty = selection.start == selection.end; - let is_newest = selection == newest; - - let layout = SelectionLayout::new( - selection, - editor.selections.line_mode, - editor.cursor_shape, - &snapshot.display_snapshot, - is_newest, - editor.leader_peer_id.is_none(), - None, - ); - if is_newest { - newest_selection_head = Some(layout.head); - } + self.editor.update(cx, |editor, cx| { + if editor.show_local_selections { + let mut local_selections: Vec> = editor + .selections + .disjoint_in_range(start_anchor..end_anchor, cx); + local_selections.extend(editor.selections.pending(cx)); + let mut layouts = Vec::new(); + let newest = editor.selections.newest(cx); + for selection in local_selections.drain(..) 
{ + let is_empty = selection.start == selection.end; + let is_newest = selection == newest; + + let layout = SelectionLayout::new( + selection, + editor.selections.line_mode, + editor.cursor_shape, + &snapshot.display_snapshot, + is_newest, + editor.leader_peer_id.is_none(), + None, + ); + if is_newest { + newest_selection_head = Some(layout.head); + } - for row in cmp::max(layout.active_rows.start.0, start_row.0) - ..=cmp::min(layout.active_rows.end.0, end_row.0) - { - let contains_non_empty_selection = - active_rows.entry(DisplayRow(row)).or_insert(!is_empty); - *contains_non_empty_selection |= !is_empty; + for row in cmp::max(layout.active_rows.start.0, start_row.0) + ..=cmp::min(layout.active_rows.end.0, end_row.0) + { + let contains_non_empty_selection = + active_rows.entry(DisplayRow(row)).or_insert(!is_empty); + *contains_non_empty_selection |= !is_empty; + } + layouts.push(layout); } - layouts.push(layout); - } - let player = if editor.read_only(cx) { - cx.theme().players().read_only() - } else { - self.style.local_player - }; + let player = if editor.read_only(cx) { + cx.theme().players().read_only() + } else { + self.style.local_player + }; - selections.push((player, layouts)); - } + selections.push((player, layouts)); + } - if let Some(collaboration_hub) = &editor.collaboration_hub { - // When following someone, render the local selections in their color. - if let Some(leader_id) = editor.leader_peer_id { - if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id) { - if let Some(participant_index) = collaboration_hub - .user_participant_indices(cx) - .get(&collaborator.user_id) + if let Some(collaboration_hub) = &editor.collaboration_hub { + // When following someone, render the local selections in their color. + if let Some(leader_id) = editor.leader_peer_id { + if let Some(collaborator) = collaboration_hub.collaborators(cx).get(&leader_id) { - if let Some((local_selection_style, _)) = selections.first_mut() { - *local_selection_style = cx - .theme() - .players() - .color_for_participant(participant_index.0); + if let Some(participant_index) = collaboration_hub + .user_participant_indices(cx) + .get(&collaborator.user_id) + { + if let Some((local_selection_style, _)) = selections.first_mut() { + *local_selection_style = cx + .theme() + .players() + .color_for_participant(participant_index.0); + } } } } - } - let mut remote_selections = HashMap::default(); - for selection in snapshot.remote_selections_in_range( - &(start_anchor..end_anchor), - collaboration_hub.as_ref(), - cx, - ) { - let selection_style = Self::get_participant_color(selection.participant_index, cx); + let mut remote_selections = HashMap::default(); + for selection in snapshot.remote_selections_in_range( + &(start_anchor..end_anchor), + collaboration_hub.as_ref(), + cx, + ) { + let selection_style = + Self::get_participant_color(selection.participant_index, cx); - // Don't re-render the leader's selections, since the local selections - // match theirs. - if Some(selection.peer_id) == editor.leader_peer_id { - continue; + // Don't re-render the leader's selections, since the local selections + // match theirs. 
+ if Some(selection.peer_id) == editor.leader_peer_id { + continue; + } + let key = HoveredCursor { + replica_id: selection.replica_id, + selection_id: selection.selection.id, + }; + + let is_shown = + editor.show_cursor_names || editor.hovered_cursors.contains_key(&key); + + remote_selections + .entry(selection.replica_id) + .or_insert((selection_style, Vec::new())) + .1 + .push(SelectionLayout::new( + selection.selection, + selection.line_mode, + selection.cursor_shape, + &snapshot.display_snapshot, + false, + false, + if is_shown { selection.user_name } else { None }, + )); } - let key = HoveredCursor { - replica_id: selection.replica_id, - selection_id: selection.selection.id, - }; - let is_shown = - editor.show_cursor_names || editor.hovered_cursors.contains_key(&key); - - remote_selections - .entry(selection.replica_id) - .or_insert((selection_style, Vec::new())) - .1 - .push(SelectionLayout::new( - selection.selection, - selection.line_mode, - selection.cursor_shape, - &snapshot.display_snapshot, - false, - false, - if is_shown { selection.user_name } else { None }, - )); + selections.extend(remote_selections.into_values()); + } else if !editor.is_focused(cx) && editor.show_cursor_when_unfocused { + let player = if editor.read_only(cx) { + cx.theme().players().read_only() + } else { + self.style.local_player + }; + let layouts = snapshot + .buffer_snapshot + .selections_in_range(&(start_anchor..end_anchor), true) + .map(move |(_, line_mode, cursor_shape, selection)| { + SelectionLayout::new( + selection, + line_mode, + cursor_shape, + &snapshot.display_snapshot, + false, + false, + None, + ) + }) + .collect::>(); + selections.push((player, layouts)); } - - selections.extend(remote_selections.into_values()); - } else if !editor.is_focused(cx) && editor.show_cursor_when_unfocused { - let player = if editor.read_only(cx) { - cx.theme().players().read_only() - } else { - self.style.local_player - }; - let layouts = snapshot - .buffer_snapshot - .selections_in_range(&(start_anchor..end_anchor), true) - .map(move |(_, line_mode, cursor_shape, selection)| { - SelectionLayout::new( - selection, - line_mode, - cursor_shape, - &snapshot.display_snapshot, - false, - false, - None, - ) - }) - .collect::>(); - selections.push((player, layouts)); - } + }); (selections, active_rows, newest_selection_head) } @@ -1040,24 +1043,17 @@ impl EditorElement { } let block_text = if let CursorShape::Block = selection.cursor_shape { snapshot - .display_chars_at(cursor_position) - .next() + .grapheme_at(cursor_position) .or_else(|| { if cursor_column == 0 { - snapshot - .placeholder_text() - .and_then(|s| s.chars().next()) - .map(|c| (c, cursor_position)) + snapshot.placeholder_text().and_then(|s| { + s.graphemes(true).next().map(|s| s.to_string().into()) + }) } else { None } }) - .and_then(|(character, _)| { - let text = if character == '\n' { - SharedString::from(" ") - } else { - SharedString::from(character.to_string()) - }; + .and_then(|text| { let len = text.len(); let font = cursor_row_layout @@ -1939,23 +1935,25 @@ impl EditorElement { return Vec::new(); } - let editor = self.editor.read(cx); - let newest_selection_head = newest_selection_head.unwrap_or_else(|| { - let newest = editor.selections.newest::(cx); - SelectionLayout::new( - newest, - editor.selections.line_mode, - editor.cursor_shape, - &snapshot.display_snapshot, - true, - true, - None, - ) - .head + let (newest_selection_head, is_relative) = self.editor.update(cx, |editor, cx| { + let newest_selection_head = 
newest_selection_head.unwrap_or_else(|| { + let newest = editor.selections.newest::(cx); + SelectionLayout::new( + newest, + editor.selections.line_mode, + editor.cursor_shape, + &snapshot.display_snapshot, + true, + true, + None, + ) + .head + }); + let is_relative = editor.should_use_relative_line_numbers(cx); + (newest_selection_head, is_relative) }); let font_size = self.style.text.font_size.to_pixels(cx.rem_size()); - let is_relative = editor.should_use_relative_line_numbers(cx); let relative_to = if is_relative { Some(newest_selection_head.row()) } else { @@ -4250,7 +4248,16 @@ fn render_inline_blame_entry( let relative_timestamp = blame_entry_relative_timestamp(&blame_entry); let author = blame_entry.author.as_deref().unwrap_or_default(); - let text = format!("{}, {}", author, relative_timestamp); + let summary_enabled = ProjectSettings::get_global(cx) + .git + .show_inline_commit_summary(); + + let text = match blame_entry.summary.as_ref() { + Some(summary) if summary_enabled => { + format!("{}, {} - {}", author, relative_timestamp, summary) + } + _ => format!("{}, {}", author, relative_timestamp), + }; let details = blame.read(cx).details_for_entry(&blame_entry); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 1ac134530532c0..9dfc379ae70eda 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -368,12 +368,15 @@ impl GitBlame { .spawn({ let snapshot = snapshot.clone(); async move { - let Blame { + let Some(Blame { entries, permalinks, messages, remote_url, - } = blame.await?; + }) = blame.await? + else { + return Ok(None); + }; let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row); let commit_details = parse_commit_messages( @@ -385,13 +388,16 @@ impl GitBlame { ) .await; - anyhow::Ok((entries, commit_details)) + anyhow::Ok(Some((entries, commit_details))) } }) .await; this.update(&mut cx, |this, cx| match result { - Ok((entries, commit_details)) => { + Ok(None) => { + // Nothing to do, e.g. no repository found + } + Ok(Some((entries, commit_details))) => { this.buffer_edits = buffer_edits; this.buffer_snapshot = snapshot; this.entries = entries; @@ -410,11 +416,7 @@ impl GitBlame { } else { // If we weren't triggered by a user, we just log errors in the background, instead of sending // notifications. - // Except for `NoRepositoryError`, which can happen often if a user has inline-blame turned on - // and opens a non-git file. - if error.downcast_ref::().is_none() { - log::error!("failed to get git blame data: {error:?}"); - } + log::error!("failed to get git blame data: {error:?}"); } }), }) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 4a636f673abb7d..31be9e93a94807 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -706,10 +706,11 @@ pub(crate) async fn find_file( ) -> Option { project .update(cx, |project, cx| { - project.resolve_existing_file_path(&candidate_file_path, buffer, cx) + project.resolve_path_in_buffer(&candidate_file_path, buffer, cx) }) .ok()? 
.await + .filter(|s| s.is_file()) } if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await { @@ -1612,4 +1613,46 @@ mod tests { assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs"); }); } + + #[gpui::test] + async fn test_hover_directories(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + // Insert a new file + let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone()); + fs.as_fake() + .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) + .await; + + cx.set_state(indoc! {" + You can't open ../diˇr because it's a directory. + "}); + + // File does not exist + let screen_coord = cx.pixel_position(indoc! {" + You can't open ../diˇr because it's a directory. + "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + + // No highlight + cx.update_editor(|editor, cx| { + assert!(editor + .snapshot(cx) + .text_highlight_ranges::() + .unwrap_or_default() + .1 + .is_empty()); + }); + + // Does not open the directory + cx.simulate_click(screen_coord, Modifiers::secondary_key()); + cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 1)); + } } diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 9200dd7b8c697c..fb198c837c3fa5 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1,5 +1,5 @@ use crate::{ - display_map::{InlayOffset, ToDisplayPoint}, + display_map::{invisibles::is_invisible, InlayOffset, ToDisplayPoint}, hover_links::{InlayHighlight, RangeInEditor}, scroll::ScrollAmount, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot, @@ -11,7 +11,7 @@ use gpui::{ StyleRefinement, Styled, Task, TextStyleRefinement, View, ViewContext, }; use itertools::Itertools; -use language::{DiagnosticEntry, Language, LanguageRegistry}; +use language::{Diagnostic, DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{Markdown, MarkdownStyle}; use multi_buffer::ToOffset; @@ -259,7 +259,7 @@ fn show_hover( } // If there's a diagnostic, assign it on the hover state and notify - let local_diagnostic = snapshot + let mut local_diagnostic = snapshot .buffer_snapshot .diagnostics_in_range::<_, usize>(anchor..anchor, false) // Find the entry with the most specific range @@ -280,6 +280,41 @@ fn show_hover( range: entry.range.to_anchors(&snapshot.buffer_snapshot), }) }); + if let Some(invisible) = snapshot + .buffer_snapshot + .chars_at(anchor) + .next() + .filter(|&c| is_invisible(c)) + { + let after = snapshot.buffer_snapshot.anchor_after( + anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(), + ); + local_diagnostic = Some(DiagnosticEntry { + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: format!("Unicode character U+{:02X}", invisible as u32), + ..Default::default() + }, + range: anchor..after, + }) + } else if let Some(invisible) = snapshot + .buffer_snapshot + .reversed_chars_at(anchor) + .next() + .filter(|&c| is_invisible(c)) + { + let before = snapshot.buffer_snapshot.anchor_before( + anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(), + ); + local_diagnostic = Some(DiagnosticEntry { + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: format!("Unicode character U+{:02X}", invisible as u32), + 
..Default::default() + }, + range: before..anchor, + }) + } let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic { let text = match local_diagnostic.diagnostic.source { diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index d3e40021737194..853f014ddb4f92 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -41,9 +41,9 @@ pub(super) fn refresh_linked_ranges(this: &mut Editor, cx: &mut ViewContext(cx); let buffer = this.buffer.read(cx); let mut applicable_selections = vec![]; - let selections = this.selections.all::(cx); let snapshot = buffer.snapshot(cx); for selection in selections { let cursor_position = selection.head(); diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index c85e60fdaa92e5..8e1c12b8cd5334 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -8,14 +8,14 @@ use std::{ use collections::HashMap; use gpui::{AppContext, Model, Pixels}; use itertools::Itertools; -use language::{Bias, Point, Selection, SelectionGoal, TextDimension, ToPoint}; +use language::{Bias, Point, Selection, SelectionGoal, TextDimension}; use util::post_inc; use crate::{ display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint}, movement::TextLayoutDetails, Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode, - ToOffset, + ToOffset, ToPoint, }; #[derive(Debug, Clone)] @@ -96,7 +96,7 @@ impl SelectionsCollection { pub fn pending>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Option> { self.pending_anchor() .as_ref() @@ -107,7 +107,7 @@ impl SelectionsCollection { self.pending.as_ref().map(|pending| pending.mode.clone()) } - pub fn all<'a, D>(&self, cx: &AppContext) -> Vec> + pub fn all<'a, D>(&self, cx: &mut AppContext) -> Vec> where D: 'a + TextDimension + Ord + Sub, { @@ -194,7 +194,7 @@ impl SelectionsCollection { pub fn disjoint_in_range<'a, D>( &self, range: Range, - cx: &AppContext, + cx: &mut AppContext, ) -> Vec> where D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, @@ -239,9 +239,10 @@ impl SelectionsCollection { pub fn newest>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { - resolve(self.newest_anchor(), &self.buffer(cx)) + let buffer = self.buffer(cx); + self.newest_anchor().map(|p| p.summary::(&buffer)) } pub fn newest_display(&self, cx: &mut AppContext) -> Selection { @@ -262,9 +263,10 @@ impl SelectionsCollection { pub fn oldest>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { - resolve(self.oldest_anchor(), &self.buffer(cx)) + let buffer = self.buffer(cx); + self.oldest_anchor().map(|p| p.summary::(&buffer)) } pub fn first_anchor(&self) -> Selection { @@ -276,14 +278,14 @@ impl SelectionsCollection { pub fn first>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { self.all(cx).first().unwrap().clone() } pub fn last>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Selection { self.all(cx).last().unwrap().clone() } @@ -298,7 +300,7 @@ impl SelectionsCollection { #[cfg(any(test, feature = "test-support"))] pub fn ranges + std::fmt::Debug>( &self, - cx: &AppContext, + cx: &mut AppContext, ) -> Vec> { self.all::(cx) .iter() @@ -475,7 +477,7 @@ impl<'a> MutableSelectionsCollection<'a> { where T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub + std::marker::Copy, { - let mut selections = self.all(self.cx); + let mut selections = 
self.collection.all(self.cx); let mut start = range.start.to_offset(&self.buffer()); let mut end = range.end.to_offset(&self.buffer()); let reversed = if start > end { @@ -649,6 +651,7 @@ impl<'a> MutableSelectionsCollection<'a> { let mut changed = false; let display_map = self.display_map(); let selections = self + .collection .all::(self.cx) .into_iter() .map(|selection| { @@ -676,6 +679,7 @@ impl<'a> MutableSelectionsCollection<'a> { let mut changed = false; let snapshot = self.buffer().clone(); let selections = self + .collection .all::(self.cx) .into_iter() .map(|selection| { @@ -869,10 +873,3 @@ where goal: s.goal, }) } - -fn resolve>( - selection: &Selection, - buffer: &MultiBufferSnapshot, -) -> Selection { - selection.map(|p| p.summary::(buffer)) -} diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 7234d97c5b77e4..de5065d2656d35 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -17,6 +17,7 @@ use project::{FakeFs, Project}; use std::{ any::TypeId, ops::{Deref, DerefMut, Range}, + path::Path, sync::{ atomic::{AtomicUsize, Ordering}, Arc, @@ -42,17 +43,18 @@ impl EditorTestContext { pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext { let fs = FakeFs::new(cx.executor()); // fs.insert_file("/file", "".to_owned()).await; + let root = Self::root_path(); fs.insert_tree( - "/root", + root, serde_json::json!({ "file": "", }), ) .await; - let project = Project::test(fs, ["/root".as_ref()], cx).await; + let project = Project::test(fs, [root], cx).await; let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/root/file", cx) + project.open_local_buffer(root.join("file"), cx) }) .await .unwrap(); @@ -71,6 +73,16 @@ impl EditorTestContext { } } + #[cfg(target_os = "windows")] + fn root_path() -> &'static Path { + Path::new("C:\\root") + } + + #[cfg(not(target_os = "windows"))] + fn root_path() -> &'static Path { + Path::new("/root") + } + pub async fn for_editor(editor: WindowHandle, cx: &mut gpui::TestAppContext) -> Self { let editor_view = editor.root_view(cx).unwrap(); Self { diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index 25179acec69ed0..1557ef21530148 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -8,7 +8,8 @@ use collections::HashMap; use futures::{Future, FutureExt}; use gpui::AsyncAppContext; use language::{ - CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate, + CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter, + LspAdapterDelegate, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use serde::Serialize; @@ -194,6 +195,7 @@ impl LspAdapter for ExtensionLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, _cx: &mut AsyncAppContext, ) -> Result { let delegate = delegate.clone(); diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 535d68326f9c3e..0a9299a8be8188 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -37,7 +37,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use indexed_docs::{IndexedDocsRegistry, ProviderId}; use language::{ LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry, - QUERY_FILENAME_PREFIXES, + 
LoadedLanguage, QUERY_FILENAME_PREFIXES, }; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -1102,14 +1102,21 @@ impl ExtensionStore { let config = std::fs::read_to_string(language_path.join("config.toml"))?; let config: LanguageConfig = ::toml::from_str(&config)?; let queries = load_plugin_queries(&language_path); - let tasks = std::fs::read_to_string(language_path.join("tasks.json")) - .ok() - .and_then(|contents| { - let definitions = serde_json_lenient::from_str(&contents).log_err()?; - Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) - }); - - Ok((config, queries, tasks)) + let context_provider = + std::fs::read_to_string(language_path.join("tasks.json")) + .ok() + .and_then(|contents| { + let definitions = + serde_json_lenient::from_str(&contents).log_err()?; + Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) + }); + + Ok(LoadedLanguage { + config, + queries, + context_provider, + toolchain_provider: None, + }) }, ); } diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index fb4e192023d914..286acdfc98e6cc 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -59,6 +59,12 @@ impl FeatureFlag for ZedPro { const NAME: &'static str = "zed-pro"; } +pub struct NotebookFeatureFlag; + +impl FeatureFlag for NotebookFeatureFlag { + const NAME: &'static str = "notebooks"; +} + pub struct AutoCommand {} impl FeatureFlag for AutoCommand { const NAME: &'static str = "auto-command"; diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 299b129d82a90d..ce0e3850576443 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -790,9 +790,9 @@ impl FileFinderDelegate { let mut path_matches = Vec::new(); let abs_file_exists = if let Ok(task) = project.update(&mut cx, |this, cx| { - this.abs_file_path_exists(query.path_query(), cx) + this.resolve_abs_file_path(query.path_query(), cx) }) { - task.await + task.await.is_some() } else { false }; diff --git a/crates/file_finder/src/new_path_prompt.rs b/crates/file_finder/src/new_path_prompt.rs index e992dd315fa729..d4492857b4958f 100644 --- a/crates/file_finder/src/new_path_prompt.rs +++ b/crates/file_finder/src/new_path_prompt.rs @@ -4,7 +4,7 @@ use gpui::{HighlightStyle, Model, StyledText}; use picker::{Picker, PickerDelegate}; use project::{Entry, PathMatchCandidateSet, Project, ProjectPath, WorktreeId}; use std::{ - path::PathBuf, + path::{Path, PathBuf}, sync::{ atomic::{self, AtomicBool}, Arc, @@ -254,6 +254,7 @@ impl PickerDelegate for NewPathDelegate { .trim() .trim_start_matches("./") .trim_start_matches('/'); + let (dir, suffix) = if let Some(index) = query.rfind('/') { let suffix = if index + 1 < query.len() { Some(query[index + 1..].to_string()) @@ -317,6 +318,14 @@ impl PickerDelegate for NewPathDelegate { }) } + fn confirm_completion( + &mut self, + _: String, + cx: &mut ViewContext>, + ) -> Option { + self.confirm_update_query(cx) + } + fn confirm_update_query(&mut self, cx: &mut ViewContext>) -> Option { let m = self.matches.get(self.selected_index)?; if m.is_dir(self.project.read(cx), cx) { @@ -422,7 +431,32 @@ impl NewPathDelegate { ) { cx.notify(); if query.is_empty() { - self.matches = vec![]; + self.matches = self + .project + .read(cx) + .worktrees(cx) + .flat_map(|worktree| { + let worktree_id = worktree.read(cx).id(); + worktree + .read(cx) + .child_entries(Path::new("")) + .filter_map(move 
|entry| { + entry.is_dir().then(|| Match { + path_match: Some(PathMatch { + score: 1.0, + positions: Default::default(), + worktree_id: worktree_id.to_usize(), + path: entry.path.clone(), + path_prefix: "".into(), + is_dir: entry.is_dir(), + distance_to_relative_ancestor: 0, + }), + suffix: None, + }) + }) + }) + .collect(); + return; } diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 0736d4189b7197..be1e91b482fc93 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -220,7 +220,11 @@ impl PickerDelegate for OpenPathDelegate { }) } - fn confirm_completion(&self, query: String) -> Option { + fn confirm_completion( + &mut self, + query: String, + _: &mut ViewContext>, + ) -> Option { Some( maybe!({ let m = self.matches.get(self.selected_index)?; diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 5ee2947448c90c..4a84c27dfd09da 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -813,6 +813,7 @@ struct FakeFsState { root: Arc>, next_inode: u64, next_mtime: SystemTime, + git_event_tx: smol::channel::Sender, event_txs: Vec>>, events_paused: bool, buffered_events: Vec, @@ -875,9 +876,11 @@ impl FakeFsState { canonical_path.clear(); match prefix { Some(prefix_component) => { - canonical_path.push(prefix_component.as_os_str()); + canonical_path = PathBuf::from(prefix_component.as_os_str()); + // Prefixes like `C:\\` are represented without their trailing slash, so we have to re-add it. + canonical_path.push(std::path::MAIN_SEPARATOR_STR); } - None => canonical_path.push("/"), + None => canonical_path = PathBuf::from(std::path::MAIN_SEPARATOR_STR), } } Component::CurDir => {} @@ -900,7 +903,7 @@ impl FakeFsState { } } entry_stack.push(entry.clone()); - canonical_path.push(name); + canonical_path = canonical_path.join(name); } else { return None; } @@ -962,9 +965,15 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> = #[cfg(any(test, feature = "test-support"))] impl FakeFs { + /// We need to use something large enough for Windows and Unix to consider this a new file. 
+ /// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior + const SYSTEMTIME_INTERVAL: u64 = 100; + pub fn new(executor: gpui::BackgroundExecutor) -> Arc { - Arc::new(Self { - executor, + let (tx, mut rx) = smol::channel::bounded::(10); + + let this = Arc::new(Self { + executor: executor.clone(), state: Mutex::new(FakeFsState { root: Arc::new(Mutex::new(FakeFsEntry::Dir { inode: 0, @@ -973,6 +982,7 @@ impl FakeFs { entries: Default::default(), git_repo_state: None, })), + git_event_tx: tx, next_mtime: SystemTime::UNIX_EPOCH, next_inode: 1, event_txs: Default::default(), @@ -981,7 +991,22 @@ impl FakeFs { read_dir_call_count: 0, metadata_call_count: 0, }), - }) + }); + + executor.spawn({ + let this = this.clone(); + async move { + while let Some(git_event) = rx.next().await { + if let Some(mut state) = this.state.try_lock() { + state.emit_event([(git_event, None)]); + } else { + panic!("Failed to lock file system state, this execution would have caused a test hang"); + } + } + } + }).detach(); + + this } pub fn set_next_mtime(&self, next_mtime: SystemTime) { @@ -995,7 +1020,7 @@ impl FakeFs { let new_mtime = state.next_mtime; let new_inode = state.next_inode; state.next_inode += 1; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state .write_path(path, move |entry| { match entry { @@ -1048,7 +1073,7 @@ impl FakeFs { let inode = state.next_inode; let mtime = state.next_mtime; state.next_inode += 1; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, @@ -1175,7 +1200,12 @@ impl FakeFs { let mut entry = entry.lock(); if let FakeFsEntry::Dir { git_repo_state, .. 
} = &mut *entry { - let repo_state = git_repo_state.get_or_insert_with(Default::default); + let repo_state = git_repo_state.get_or_insert_with(|| { + Arc::new(Mutex::new(FakeGitRepositoryState::new( + dot_git.to_path_buf(), + state.git_event_tx.clone(), + ))) + }); let mut repo_state = repo_state.lock(); f(&mut repo_state); @@ -1190,7 +1220,22 @@ impl FakeFs { pub fn set_branch_name(&self, dot_git: &Path, branch: Option>) { self.with_git_state(dot_git, true, |state| { - state.branch_name = branch.map(Into::into) + let branch = branch.map(Into::into); + state.branches.extend(branch.clone()); + state.current_branch_name = branch.map(Into::into) + }) + } + + pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) { + self.with_git_state(dot_git, true, |state| { + if let Some(first) = branches.first() { + if state.current_branch_name.is_none() { + state.current_branch_name = Some(first.to_string()) + } + } + state + .branches + .extend(branches.iter().map(ToString::to_string)); }) } @@ -1399,7 +1444,7 @@ impl Fs for FakeFs { let inode = state.next_inode; let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state.next_inode += 1; state.write_path(&cur_path, |entry| { entry.or_insert_with(|| { @@ -1425,7 +1470,7 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let inode = state.next_inode; let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state.next_inode += 1; let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, @@ -1560,7 +1605,7 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let mtime = state.next_mtime; let inode = util::post_inc(&mut state.next_inode); - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let source_entry = state.read_path(&source)?; let content = source_entry.lock().file_content(&source)?.clone(); let mut kind = Some(PathEventKind::Created); @@ -1830,7 +1875,12 @@ impl Fs for FakeFs { let mut entry = entry.lock(); if let FakeFsEntry::Dir { git_repo_state, .. 
} = &mut *entry { let state = git_repo_state - .get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default()))) + .get_or_insert_with(|| { + Arc::new(Mutex::new(FakeGitRepositoryState::new( + abs_dot_git.to_path_buf(), + state.git_event_tx.clone(), + ))) + }) .clone(); Some(git::repository::FakeGitRepository::open(state)) } else { diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index fb204fba8266ab..cf07b74ac5d8dc 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -1,4 +1,10 @@ +pub mod blame; +pub mod commit; +pub mod diff; mod hosting_provider; +mod remote; +pub mod repository; +pub mod status; use anyhow::{anyhow, Context, Result}; use serde::{Deserialize, Serialize}; @@ -7,15 +13,9 @@ use std::fmt; use std::str::FromStr; use std::sync::LazyLock; -pub use git2 as libgit; - pub use crate::hosting_provider::*; - -pub mod blame; -pub mod commit; -pub mod diff; -pub mod repository; -pub mod status; +pub use crate::remote::*; +pub use git2 as libgit; pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies")); diff --git a/crates/git/src/hosting_provider.rs b/crates/git/src/hosting_provider.rs index 988dae377f71f9..4afbcf42a419bd 100644 --- a/crates/git/src/hosting_provider.rs +++ b/crates/git/src/hosting_provider.rs @@ -69,7 +69,7 @@ pub trait GitHostingProvider { /// Returns a formatted range of line numbers to be placed in a permalink URL. fn format_line_numbers(&self, start_line: u32, end_line: u32) -> String; - fn parse_remote_url<'a>(&self, url: &'a str) -> Option>; + fn parse_remote_url(&self, url: &str) -> Option; fn extract_pull_request( &self, @@ -111,6 +111,12 @@ impl GitHostingProviderRegistry { cx.global::().0.clone() } + /// Returns the global [`GitHostingProviderRegistry`], if one is set. + pub fn try_global(cx: &AppContext) -> Option> { + cx.try_global::() + .map(|registry| registry.0.clone()) + } + /// Returns the global [`GitHostingProviderRegistry`]. /// /// Inserts a default [`GitHostingProviderRegistry`] if one does not yet exist. @@ -153,10 +159,10 @@ impl GitHostingProviderRegistry { } } -#[derive(Debug)] -pub struct ParsedGitRemote<'a> { - pub owner: &'a str, - pub repo: &'a str, +#[derive(Debug, PartialEq)] +pub struct ParsedGitRemote { + pub owner: Arc, + pub repo: Arc, } pub fn parse_git_remote_url( diff --git a/crates/git/src/remote.rs b/crates/git/src/remote.rs new file mode 100644 index 00000000000000..430836fcf3af3c --- /dev/null +++ b/crates/git/src/remote.rs @@ -0,0 +1,85 @@ +use derive_more::Deref; +use url::Url; + +/// The URL to a Git remote. 
+#[derive(Debug, PartialEq, Eq, Clone, Deref)] +pub struct RemoteUrl(Url); + +impl std::str::FromStr for RemoteUrl { + type Err = url::ParseError; + + fn from_str(input: &str) -> Result { + if input.starts_with("git@") { + // Rewrite remote URLs like `git@github.com:user/repo.git` to `ssh://git@github.com/user/repo.git` + let ssh_url = input.replacen(':', "/", 1).replace("git@", "ssh://git@"); + Ok(RemoteUrl(Url::parse(&ssh_url)?)) + } else { + Ok(RemoteUrl(Url::parse(input)?)) + } + } +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_parsing_valid_remote_urls() { + let valid_urls = vec![ + ( + "https://github.com/octocat/zed.git", + "https", + "github.com", + "/octocat/zed.git", + ), + ( + "git@github.com:octocat/zed.git", + "ssh", + "github.com", + "/octocat/zed.git", + ), + ( + "ssh://git@github.com/octocat/zed.git", + "ssh", + "github.com", + "/octocat/zed.git", + ), + ( + "file:///path/to/local/zed", + "file", + "", + "/path/to/local/zed", + ), + ]; + + for (input, expected_scheme, expected_host, expected_path) in valid_urls { + let parsed = input.parse::().expect("failed to parse URL"); + let url = parsed.0; + assert_eq!( + url.scheme(), + expected_scheme, + "unexpected scheme for {input:?}", + ); + assert_eq!( + url.host_str().unwrap_or(""), + expected_host, + "unexpected host for {input:?}", + ); + assert_eq!(url.path(), expected_path, "unexpected path for {input:?}"); + } + } + + #[test] + fn test_parsing_invalid_remote_urls() { + let invalid_urls = vec!["not_a_url", "http://"]; + + for url in invalid_urls { + assert!( + url.parse::().is_err(), + "expected \"{url}\" to not parse as a Git remote URL", + ); + } + } +} diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 1b3686f0218c9a..fe65816cc5950b 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,8 +1,9 @@ use crate::GitHostingProviderRegistry; use crate::{blame::Blame, status::GitStatus}; use anyhow::{Context, Result}; -use collections::HashMap; +use collections::{HashMap, HashSet}; use git2::BranchType; +use gpui::SharedString; use parking_lot::Mutex; use rope::Rope; use serde::{Deserialize, Serialize}; @@ -17,7 +18,7 @@ use util::ResultExt; #[derive(Clone, Debug, Hash, PartialEq)] pub struct Branch { pub is_head: bool, - pub name: Box, + pub name: SharedString, /// Timestamp of most recent commit, normalized to Unix Epoch format. 
pub unix_timestamp: Option, } @@ -41,6 +42,7 @@ pub trait GitRepository: Send + Sync { fn branches(&self) -> Result>; fn change_branch(&self, _: &str) -> Result<()>; fn create_branch(&self, _: &str) -> Result<()>; + fn branch_exits(&self, _: &str) -> Result; fn blame(&self, path: &Path, content: Rope) -> Result; } @@ -132,6 +134,18 @@ impl GitRepository for RealGitRepository { GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes) } + fn branch_exits(&self, name: &str) -> Result { + let repo = self.repository.lock(); + let branch = repo.find_branch(name, BranchType::Local); + match branch { + Ok(_) => Ok(true), + Err(e) => match e.code() { + git2::ErrorCode::NotFound => Ok(false), + _ => Err(anyhow::anyhow!(e)), + }, + } + } + fn branches(&self) -> Result> { let repo = self.repository.lock(); let local_branches = repo.branches(Some(BranchType::Local))?; @@ -139,7 +153,11 @@ impl GitRepository for RealGitRepository { .filter_map(|branch| { branch.ok().and_then(|(branch, _)| { let is_head = branch.is_head(); - let name = branch.name().ok().flatten().map(Box::from)?; + let name = branch + .name() + .ok() + .flatten() + .map(|name| name.to_string().into())?; let timestamp = branch.get().peel_to_commit().ok()?.time(); let unix_timestamp = timestamp.seconds(); let timezone_offset = timestamp.offset_minutes(); @@ -201,17 +219,20 @@ impl GitRepository for RealGitRepository { } } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct FakeGitRepository { state: Arc>, } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct FakeGitRepositoryState { + pub path: PathBuf, + pub event_emitter: smol::channel::Sender, pub index_contents: HashMap, pub blames: HashMap, pub worktree_statuses: HashMap, - pub branch_name: Option, + pub current_branch_name: Option, + pub branches: HashSet, } impl FakeGitRepository { @@ -220,6 +241,20 @@ impl FakeGitRepository { } } +impl FakeGitRepositoryState { + pub fn new(path: PathBuf, event_emitter: smol::channel::Sender) -> Self { + FakeGitRepositoryState { + path, + event_emitter, + index_contents: Default::default(), + blames: Default::default(), + worktree_statuses: Default::default(), + current_branch_name: Default::default(), + branches: Default::default(), + } + } +} + impl GitRepository for FakeGitRepository { fn reload_index(&self) {} @@ -234,7 +269,7 @@ impl GitRepository for FakeGitRepository { fn branch_name(&self) -> Option { let state = self.state.lock(); - state.branch_name.clone() + state.current_branch_name.clone() } fn head_sha(&self) -> Option { @@ -264,18 +299,41 @@ impl GitRepository for FakeGitRepository { } fn branches(&self) -> Result> { - Ok(vec![]) + let state = self.state.lock(); + let current_branch = &state.current_branch_name; + Ok(state + .branches + .iter() + .map(|branch_name| Branch { + is_head: Some(branch_name) == current_branch.as_ref(), + name: branch_name.into(), + unix_timestamp: None, + }) + .collect()) + } + + fn branch_exits(&self, name: &str) -> Result { + let state = self.state.lock(); + Ok(state.branches.contains(name)) } fn change_branch(&self, name: &str) -> Result<()> { let mut state = self.state.lock(); - state.branch_name = Some(name.to_owned()); + state.current_branch_name = Some(name.to_owned()); + state + .event_emitter + .try_send(state.path.clone()) + .expect("Dropped repo change event"); Ok(()) } fn create_branch(&self, name: &str) -> Result<()> { let mut state = self.state.lock(); - state.branch_name = Some(name.to_owned()); + state.branches.insert(name.to_owned()); + 
state + .event_emitter + .try_send(state.path.clone()) + .expect("Dropped repo change event"); Ok(()) } diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index b8ad1ed05d1605..be0ca56eef5199 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -22,8 +22,9 @@ regex.workspace = true serde.workspace = true serde_json.workspace = true url.workspace = true +util.workspace = true [dev-dependencies] -unindent.workspace = true +indoc.workspace = true serde_json.workspace = true pretty_assertions.workspace = true diff --git a/crates/git_hosting_providers/src/git_hosting_providers.rs b/crates/git_hosting_providers/src/git_hosting_providers.rs index 864faa9b495d18..2689d797f41263 100644 --- a/crates/git_hosting_providers/src/git_hosting_providers.rs +++ b/crates/git_hosting_providers/src/git_hosting_providers.rs @@ -2,6 +2,7 @@ mod providers; use std::sync::Arc; +use git::repository::GitRepository; use git::GitHostingProviderRegistry; use gpui::AppContext; @@ -10,17 +11,27 @@ pub use crate::providers::*; /// Initializes the Git hosting providers. pub fn init(cx: &AppContext) { let provider_registry = GitHostingProviderRegistry::global(cx); - - // The providers are stored in a `BTreeMap`, so insertion order matters. - // GitHub comes first. + provider_registry.register_hosting_provider(Arc::new(Bitbucket)); + provider_registry.register_hosting_provider(Arc::new(Codeberg)); + provider_registry.register_hosting_provider(Arc::new(Gitee)); provider_registry.register_hosting_provider(Arc::new(Github)); + provider_registry.register_hosting_provider(Arc::new(Gitlab::new())); + provider_registry.register_hosting_provider(Arc::new(Sourcehut)); +} - // Then GitLab. - provider_registry.register_hosting_provider(Arc::new(Gitlab)); +/// Registers additional Git hosting providers. +/// +/// These require information from the Git repository to construct, so their +/// registration is deferred until we have a Git repository initialized. +pub fn register_additional_providers( + provider_registry: Arc, + repository: Arc, +) { + let Some(origin_url) = repository.remote_url("origin") else { + return; + }; - // Then the other providers, in the order they were added. 
- provider_registry.register_hosting_provider(Arc::new(Gitee)); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - provider_registry.register_hosting_provider(Arc::new(Sourcehut)); - provider_registry.register_hosting_provider(Arc::new(Codeberg)); + if let Ok(gitlab_self_hosted) = Gitlab::from_remote_url(&origin_url) { + provider_registry.register_hosting_provider(Arc::new(gitlab_self_hosted)); + } } diff --git a/crates/git_hosting_providers/src/providers/bitbucket.rs b/crates/git_hosting_providers/src/providers/bitbucket.rs index 50c453442f03a7..59be1713e72943 100644 --- a/crates/git_hosting_providers/src/providers/bitbucket.rs +++ b/crates/git_hosting_providers/src/providers/bitbucket.rs @@ -1,6 +1,11 @@ +use std::str::FromStr; + use url::Url; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; pub struct Bitbucket; @@ -25,18 +30,22 @@ impl GitHostingProvider for Bitbucket { format!("lines-{start_line}:{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.contains("bitbucket.org") { - let (_, repo_with_owner) = url.trim_end_matches(".git").split_once("bitbucket.org")?; - let (owner, repo) = repo_with_owner - .trim_start_matches('/') - .trim_start_matches(':') - .split_once('/')?; + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "bitbucket.org" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -75,53 +84,62 @@ impl GitHostingProvider for Bitbucket { #[cfg(test)] mod tests { - use std::sync::Arc; - - use git::{parse_git_remote_url, GitHostingProviderRegistry}; + use pretty_assertions::assert_eq; use super::*; #[test] - fn test_parse_git_remote_url_bitbucket_https_with_username() { - let provider_registry = Arc::new(GitHostingProviderRegistry::new()); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - let url = "https://thorstenballzed@bitbucket.org/thorstenzed/testingrepo.git"; - let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); - assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner, "thorstenzed"); - assert_eq!(parsed.repo, "testingrepo"); + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Bitbucket + .parse_remote_url("git@bitbucket.org:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_parse_git_remote_url_bitbucket_https_without_username() { - let provider_registry = Arc::new(GitHostingProviderRegistry::new()); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - let url = "https://bitbucket.org/thorstenzed/testingrepo.git"; - let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); - assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner, "thorstenzed"); - assert_eq!(parsed.repo, "testingrepo"); + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Bitbucket + .parse_remote_url("https://bitbucket.org/zed-industries/zed.git") + .unwrap(); + + 
assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_parse_git_remote_url_bitbucket_git() { - let provider_registry = Arc::new(GitHostingProviderRegistry::new()); - provider_registry.register_hosting_provider(Arc::new(Bitbucket)); - let url = "git@bitbucket.org:thorstenzed/testingrepo.git"; - let (provider, parsed) = parse_git_remote_url(provider_registry, url).unwrap(); - assert_eq!(provider.name(), "Bitbucket"); - assert_eq!(parsed.owner, "thorstenzed"); - assert_eq!(parsed.repo, "testingrepo"); + fn test_parse_remote_url_given_https_url_with_username() { + let parsed_remote = Bitbucket + .parse_remote_url("https://thorstenballzed@bitbucket.org/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_bitbucket_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "thorstenzed", - repo: "testingrepo", - }; + fn test_build_bitbucket_permalink() { let permalink = Bitbucket.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "f00b4r", path: "main.rs", @@ -129,18 +147,17 @@ mod tests { }, ); - let expected_url = "https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs"; + let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_bitbucket_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "thorstenzed", - repo: "testingrepo", - }; + fn test_build_bitbucket_permalink_with_single_line_selection() { let permalink = Bitbucket.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "f00b4r", path: "main.rs", @@ -148,19 +165,17 @@ mod tests { }, ); - let expected_url = - "https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs#lines-7"; + let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_bitbucket_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "thorstenzed", - repo: "testingrepo", - }; + fn test_build_bitbucket_permalink_with_multi_line_selection() { let permalink = Bitbucket.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "f00b4r", path: "main.rs", @@ -169,7 +184,7 @@ mod tests { ); let expected_url = - "https://bitbucket.org/thorstenzed/testingrepo/src/f00b4r/main.rs#lines-24:48"; + "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-24:48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index 3f6a016f68fd4a..cb917823c5dbbf 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -1,3 +1,4 @@ +use std::str::FromStr; use std::sync::Arc; use anyhow::{bail, Context, Result}; @@ -9,6 +10,7 @@ use url::Url; use git::{ BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote, + RemoteUrl, }; #[derive(Debug, Deserialize)] @@ -103,19 +105,22 @@ impl 
GitHostingProvider for Codeberg { format!("L{start_line}-L{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@codeberg.org:") || url.starts_with("https://codeberg.org/") { - let repo_with_owner = url - .trim_start_matches("git@codeberg.org:") - .trim_start_matches("https://codeberg.org/") - .trim_end_matches(".git"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "codeberg.org" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -170,119 +175,91 @@ impl GitHostingProvider for Codeberg { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_codeberg_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; - let permalink = Codeberg.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/editor/src/git/permalink.rs", - selection: None, - }, - ); - - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Codeberg + .parse_remote_url("git@codeberg.org:zed-industries/zed.git") + .unwrap(); - #[test] - fn test_build_codeberg_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; - let permalink = Codeberg.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/editor/src/git/permalink.rs", - selection: Some(6..6), - }, + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } ); - - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_codeberg_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; - let permalink = Codeberg.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/editor/src/git/permalink.rs", - selection: Some(23..47), - }, - ); + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Codeberg + .parse_remote_url("https://codeberg.org/zed-industries/zed.git") + .unwrap(); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_codeberg_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_codeberg_permalink() { let permalink = 
Codeberg.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: None, }, ); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs"; + let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_codeberg_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_codeberg_permalink_with_single_line_selection() { let permalink = Codeberg.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: Some(6..6), }, ); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs#L7"; + let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_codeberg_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_codeberg_permalink_with_multi_line_selection() { let permalink = Codeberg.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: Some(23..47), }, ); - let expected_url = "https://codeberg.org/rajveermalviya/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/zed/src/main.rs#L24-L48"; + let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/gitee.rs b/crates/git_hosting_providers/src/providers/gitee.rs index 34d1da262d9ec6..5090cd0d74d775 100644 --- a/crates/git_hosting_providers/src/providers/gitee.rs +++ b/crates/git_hosting_providers/src/providers/gitee.rs @@ -1,6 +1,11 @@ +use std::str::FromStr; + use url::Url; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; pub struct Gitee; @@ -25,19 +30,22 @@ impl GitHostingProvider for Gitee { format!("L{start_line}-{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@gitee.com:") || url.starts_with("https://gitee.com/") { - let repo_with_owner = url - .trim_start_matches("git@gitee.com:") - .trim_start_matches("https://gitee.com/") - .trim_end_matches(".git"); - - let (owner, repo) = repo_with_owner.split_once('/')?; + fn parse_remote_url(&self, url: &str) -> Option { + let url = 
RemoteUrl::from_str(url).ok()?; - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "gitee.com" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -76,119 +84,91 @@ impl GitHostingProvider for Gitee { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_gitee_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", - }; - let permalink = Gitee.build_permalink( - remote, - BuildPermalinkParams { - sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/editor/src/git/permalink.rs", - selection: None, - }, - ); - - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Gitee + .parse_remote_url("git@gitee.com:zed-industries/zed.git") + .unwrap(); - #[test] - fn test_build_gitee_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", - }; - let permalink = Gitee.build_permalink( - remote, - BuildPermalinkParams { - sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/editor/src/git/permalink.rs", - selection: Some(6..6), - }, + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } ); - - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_gitee_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", - }; - let permalink = Gitee.build_permalink( - remote, - BuildPermalinkParams { - sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/editor/src/git/permalink.rs", - selection: Some(23..47), - }, - ); + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Gitee + .parse_remote_url("https://gitee.com/zed-industries/zed.git") + .unwrap(); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_gitee_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", - }; + fn test_build_gitee_permalink() { let permalink = Gitee.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: None, }, ); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs"; + let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } 
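
Editor's note: the rewritten `parse_remote_url` implementations for Bitbucket, Codeberg, Gitee, and GitHub all follow the same shape — parse the remote, bail out if the host is not the provider's, then take the first two path segments as owner and repository. Below is a minimal standalone sketch of that pattern, using `url::Url` directly rather than the repo's `RemoteUrl` wrapper (which, per the tests in this diff, also accepts SCP-style remotes such as `git@gitee.com:owner/repo.git` that plain `url::Url` cannot parse); the helper name is made up for illustration.

```rust
use url::Url;

/// Sketch of the shared host-check + path-segment parsing. Not the actual
/// provider code: the real implementations go through `git::RemoteUrl`.
fn parse_owner_and_repo(remote: &str, expected_host: &str) -> Option<(String, String)> {
    let url = Url::parse(remote).ok()?;

    // Reject remotes that point at some other host.
    if url.host_str()? != expected_host {
        return None;
    }

    // The first two path segments are the owner and the repository name.
    let mut segments = url.path_segments()?;
    let owner = segments.next()?.to_string();
    let repo = segments.next()?.trim_end_matches(".git").to_string();

    Some((owner, repo))
}

fn main() {
    assert_eq!(
        parse_owner_and_repo("https://gitee.com/zed-industries/zed.git", "gitee.com"),
        Some(("zed-industries".to_string(), "zed".to_string()))
    );
}
```

GitLab is the one provider in this diff that deviates from the two-segment rule: its implementation joins every path segment except the last into the owner, so self-hosted subgroup paths like `group/subgroup/zed.git` parse as owner `group/subgroup`.
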
#[test] - fn test_build_gitee_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", - }; + fn test_build_gitee_permalink_with_single_line_selection() { let permalink = Gitee.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: Some(6..6), }, ); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L7"; + let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_gitee_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "libkitten", - repo: "zed", - }; + fn test_build_gitee_permalink_with_multi_line_selection() { let permalink = Gitee.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: Some(23..47), }, ); - let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L24-48"; + let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 4078025fa004fc..cbd1cc73a815fb 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -1,3 +1,4 @@ +use std::str::FromStr; use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; @@ -10,7 +11,7 @@ use url::Url; use git::{ BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote, - PullRequest, + PullRequest, RemoteUrl, }; fn pull_request_number_regex() -> &'static Regex { @@ -107,19 +108,22 @@ impl GitHostingProvider for Github { format!("L{start_line}-L{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@github.com:") || url.starts_with("https://github.com/") { - let repo_with_owner = url - .trim_start_matches("git@github.com:") - .trim_start_matches("https://github.com/") - .trim_end_matches(".git"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "github.com" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?; + let repo = path_segments.next()?.trim_end_matches(".git"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -193,76 +197,82 @@ impl GitHostingProvider for Github { #[cfg(test)] mod tests { - // TODO: Replace with `indoc`. 
- use unindent::Unindent; + use indoc::indoc; + use pretty_assertions::assert_eq; use super::*; #[test] - fn test_build_github_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Github.build_permalink( - remote, - BuildPermalinkParams { - sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", - path: "crates/editor/src/git/permalink.rs", - selection: None, - }, - ); + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Github + .parse_remote_url("git@github.com:zed-industries/zed.git") + .unwrap(); - let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_github_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Github.build_permalink( - remote, - BuildPermalinkParams { - sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", - path: "crates/editor/src/git/permalink.rs", - selection: Some(6..6), - }, + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Github + .parse_remote_url("https://github.com/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } ); + } - let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + #[test] + fn test_parse_remote_url_given_https_url_with_username() { + let parsed_remote = Github + .parse_remote_url("https://jlannister@github.com/some-org/some-repo.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "some-org".into(), + repo: "some-repo".into(), + } + ); } #[test] - fn test_build_github_permalink_from_ssh_url_multi_line_selection() { + fn test_build_github_permalink_from_ssh_url() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let permalink = Github.build_permalink( remote, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", - selection: Some(23..47), + selection: None, }, ); - let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-L48"; + let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_github_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; + fn test_build_github_permalink() { let permalink = Github.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", path: "crates/zed/src/main.rs", @@ -275,55 +285,53 @@ mod tests { } #[test] - fn test_build_github_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; + fn 
test_build_github_permalink_with_single_line_selection() { let permalink = Github.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", + sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", + path: "crates/editor/src/git/permalink.rs", selection: Some(6..6), }, ); - let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L7"; + let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_github_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; + fn test_build_github_permalink_with_multi_line_selection() { let permalink = Github.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", + sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", + path: "crates/editor/src/git/permalink.rs", selection: Some(23..47), }, ); - let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-L48"; + let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-L48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] fn test_github_pull_requests() { let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", + owner: "zed-industries".into(), + repo: "zed".into(), }; let message = "This does not contain a pull request"; assert!(Github.extract_pull_request(&remote, message).is_none()); // Pull request number at end of first line - let message = r#" + let message = indoc! {r#" project panel: do not expand collapsed worktrees on "collapse all entries" (#10687) Fixes #10597 @@ -332,7 +340,7 @@ mod tests { - Fixed "project panel: collapse all entries" expanding collapsed worktrees. "# - .unindent(); + }; assert_eq!( Github @@ -344,12 +352,12 @@ mod tests { ); // Pull request number in middle of line, which we want to ignore - let message = r#" + let message = indoc! {r#" Follow-up to #10687 to fix problems See the original PR, this is a fix. 
"# - .unindent(); + }; assert_eq!(Github.extract_pull_request(&remote, &message), None); } } diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index 36ee214cf9d47c..7910379ef096de 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -1,16 +1,60 @@ +use std::str::FromStr; + +use anyhow::{anyhow, bail, Result}; use url::Url; +use util::maybe; + +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; + +#[derive(Debug)] +pub struct Gitlab { + name: String, + base_url: Url, +} + +impl Gitlab { + pub fn new() -> Self { + Self { + name: "GitLab".to_string(), + base_url: Url::parse("https://gitlab.com").unwrap(), + } + } + + pub fn from_remote_url(remote_url: &str) -> Result { + let host = maybe!({ + if let Some(remote_url) = remote_url.strip_prefix("git@") { + if let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') { + return Some(host.to_string()); + } + } -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; + Url::parse(&remote_url) + .ok() + .and_then(|remote_url| remote_url.host_str().map(|host| host.to_string())) + }) + .ok_or_else(|| anyhow!("URL has no host"))?; -pub struct Gitlab; + if !host.contains("gitlab") { + bail!("not a GitLab URL"); + } + + Ok(Self { + name: "GitLab Self-Hosted".to_string(), + base_url: Url::parse(&format!("https://{}", host))?, + }) + } +} impl GitHostingProvider for Gitlab { fn name(&self) -> String { - "GitLab".to_string() + self.name.clone() } fn base_url(&self) -> Url { - Url::parse("https://gitlab.com").unwrap() + self.base_url.clone() } fn supports_avatars(&self) -> bool { @@ -25,19 +69,22 @@ impl GitHostingProvider for Gitlab { format!("L{start_line}-{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@gitlab.com:") || url.starts_with("https://gitlab.com/") { - let repo_with_owner = url - .trim_start_matches("git@gitlab.com:") - .trim_start_matches("https://gitlab.com/") - .trim_end_matches(".git"); + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - let (owner, repo) = repo_with_owner.split_once('/')?; - - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != self.base_url.host_str()? 
{ + return None; } - None + let mut path_segments = url.path_segments()?.collect::>(); + let repo = path_segments.pop()?.trim_end_matches(".git"); + let owner = path_segments.join("/"); + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -79,16 +126,82 @@ impl GitHostingProvider for Gitlab { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_gitlab_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Gitlab.build_permalink( - remote, + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Gitlab::new() + .parse_remote_url("git@gitlab.com:zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Gitlab::new() + .parse_remote_url("https://gitlab.com/zed-industries/zed.git") + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_ssh_url() { + let remote_url = "git@gitlab.my-enterprise.com:zed-industries/zed.git"; + + let parsed_remote = Gitlab::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_https_url_with_subgroup() { + let remote_url = "https://gitlab.my-enterprise.com/group/subgroup/zed.git"; + let parsed_remote = Gitlab::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "group/subgroup".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_build_gitlab_permalink() { + let permalink = Gitlab::new().build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -101,13 +214,12 @@ mod tests { } #[test] - fn test_build_gitlab_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Gitlab.build_permalink( - remote, + fn test_build_gitlab_permalink_with_single_line_selection() { + let permalink = Gitlab::new().build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -120,13 +232,12 @@ mod tests { } #[test] - fn test_build_gitlab_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Gitlab.build_permalink( - remote, + fn test_build_gitlab_permalink_with_multi_line_selection() { + let permalink = Gitlab::new().build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", path: "crates/editor/src/git/permalink.rs", @@ -139,59 +250,44 @@ mod tests { } #[test] - fn test_build_gitlab_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; 
- let permalink = Gitlab.build_permalink( - remote, + fn test_build_gitlab_self_hosted_permalink_from_ssh_url() { + let gitlab = + Gitlab::from_remote_url("git@gitlab.some-enterprise.com:zed-industries/zed.git") + .unwrap(); + let permalink = gitlab.build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", + sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7", + path: "crates/editor/src/git/permalink.rs", selection: None, }, ); - let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs"; + let expected_url = "https://gitlab.some-enterprise.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_gitlab_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Gitlab.build_permalink( - remote, - BuildPermalinkParams { - sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", - path: "crates/zed/src/main.rs", - selection: Some(6..6), + fn test_build_gitlab_self_hosted_permalink_from_https_url() { + let gitlab = + Gitlab::from_remote_url("https://gitlab-instance.big-co.com/zed-industries/zed.git") + .unwrap(); + let permalink = gitlab.build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), }, - ); - - let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) - } - - #[test] - fn test_build_gitlab_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "zed-industries", - repo: "zed", - }; - let permalink = Gitlab.build_permalink( - remote, BuildPermalinkParams { sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa", path: "crates/zed/src/main.rs", - selection: Some(23..47), + selection: None, }, ); - let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs#L24-48"; + let expected_url = "https://gitlab-instance.big-co.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/git_hosting_providers/src/providers/sourcehut.rs b/crates/git_hosting_providers/src/providers/sourcehut.rs index 623b23ab6c893b..a2dd14a345cb09 100644 --- a/crates/git_hosting_providers/src/providers/sourcehut.rs +++ b/crates/git_hosting_providers/src/providers/sourcehut.rs @@ -1,6 +1,11 @@ +use std::str::FromStr; + use url::Url; -use git::{BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote}; +use git::{ + BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote, + RemoteUrl, +}; pub struct Sourcehut; @@ -25,21 +30,27 @@ impl GitHostingProvider for Sourcehut { format!("L{start_line}-{end_line}") } - fn parse_remote_url<'a>(&self, url: &'a str) -> Option> { - if url.starts_with("git@git.sr.ht:") || url.starts_with("https://git.sr.ht/") { - // sourcehut indicates a repo with '.git' suffix as a separate repo. - // For example, "git@git.sr.ht:~username/repo" and "git@git.sr.ht:~username/repo.git" - // are two distinct repositories. 
- let repo_with_owner = url - .trim_start_matches("git@git.sr.ht:~") - .trim_start_matches("https://git.sr.ht/~"); - - let (owner, repo) = repo_with_owner.split_once('/')?; + fn parse_remote_url(&self, url: &str) -> Option { + let url = RemoteUrl::from_str(url).ok()?; - return Some(ParsedGitRemote { owner, repo }); + let host = url.host_str()?; + if host != "git.sr.ht" { + return None; } - None + let mut path_segments = url.path_segments()?; + let owner = path_segments.next()?.trim_start_matches('~'); + // We don't trim the `.git` suffix here like we do elsewhere, as + // sourcehut treats a repo with `.git` suffix as a separate repo. + // + // For example, `git@git.sr.ht:~username/repo` and `git@git.sr.ht:~username/repo.git` + // are two distinct repositories. + let repo = path_segments.next()?; + + Some(ParsedGitRemote { + owner: owner.into(), + repo: repo.into(), + }) } fn build_commit_permalink( @@ -78,138 +89,124 @@ impl GitHostingProvider for Sourcehut { #[cfg(test)] mod tests { + use pretty_assertions::assert_eq; + use super::*; #[test] - fn test_build_sourcehut_permalink_from_ssh_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; - let permalink = Sourcehut.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/editor/src/git/permalink.rs", - selection: None, - }, - ); + fn test_parse_remote_url_given_ssh_url() { + let parsed_remote = Sourcehut + .parse_remote_url("git@git.sr.ht:~zed-industries/zed") + .unwrap(); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_sourcehut_permalink_from_ssh_url_with_git_prefix() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed.git", - }; - let permalink = Sourcehut.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/editor/src/git/permalink.rs", - selection: None, - }, - ); + fn test_parse_remote_url_given_ssh_url_with_git_suffix() { + let parsed_remote = Sourcehut + .parse_remote_url("git@git.sr.ht:~zed-industries/zed.git") + .unwrap(); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed.git".into(), + } + ); } #[test] - fn test_build_sourcehut_permalink_from_ssh_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; - let permalink = Sourcehut.build_permalink( - remote, - BuildPermalinkParams { - sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/editor/src/git/permalink.rs", - selection: Some(6..6), - }, - ); + fn test_parse_remote_url_given_https_url() { + let parsed_remote = Sourcehut + .parse_remote_url("https://git.sr.ht/~zed-industries/zed") + .unwrap(); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7"; - assert_eq!(permalink.to_string(), expected_url.to_string()) + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: 
"zed-industries".into(), + repo: "zed".into(), + } + ); } #[test] - fn test_build_sourcehut_permalink_from_ssh_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_sourcehut_permalink() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", path: "crates/editor/src/git/permalink.rs", - selection: Some(23..47), + selection: None, }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48"; + let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_sourcehut_permalink_from_https_url() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_sourcehut_permalink_with_git_suffix() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed.git".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: None, }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs"; + let expected_url = "https://git.sr.ht/~zed-industries/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_sourcehut_permalink_from_https_url_single_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_sourcehut_permalink_with_single_line_selection() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: Some(6..6), }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs#L7"; + let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7"; assert_eq!(permalink.to_string(), expected_url.to_string()) } #[test] - fn test_build_sourcehut_permalink_from_https_url_multi_line_selection() { - let remote = ParsedGitRemote { - owner: "rajveermalviya", - repo: "zed", - }; + fn test_build_sourcehut_permalink_with_multi_line_selection() { let permalink = Sourcehut.build_permalink( - remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, BuildPermalinkParams { sha: "faa6f979be417239b2e070dbbf6392b909224e0b", - path: "crates/zed/src/main.rs", + path: "crates/editor/src/git/permalink.rs", selection: Some(23..47), }, ); - let expected_url = "https://git.sr.ht/~rajveermalviya/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/zed/src/main.rs#L24-48"; + let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48"; 
assert_eq!(permalink.to_string(), expected_url.to_string()) } } diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 63e0f2b07915eb..80be035770856b 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -37,34 +37,34 @@ impl CursorPosition { } fn update_position(&mut self, editor: View, cx: &mut ViewContext) { - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); - - self.selected_count = Default::default(); - self.selected_count.selections = editor.selections.count(); - let mut last_selection: Option> = None; - for selection in editor.selections.all::(cx) { - self.selected_count.characters += buffer - .text_for_range(selection.start..selection.end) - .map(|t| t.chars().count()) - .sum::(); - if last_selection - .as_ref() - .map_or(true, |last_selection| selection.id > last_selection.id) - { - last_selection = Some(selection); + editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + + self.selected_count = Default::default(); + self.selected_count.selections = editor.selections.count(); + let mut last_selection: Option> = None; + for selection in editor.selections.all::(cx) { + self.selected_count.characters += buffer + .text_for_range(selection.start..selection.end) + .map(|t| t.chars().count()) + .sum::(); + if last_selection + .as_ref() + .map_or(true, |last_selection| selection.id > last_selection.id) + { + last_selection = Some(selection); + } } - } - for selection in editor.selections.all::(cx) { - if selection.end != selection.start { - self.selected_count.lines += (selection.end.row - selection.start.row) as usize; - if selection.end.column != 0 { - self.selected_count.lines += 1; + for selection in editor.selections.all::(cx) { + if selection.end != selection.start { + self.selected_count.lines += (selection.end.row - selection.start.row) as usize; + if selection.end.column != 0 { + self.selected_count.lines += 1; + } } } - } - self.position = last_selection.map(|s| s.head().to_point(&buffer)); - + self.position = last_selection.map(|s| s.head().to_point(&buffer)); + }); cx.notify(); } diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 0e9482b7594144..805c1f0d52f973 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -56,8 +56,8 @@ impl GoToLine { } pub fn new(active_editor: View, cx: &mut ViewContext) -> Self { - let editor = active_editor.read(cx); - let cursor = editor.selections.last::(cx).head(); + let cursor = + active_editor.update(cx, |editor, cx| editor.selections.last::(cx).head()); let line = cursor.row + 1; let column = cursor.column + 1; diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index 7e7de269b11462..97c8b666c70e46 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -485,7 +485,7 @@ impl Render for TextInput { div() .flex() .key_context("TextInput") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .cursor(CursorStyle::IBeam) .on_action(cx.listener(Self::backspace)) .on_action(cx.listener(Self::delete)) @@ -549,7 +549,7 @@ impl Render for InputExample { let num_keystrokes = self.recent_keystrokes.len(); div() .bg(rgb(0xaaaaaa)) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .flex() .flex_col() .size_full() diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index f81a2092d56af6..ffbc757369b868 100644 
--- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -217,6 +217,7 @@ pub(crate) type KeystrokeObserver = type QuitHandler = Box LocalBoxFuture<'static, ()> + 'static>; type ReleaseListener = Box; type NewViewListener = Box; +type NewModelListener = Box; /// Contains the state of the full application, and passed as a reference to a variety of callbacks. /// Other contexts such as [ModelContext], [WindowContext], and [ViewContext] deref to this type, making it the most general context type. @@ -237,6 +238,7 @@ pub struct AppContext { http_client: Arc, pub(crate) globals_by_type: FxHashMap>, pub(crate) entities: EntityMap, + pub(crate) new_model_observers: SubscriberSet, pub(crate) new_view_observers: SubscriberSet, pub(crate) windows: SlotMap>, pub(crate) window_handles: FxHashMap, @@ -256,6 +258,9 @@ pub struct AppContext { pub(crate) layout_id_buffer: Vec, // We recycle this memory across layout requests. pub(crate) propagate_event: bool, pub(crate) prompt_builder: Option, + + #[cfg(any(test, feature = "test-support", debug_assertions))] + pub(crate) name: Option<&'static str>, } impl AppContext { @@ -293,6 +298,7 @@ impl AppContext { globals_by_type: FxHashMap::default(), entities, new_view_observers: SubscriberSet::new(), + new_model_observers: SubscriberSet::new(), window_handles: FxHashMap::default(), windows: SlotMap::with_key(), keymap: Rc::new(RefCell::new(Keymap::default())), @@ -309,6 +315,9 @@ impl AppContext { layout_id_buffer: Default::default(), propagate_event: true, prompt_builder: Some(PromptBuilder::Default), + + #[cfg(any(test, feature = "test-support", debug_assertions))] + name: None, }), }); @@ -988,6 +997,7 @@ impl AppContext { } /// Move the global of the given type to the stack. + #[track_caller] pub(crate) fn lease_global(&mut self) -> GlobalLease { GlobalLease::new( self.globals_by_type @@ -1009,6 +1019,7 @@ impl AppContext { activate(); subscription } + /// Arrange for the given function to be invoked whenever a view of the specified type is created. /// The function will be passed a mutable reference to the view along with an appropriate context. pub fn observe_new_views( @@ -1028,6 +1039,31 @@ impl AppContext { ) } + pub(crate) fn new_model_observer(&self, key: TypeId, value: NewModelListener) -> Subscription { + let (subscription, activate) = self.new_model_observers.insert(key, value); + activate(); + subscription + } + + /// Arrange for the given function to be invoked whenever a view of the specified type is created. + /// The function will be passed a mutable reference to the view along with an appropriate context. + pub fn observe_new_models( + &self, + on_new: impl 'static + Fn(&mut T, &mut ModelContext), + ) -> Subscription { + self.new_model_observer( + TypeId::of::(), + Box::new(move |any_model: AnyModel, cx: &mut AppContext| { + any_model + .downcast::() + .unwrap() + .update(cx, |model_state, cx| { + on_new(model_state, cx); + }) + }), + ) + } + /// Observe the release of a model or view. The callback is invoked after the model or view /// has no more strong references but before it has been dropped. pub fn observe_release( @@ -1319,6 +1355,12 @@ impl AppContext { (task, is_first) } + + /// Get the name for this App. 
+ #[cfg(any(test, feature = "test-support", debug_assertions))] + pub fn get_name(&self) -> &'static str { + self.name.as_ref().unwrap() + } } impl Context for AppContext { @@ -1333,8 +1375,21 @@ impl Context for AppContext { ) -> Model { self.update(|cx| { let slot = cx.entities.reserve(); + let model = slot.clone(); let entity = build_model(&mut ModelContext::new(cx, slot.downgrade())); - cx.entities.insert(slot, entity) + cx.entities.insert(slot, entity); + + // Non-generic part to avoid leaking SubscriberSet to invokers of `new_view`. + fn notify_observers(cx: &mut AppContext, tid: TypeId, model: AnyModel) { + cx.new_model_observers.clone().retain(&tid, |observer| { + let any_model = model.clone(); + (observer)(any_model, cx); + true + }); + } + notify_observers(cx, TypeId::of::(), AnyModel::from(model.clone())); + + model }) } diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index 4d5452acc0e5fe..07aa466295af2e 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -536,6 +536,15 @@ impl AnyWeakModel { } } +impl std::fmt::Debug for AnyWeakModel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct(type_name::()) + .field("entity_id", &self.entity_id) + .field("entity_type", &self.entity_type) + .finish() + } +} + impl From> for AnyWeakModel { fn from(model: WeakModel) -> Self { model.any_model diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index f46cdc8e34b1a6..34449c91ec7328 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -478,6 +478,12 @@ impl TestAppContext { .await .unwrap(); } + + /// Set a name for this App. + #[cfg(any(test, feature = "test-support"))] + pub fn set_name(&mut self, name: &'static str) { + self.update(|cx| cx.name = Some(name)) + } } impl Model { diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index d77c91e6552d31..47f0a82774eb0f 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -1,8 +1,9 @@ //! A list element that can be used to render a large number of differently sized elements //! efficiently. Clients of this API need to ensure that elements outside of the scrolled -//! area do not change their height for this element to function correctly. In order to minimize -//! re-renders, this element's state is stored intrusively on your own views, so that your code -//! can coordinate directly with the list element's cached state. +//! area do not change their height for this element to function correctly. If your elements +//! do change height, notify the list element via [`ListState::splice`] or [`ListState::reset`]. +//! In order to minimize re-renders, this element's state is stored intrusively +//! on your own views, so that your code can coordinate directly with the list element's cached state. //! //! 
If all of your elements are the same height, see [`UniformList`] for a simpler API diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 9ce85aab232cd7..2379ee9f8123e7 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -340,6 +340,7 @@ impl Element for UniformList { visible_range.clone(), bounds, item_height, + self.item_count, cx, ); let available_space = size( @@ -396,6 +397,7 @@ pub trait UniformListDecoration { visible_range: Range, bounds: Bounds, item_height: Pixels, + item_count: usize, cx: &mut WindowContext, ) -> AnyElement; } diff --git a/crates/gpui/src/global.rs b/crates/gpui/src/global.rs index 05f15983644612..96f5d5fed5b197 100644 --- a/crates/gpui/src/global.rs +++ b/crates/gpui/src/global.rs @@ -57,6 +57,7 @@ pub trait UpdateGlobal { } impl UpdateGlobal for T { + #[track_caller] fn update_global(cx: &mut C, update: F) -> R where C: BorrowAppContext, diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 7ba3ce055ecc67..2952f4af8abfb7 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -306,6 +306,7 @@ where self.borrow_mut().set_global(global) } + #[track_caller] fn update_global(&mut self, f: impl FnOnce(&mut G, &mut Self) -> R) -> R where G: Global, diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index cb40a563674669..722bc89a1d9dc3 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -16,7 +16,7 @@ /// impl Render for Editor { /// fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { /// div() -/// .track_focus(&self.focus_handle) +/// .track_focus(&self.focus_handle(cx)) /// .keymap_context("Editor") /// .on_action(cx.listener(Editor::undo)) /// .on_action(cx.listener(Editor::redo)) diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index 3eaf6ff3a337b8..9a0c054a073d0e 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -75,6 +75,18 @@ impl Keymap { .filter(move |binding| binding.action().partial_eq(action)) } + /// all bindings for input returns all bindings that might match the input + /// (without checking context) + pub fn all_bindings_for_input(&self, input: &[Keystroke]) -> Vec { + self.bindings() + .rev() + .filter_map(|binding| { + binding.match_keystrokes(input).filter(|pending| !pending)?; + Some(binding.clone()) + }) + .collect() + } + /// bindings_for_input returns a list of bindings that match the given input, /// and a boolean indicating whether or not more bindings might match if /// the input was longer. 
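
Editor's note: the new `Keymap::all_bindings_for_input` differs from `bindings_for_input` in two ways — it ignores the context predicate entirely and it drops partial (pending) matches, keeping only bindings whose keystroke sequence the input fully satisfies, with later-added bindings reported first. A self-contained sketch of that rule follows, using made-up stand-in types rather than gpui's `Keymap`/`KeyBinding`; the bindings shown are illustrative, not real Zed keymap entries.

```rust
/// Toy stand-in for a key binding: a keystroke sequence plus an action name.
#[derive(Clone, Debug, PartialEq)]
struct Binding {
    keystrokes: Vec<&'static str>,
    action: &'static str,
}

/// Keep every binding whose sequence is completely matched by `input`,
/// newest binding first, without consulting any context predicate.
fn all_bindings_for_input(bindings: &[Binding], input: &[&str]) -> Vec<Binding> {
    bindings
        .iter()
        .rev() // later bindings take precedence, so report them first
        .filter(|binding| binding.keystrokes == input)
        .cloned()
        .collect()
}

fn main() {
    let bindings = vec![
        Binding { keystrokes: vec!["ctrl-k", "ctrl-u"], action: "Uppercase" },
        Binding { keystrokes: vec!["ctrl-k"], action: "Clear" },
    ];

    // The two-key binding that merely starts with "ctrl-k" is still pending,
    // so only the exact single-key match comes back.
    let matched = all_bindings_for_input(&bindings, &["ctrl-k"]);
    assert_eq!(
        matched,
        vec![Binding { keystrokes: vec!["ctrl-k"], action: "Clear" }]
    );
}
```
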
diff --git a/crates/gpui/src/keymap/binding.rs b/crates/gpui/src/keymap/binding.rs index c61210ce2531a7..2fff62c7b6cac2 100644 --- a/crates/gpui/src/keymap/binding.rs +++ b/crates/gpui/src/keymap/binding.rs @@ -69,6 +69,11 @@ impl KeyBinding { pub fn action(&self) -> &dyn Action { self.action.as_ref() } + + /// Get the predicate used to match this binding + pub fn predicate(&self) -> Option<&KeyBindingContextPredicate> { + self.context_predicate.as_ref() + } } impl std::fmt::Debug for KeyBinding { diff --git a/crates/gpui/src/keymap/context.rs b/crates/gpui/src/keymap/context.rs index 2990bff19621f0..fccc02886ba416 100644 --- a/crates/gpui/src/keymap/context.rs +++ b/crates/gpui/src/keymap/context.rs @@ -11,9 +11,12 @@ use std::fmt; pub struct KeyContext(SmallVec<[ContextEntry; 1]>); #[derive(Clone, Debug, Eq, PartialEq, Hash)] -struct ContextEntry { - key: SharedString, - value: Option, +/// An entry in a KeyContext +pub struct ContextEntry { + /// The key (or name if no value) + pub key: SharedString, + /// The value + pub value: Option, } impl<'a> TryFrom<&'a str> for KeyContext { @@ -39,6 +42,17 @@ impl KeyContext { context } + /// Returns the primary context entry (usually the name of the component) + pub fn primary(&self) -> Option<&ContextEntry> { + self.0.iter().find(|p| p.value.is_none()) + } + + /// Returns everything except the primary context entry. + pub fn secondary(&self) -> impl Iterator { + let primary = self.primary(); + self.0.iter().filter(move |&p| Some(p) != primary) + } + /// Parse a key context from a string. /// The key context format is very simple: /// - either a single identifier, such as `StatusBar` @@ -178,6 +192,20 @@ pub enum KeyBindingContextPredicate { ), } +impl fmt::Display for KeyBindingContextPredicate { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Identifier(name) => write!(f, "{}", name), + Self::Equal(left, right) => write!(f, "{} == {}", left, right), + Self::NotEqual(left, right) => write!(f, "{} != {}", left, right), + Self::Not(pred) => write!(f, "!{}", pred), + Self::Child(parent, child) => write!(f, "{} > {}", parent, child), + Self::And(left, right) => write!(f, "({} && {})", left, right), + Self::Or(left, right) => write!(f, "({} || {})", left, right), + } + } +} + impl KeyBindingContextPredicate { /// Parse a string in the same format as the keymap's context field. /// diff --git a/crates/gpui/src/platform/keystroke.rs b/crates/gpui/src/platform/keystroke.rs index 6e0da7dac22ae0..38000f4fb165fb 100644 --- a/crates/gpui/src/platform/keystroke.rs +++ b/crates/gpui/src/platform/keystroke.rs @@ -121,6 +121,32 @@ impl Keystroke { }) } + /// Produces a representation of this key that Parse can understand. + pub fn unparse(&self) -> String { + let mut str = String::new(); + if self.modifiers.control { + str.push_str("ctrl-"); + } + if self.modifiers.alt { + str.push_str("alt-"); + } + if self.modifiers.platform { + #[cfg(target_os = "macos")] + str.push_str("cmd-"); + + #[cfg(target_os = "linux")] + str.push_str("super-"); + + #[cfg(target_os = "windows")] + str.push_str("win-"); + } + if self.modifiers.shift { + str.push_str("shift-"); + } + str.push_str(&self.key); + str + } + /// Returns true if this keystroke left /// the ime system in an incomplete state. 
pub fn is_ime_in_progress(&self) -> bool { diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index 240654e57e1488..b8b698a0427a68 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -1,6 +1,7 @@ use crate::{ - black, fill, point, px, size, Bounds, Hsla, LineLayout, Pixels, Point, Result, SharedString, - StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, WrappedLineLayout, + black, fill, point, px, size, Bounds, Half, Hsla, LineLayout, Pixels, Point, Result, + SharedString, StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, + WrappedLineLayout, }; use derive_more::{Deref, DerefMut}; use smallvec::SmallVec; @@ -129,8 +130,9 @@ fn paint_line( let text_system = cx.text_system().clone(); let mut glyph_origin = origin; let mut prev_glyph_position = Point::default(); + let mut max_glyph_size = size(px(0.), px(0.)); for (run_ix, run) in layout.runs.iter().enumerate() { - let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size; + max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size; for (glyph_ix, glyph) in run.glyphs.iter().enumerate() { glyph_origin.x += glyph.position.x - prev_glyph_position.x; @@ -139,6 +141,9 @@ fn paint_line( wraps.next(); if let Some((background_origin, background_color)) = current_background.as_mut() { + if glyph_origin.x == background_origin.x { + background_origin.x -= max_glyph_size.width.half() + } cx.paint_quad(fill( Bounds { origin: *background_origin, @@ -150,6 +155,9 @@ fn paint_line( background_origin.y += line_height; } if let Some((underline_origin, underline_style)) = current_underline.as_mut() { + if glyph_origin.x == underline_origin.x { + underline_origin.x -= max_glyph_size.width.half(); + }; cx.paint_underline( *underline_origin, glyph_origin.x - underline_origin.x, @@ -161,6 +169,9 @@ fn paint_line( if let Some((strikethrough_origin, strikethrough_style)) = current_strikethrough.as_mut() { + if glyph_origin.x == strikethrough_origin.x { + strikethrough_origin.x -= max_glyph_size.width.half(); + }; cx.paint_strikethrough( *strikethrough_origin, glyph_origin.x - strikethrough_origin.x, @@ -179,7 +190,18 @@ fn paint_line( let mut finished_underline: Option<(Point, UnderlineStyle)> = None; let mut finished_strikethrough: Option<(Point, StrikethroughStyle)> = None; if glyph.index >= run_end { - if let Some(style_run) = decoration_runs.next() { + let mut style_run = decoration_runs.next(); + + // ignore style runs that apply to a partial glyph + while let Some(run) = style_run { + if glyph.index < run_end + (run.len as usize) { + break; + } + run_end += run.len as usize; + style_run = decoration_runs.next(); + } + + if let Some(style_run) = style_run { if let Some((_, background_color)) = &mut current_background { if style_run.background_color.as_ref() != Some(background_color) { finished_background = current_background.take(); @@ -239,17 +261,24 @@ fn paint_line( } } - if let Some((background_origin, background_color)) = finished_background { + if let Some((mut background_origin, background_color)) = finished_background { + let mut width = glyph_origin.x - background_origin.x; + if background_origin.x == glyph_origin.x { + background_origin.x -= max_glyph_size.width.half(); + }; cx.paint_quad(fill( Bounds { origin: background_origin, - size: size(glyph_origin.x - background_origin.x, line_height), + size: size(width, line_height), }, background_color, )); } - if let Some((underline_origin, 
underline_style)) = finished_underline { + if let Some((mut underline_origin, underline_style)) = finished_underline { + if underline_origin.x == glyph_origin.x { + underline_origin.x -= max_glyph_size.width.half(); + }; cx.paint_underline( underline_origin, glyph_origin.x - underline_origin.x, @@ -257,7 +286,12 @@ fn paint_line( ); } - if let Some((strikethrough_origin, strikethrough_style)) = finished_strikethrough { + if let Some((mut strikethrough_origin, strikethrough_style)) = + finished_strikethrough + { + if strikethrough_origin.x == glyph_origin.x { + strikethrough_origin.x -= max_glyph_size.width.half(); + }; cx.paint_strikethrough( strikethrough_origin, glyph_origin.x - strikethrough_origin.x, @@ -299,7 +333,10 @@ fn paint_line( last_line_end_x -= glyph.position.x; } - if let Some((background_origin, background_color)) = current_background.take() { + if let Some((mut background_origin, background_color)) = current_background.take() { + if last_line_end_x == background_origin.x { + background_origin.x -= max_glyph_size.width.half() + }; cx.paint_quad(fill( Bounds { origin: background_origin, @@ -309,7 +346,10 @@ fn paint_line( )); } - if let Some((underline_start, underline_style)) = current_underline.take() { + if let Some((mut underline_start, underline_style)) = current_underline.take() { + if last_line_end_x == underline_start.x { + underline_start.x -= max_glyph_size.width.half() + }; cx.paint_underline( underline_start, last_line_end_x - underline_start.x, @@ -317,7 +357,10 @@ fn paint_line( ); } - if let Some((strikethrough_start, strikethrough_style)) = current_strikethrough.take() { + if let Some((mut strikethrough_start, strikethrough_style)) = current_strikethrough.take() { + if last_line_end_x == strikethrough_start.x { + strikethrough_start.x -= max_glyph_size.width.half() + }; cx.paint_strikethrough( strikethrough_start, last_line_end_x - strikethrough_start.x, diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 2d896f2ee8d22b..e4bea94da04a7f 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3324,17 +3324,18 @@ impl<'a> WindowContext<'a> { return; } - self.pending_input_changed(); self.propagate_event = true; for binding in match_result.bindings { self.dispatch_action_on_node(node_id, binding.action.as_ref()); if !self.propagate_event { self.dispatch_keystroke_observers(event, Some(binding.action)); + self.pending_input_changed(); return; } } - self.finish_dispatch_key_event(event, dispatch_path) + self.finish_dispatch_key_event(event, dispatch_path); + self.pending_input_changed(); } fn finish_dispatch_key_event( @@ -3664,6 +3665,22 @@ impl<'a> WindowContext<'a> { receiver } + /// Returns the current context stack. + pub fn context_stack(&self) -> Vec { + let dispatch_tree = &self.window.rendered_frame.dispatch_tree; + let node_id = self + .window + .focus + .and_then(|focus_id| dispatch_tree.focusable_node_id(focus_id)) + .unwrap_or_else(|| dispatch_tree.root_node_id()); + + dispatch_tree + .dispatch_path(node_id) + .iter() + .filter_map(move |&node_id| dispatch_tree.node(node_id).context.clone()) + .collect() + } + /// Returns all available actions for the focused element. pub fn available_actions(&self) -> Vec> { let node_id = self @@ -3704,6 +3721,11 @@ impl<'a> WindowContext<'a> { ) } + /// Returns key bindings that invoke the given action on the currently focused element. 
+ pub fn all_bindings_for_input(&self, input: &[Keystroke]) -> Vec { + RefCell::borrow(&self.keymap).all_bindings_for_input(input) + } + /// Returns any bindings that would invoke the given action on the given focus handle if it were focused. pub fn bindings_for_action_in( &self, diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 607a4a4abe2faf..1f6fb54d16901e 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -271,7 +271,7 @@ impl Render for ImageView { .left_0(); div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .child(checkered_background) .child( diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 62f2f370b01ded..b41ca08c2d2efa 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -4103,6 +4103,10 @@ impl<'a> BufferChunks<'a> { diagnostic_endpoints .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); *diagnostics = diagnostic_endpoints.into_iter().peekable(); + self.hint_depth = 0; + self.error_depth = 0; + self.warning_depth = 0; + self.information_depth = 0; } } } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index c1c9cfebbead5e..e52794f81f8320 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -15,6 +15,7 @@ mod outline; pub mod proto; mod syntax_map; mod task_context; +mod toolchain; #[cfg(test)] pub mod buffer_tests; @@ -28,7 +29,7 @@ use futures::Future; use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; -pub use language_registry::LanguageName; +pub use language_registry::{LanguageName, LoadedLanguage}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use parking_lot::Mutex; use regex::Regex; @@ -61,6 +62,7 @@ use syntax_map::{QueryCursorHandle, SyntaxSnapshot}; use task::RunnableTag; pub use task_context::{ContextProvider, RunnableRange}; use theme::SyntaxTheme; +pub use toolchain::{LanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister}; use tree_sitter::{self, wasmtime, Query, QueryCursor, WasmStore}; use util::serde::default_true; @@ -502,6 +504,7 @@ pub trait LspAdapter: 'static + Send + Sync { async fn workspace_configuration( self: Arc, _: &Arc, + _: Arc, _cx: &mut AsyncAppContext, ) -> Result { Ok(serde_json::json!({})) @@ -855,6 +858,7 @@ pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Option>, pub(crate) context_provider: Option>, + pub(crate) toolchain: Option>, } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] @@ -983,6 +987,7 @@ impl Language { }) }), context_provider: None, + toolchain: None, } } @@ -991,6 +996,11 @@ impl Language { self } + pub fn with_toolchain_lister(mut self, provider: Option>) -> Self { + self.toolchain = provider; + self + } + pub fn with_queries(mut self, queries: LanguageQueries) -> Result { if let Some(query) = queries.highlights { self = self @@ -1361,6 +1371,10 @@ impl Language { self.context_provider.clone() } + pub fn toolchain_lister(&self) -> Option> { + self.toolchain.clone() + } + pub fn highlight_text<'a>( self: &'a Arc, text: &'a Rope, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index c5c445ff4ed761..86352ecf5648e4 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -4,7 
+4,7 @@ use crate::{ }, task_context::ContextProvider, with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, - LanguageServerName, LspAdapter, PLAIN_TEXT, + LanguageServerName, LspAdapter, ToolchainLister, PLAIN_TEXT, }; use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; @@ -75,6 +75,13 @@ impl<'a> From<&'a str> for LanguageName { } } +impl From for String { + fn from(value: LanguageName) -> Self { + let value: &str = &value.0; + Self::from(value) + } +} + pub struct LanguageRegistry { state: RwLock, language_server_download_dir: Option>, @@ -124,16 +131,7 @@ pub struct AvailableLanguage { name: LanguageName, grammar: Option>, matcher: LanguageMatcher, - load: Arc< - dyn Fn() -> Result<( - LanguageConfig, - LanguageQueries, - Option>, - )> - + 'static - + Send - + Sync, - >, + load: Arc Result + 'static + Send + Sync>, loaded: bool, } @@ -201,6 +199,13 @@ struct LspBinaryStatusSender { txs: Arc>>>, } +pub struct LoadedLanguage { + pub config: LanguageConfig, + pub queries: LanguageQueries, + pub context_provider: Option>, + pub toolchain_provider: Option>, +} + impl LanguageRegistry { pub fn new(executor: BackgroundExecutor) -> Self { let this = Self { @@ -285,7 +290,14 @@ impl LanguageRegistry { config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), Default::default(), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: Default::default(), + toolchain_provider: None, + context_provider: None, + }) + }, ) } @@ -426,14 +438,7 @@ impl LanguageRegistry { name: LanguageName, grammar_name: Option>, matcher: LanguageMatcher, - load: impl Fn() -> Result<( - LanguageConfig, - LanguageQueries, - Option>, - )> - + 'static - + Send - + Sync, + load: impl Fn() -> Result + 'static + Send + Sync, ) { let load = Arc::new(load); let state = &mut *self.state.write(); @@ -728,16 +733,18 @@ impl LanguageRegistry { self.executor .spawn(async move { let language = async { - let (config, queries, provider) = (language_load)()?; - - if let Some(grammar) = config.grammar.clone() { + let loaded_language = (language_load)()?; + if let Some(grammar) = loaded_language.config.grammar.clone() { let grammar = Some(this.get_or_load_grammar(grammar).await?); - Language::new_with_id(id, config, grammar) - .with_context_provider(provider) - .with_queries(queries) + + Language::new_with_id(id, loaded_language.config, grammar) + .with_context_provider(loaded_language.context_provider) + .with_toolchain_lister(loaded_language.toolchain_provider) + .with_queries(loaded_language.queries) } else { - Ok(Language::new_with_id(id, config, None) - .with_context_provider(provider)) + Ok(Language::new_with_id(id, loaded_language.config, None) + .with_context_provider(loaded_language.context_provider) + .with_toolchain_lister(loaded_language.toolchain_provider)) } } .await; diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs new file mode 100644 index 00000000000000..efb27008d0fe74 --- /dev/null +++ b/crates/language/src/toolchain.rs @@ -0,0 +1,65 @@ +//! Provides support for language toolchains. +//! +//! A language can have associated toolchains, +//! which is a set of tools used to interact with the projects written in said language. +//! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. 
+ +use std::{path::PathBuf, sync::Arc}; + +use async_trait::async_trait; +use gpui::{AsyncAppContext, SharedString}; +use settings::WorktreeId; + +use crate::LanguageName; + +/// Represents a single toolchain. +#[derive(Clone, Debug, PartialEq)] +pub struct Toolchain { + /// User-facing label + pub name: SharedString, + pub path: SharedString, + pub language_name: LanguageName, +} + +#[async_trait(?Send)] +pub trait ToolchainLister: Send + Sync { + async fn list(&self, _: PathBuf) -> ToolchainList; +} + +#[async_trait(?Send)] +pub trait LanguageToolchainStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option; +} + +type DefaultIndex = usize; +#[derive(Default, Clone)] +pub struct ToolchainList { + pub toolchains: Vec, + pub default: Option, + pub groups: Box<[(usize, SharedString)]>, +} + +impl ToolchainList { + pub fn toolchains(&self) -> &[Toolchain] { + &self.toolchains + } + pub fn default_toolchain(&self) -> Option { + self.default.and_then(|ix| self.toolchains.get(ix)).cloned() + } + pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> { + if index >= self.toolchains.len() { + return None; + } + let first_equal_or_greater = self + .groups + .partition_point(|(group_lower_bound, _)| group_lower_bound <= &index); + self.groups + .get(first_equal_or_greater.checked_sub(1)?) + .cloned() + } +} diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 74a2ed0ed0d7e1..685b022340e0bd 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -38,7 +38,7 @@ menu.workspace = true ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } parking_lot.workspace = true -proto = { workspace = true, features = ["test-support"] } +proto.workspace = true project.workspace = true schemars.workspace = true serde.workspace = true @@ -62,6 +62,7 @@ env_logger.workspace = true language = { workspace = true, features = ["test-support"] } log.workspace = true project = { workspace = true, features = ["test-support"] } +proto = { workspace = true, features = ["test-support"] } rand.workspace = true text = { workspace = true, features = ["test-support"] } unindent.workspace = true diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index fe88c73b90deb6..b7e65650b55a30 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -505,10 +505,14 @@ pub fn map_to_language_model_completion_events( LanguageModelToolUse { id: tool_use.id, name: tool_use.name, - input: serde_json::Value::from_str( - &tool_use.input_json, - ) - .map_err(|err| anyhow!(err))?, + input: if tool_use.input_json.is_empty() { + serde_json::Value::Null + } else { + serde_json::Value::from_str( + &tool_use.input_json, + ) + .map_err(|err| anyhow!(err))? 
+ }, }, )) })), diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs index a29ff3cf6a7a1a..c95bed181aa6f4 100644 --- a/crates/language_model/src/provider/ollama.rs +++ b/crates/language_model/src/provider/ollama.rs @@ -54,6 +54,7 @@ pub struct OllamaLanguageModelProvider { pub struct State { http_client: Arc, available_models: Vec, + fetch_model_task: Option>>, _subscription: Subscription, } @@ -89,6 +90,11 @@ impl State { }) } + fn restart_fetch_models_task(&mut self, cx: &mut ModelContext) { + let task = self.fetch_models(cx); + self.fetch_model_task.replace(task); + } + fn authenticate(&mut self, cx: &mut ModelContext) -> Task> { if self.is_authenticated() { Task::ready(Ok(())) @@ -102,17 +108,29 @@ impl OllamaLanguageModelProvider { pub fn new(http_client: Arc, cx: &mut AppContext) -> Self { let this = Self { http_client: http_client.clone(), - state: cx.new_model(|cx| State { - http_client, - available_models: Default::default(), - _subscription: cx.observe_global::(|this: &mut State, cx| { - this.fetch_models(cx).detach(); - cx.notify(); - }), + state: cx.new_model(|cx| { + let subscription = cx.observe_global::({ + let mut settings = AllLanguageModelSettings::get_global(cx).ollama.clone(); + move |this: &mut State, cx| { + let new_settings = &AllLanguageModelSettings::get_global(cx).ollama; + if &settings != new_settings { + settings = new_settings.clone(); + this.restart_fetch_models_task(cx); + cx.notify(); + } + } + }); + + State { + http_client, + available_models: Default::default(), + fetch_model_task: None, + _subscription: subscription, + } }), }; this.state - .update(cx, |state, cx| state.fetch_models(cx).detach()); + .update(cx, |state, cx| state.restart_fetch_models_task(cx)); this } } diff --git a/crates/language_tools/Cargo.toml b/crates/language_tools/Cargo.toml index d85f5a6e52b873..285e128eace8b0 100644 --- a/crates/language_tools/Cargo.toml +++ b/crates/language_tools/Cargo.toml @@ -19,6 +19,7 @@ copilot.workspace = true editor.workspace = true futures.workspace = true gpui.workspace = true +itertools.workspace = true language.workspace = true lsp.workspace = true project.workspace = true @@ -28,6 +29,7 @@ theme.workspace = true tree-sitter.workspace = true ui.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/language_tools/src/key_context_view.rs b/crates/language_tools/src/key_context_view.rs new file mode 100644 index 00000000000000..19f6de2a842758 --- /dev/null +++ b/crates/language_tools/src/key_context_view.rs @@ -0,0 +1,280 @@ +use gpui::{ + actions, Action, AppContext, EventEmitter, FocusHandle, FocusableView, + KeyBindingContextPredicate, KeyContext, Keystroke, MouseButton, Render, Subscription, +}; +use itertools::Itertools; +use serde_json::json; +use ui::{ + div, h_flex, px, v_flex, ButtonCommon, Clickable, FluentBuilder, InteractiveElement, Label, + LabelCommon, LabelSize, ParentElement, SharedString, StatefulInteractiveElement, Styled, + ViewContext, VisualContext, WindowContext, +}; +use ui::{Button, ButtonStyle}; +use workspace::Item; +use workspace::Workspace; + +actions!(debug, [OpenKeyContextView]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(|workspace: &mut Workspace, _| { + workspace.register_action(|workspace, _: &OpenKeyContextView, cx| { + let key_context_view = cx.new_view(KeyContextView::new); + 
workspace.add_item_to_active_pane(Box::new(key_context_view), None, true, cx) + }); + }) + .detach(); +} + +struct KeyContextView { + pending_keystrokes: Option>, + last_keystrokes: Option, + last_possibilities: Vec<(SharedString, SharedString, Option)>, + context_stack: Vec, + focus_handle: FocusHandle, + _subscriptions: [Subscription; 2], +} + +impl KeyContextView { + pub fn new(cx: &mut ViewContext) -> Self { + let sub1 = cx.observe_keystrokes(|this, e, cx| { + let mut pending = this.pending_keystrokes.take().unwrap_or_default(); + pending.push(e.keystroke.clone()); + let mut possibilities = cx.all_bindings_for_input(&pending); + possibilities.reverse(); + this.context_stack = cx.context_stack(); + this.last_keystrokes = Some( + json!(pending.iter().map(|p| p.unparse()).join(" ")) + .to_string() + .into(), + ); + this.last_possibilities = possibilities + .into_iter() + .map(|binding| { + let match_state = if let Some(predicate) = binding.predicate() { + if this.matches(predicate) { + if this.action_matches(&e.action, binding.action()) { + Some(true) + } else { + Some(false) + } + } else { + None + } + } else { + if this.action_matches(&e.action, binding.action()) { + Some(true) + } else { + Some(false) + } + }; + let predicate = if let Some(predicate) = binding.predicate() { + format!("{}", predicate) + } else { + "".to_string() + }; + let mut name = binding.action().name(); + if name == "zed::NoAction" { + name = "(null)" + } + + ( + name.to_owned().into(), + json!(predicate).to_string().into(), + match_state, + ) + }) + .collect(); + }); + let sub2 = cx.observe_pending_input(|this, cx| { + this.pending_keystrokes = cx + .pending_input_keystrokes() + .map(|k| k.iter().cloned().collect()); + if this.pending_keystrokes.is_some() { + this.last_keystrokes.take(); + } + cx.notify(); + }); + + Self { + context_stack: Vec::new(), + pending_keystrokes: None, + last_keystrokes: None, + last_possibilities: Vec::new(), + focus_handle: cx.focus_handle(), + _subscriptions: [sub1, sub2], + } + } +} + +impl EventEmitter<()> for KeyContextView {} + +impl FocusableView for KeyContextView { + fn focus_handle(&self, _: &AppContext) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} +impl KeyContextView { + fn set_context_stack(&mut self, stack: Vec, cx: &mut ViewContext) { + self.context_stack = stack; + cx.notify() + } + + fn matches(&self, predicate: &KeyBindingContextPredicate) -> bool { + let mut stack = self.context_stack.clone(); + while !stack.is_empty() { + if predicate.eval(&stack) { + return true; + } + stack.pop(); + } + false + } + + fn action_matches(&self, a: &Option>, b: &dyn Action) -> bool { + if let Some(last_action) = a { + last_action.partial_eq(b) + } else { + b.name() == "zed::NoAction" + } + } +} + +impl Item for KeyContextView { + type Event = (); + + fn to_item_events(_: &Self::Event, _: impl FnMut(workspace::item::ItemEvent)) {} + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some("Keyboard Context".into()) + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + None + } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(Self::new)) + } +} + +impl Render for KeyContextView { + fn render(&mut self, cx: &mut ViewContext) -> impl ui::IntoElement { + use itertools::Itertools; + v_flex() + .id("key-context-view") + .overflow_scroll() + .size_full() + .max_h_full() + .pt_4() + .pl_4() + .track_focus(&self.focus_handle) + .key_context("KeyContextView") + 
.on_mouse_up_out( + MouseButton::Left, + cx.listener(|this, _, cx| { + this.last_keystrokes.take(); + this.set_context_stack(cx.context_stack(), cx); + }), + ) + .on_mouse_up_out( + MouseButton::Right, + cx.listener(|_, _, cx| { + cx.defer(|this, cx| { + this.last_keystrokes.take(); + this.set_context_stack(cx.context_stack(), cx); + }); + }), + ) + .child(Label::new("Keyboard Context").size(LabelSize::Large)) + .child(Label::new("This view lets you determine the current context stack for creating custom key bindings in Zed. When a keyboard shortcut is triggered, it also shows all the possible contexts it could have triggered in, and which one matched.")) + .child( + h_flex() + .mt_4() + .gap_4() + .child( + Button::new("default", "Open Documentation") + .style(ButtonStyle::Filled) + .on_click(|_, cx| cx.open_url("https://zed.dev/docs/key-bindings")), + ) + .child( + Button::new("default", "View default keymap") + .style(ButtonStyle::Filled) + .key_binding(ui::KeyBinding::for_action( + &zed_actions::OpenDefaultKeymap, + cx, + )) + .on_click(|_, cx| { + cx.dispatch_action(workspace::SplitRight.boxed_clone()); + cx.dispatch_action(zed_actions::OpenDefaultKeymap.boxed_clone()); + }), + ) + .child( + Button::new("default", "Edit your keymap") + .style(ButtonStyle::Filled) + .key_binding(ui::KeyBinding::for_action(&zed_actions::OpenKeymap, cx)) + .on_click(|_, cx| { + cx.dispatch_action(workspace::SplitRight.boxed_clone()); + cx.dispatch_action(zed_actions::OpenKeymap.boxed_clone()); + }), + ), + ) + .child( + Label::new("Current Context Stack") + .size(LabelSize::Large) + .mt_8(), + ) + .children({ + cx.context_stack().iter().enumerate().map(|(i, context)| { + let primary = context.primary().map(|e| e.key.clone()).unwrap_or_default(); + let secondary = context + .secondary() + .map(|e| { + if let Some(value) = e.value.as_ref() { + format!("{}={}", e.key, value) + } else { + e.key.to_string() + } + }) + .join(" "); + Label::new(format!("{} {}", primary, secondary)).ml(px(12. 
* (i + 1) as f32)) + }) + }) + .child(Label::new("Last Keystroke").mt_4().size(LabelSize::Large)) + .when_some(self.pending_keystrokes.as_ref(), |el, keystrokes| { + el.child( + Label::new(format!( + "Waiting for more input: {}", + keystrokes.iter().map(|k| k.unparse()).join(" ") + )) + .ml(px(12.)), + ) + }) + .when_some(self.last_keystrokes.as_ref(), |el, keystrokes| { + el.child(Label::new(format!("Typed: {}", keystrokes)).ml_4()) + .children( + self.last_possibilities + .iter() + .map(|(name, predicate, state)| { + let (text, color) = match state { + Some(true) => ("(match)", ui::Color::Success), + Some(false) => ("(low precedence)", ui::Color::Hint), + None => ("(no match)", ui::Color::Error), + }; + h_flex() + .gap_2() + .ml_8() + .child(div().min_w(px(200.)).child(Label::new(name.clone()))) + .child(Label::new(predicate.clone())) + .child(Label::new(text).color(color)) + }), + ) + }) + } +} diff --git a/crates/language_tools/src/language_tools.rs b/crates/language_tools/src/language_tools.rs index 0a1f31f03fe82e..b7a4694cd42ebb 100644 --- a/crates/language_tools/src/language_tools.rs +++ b/crates/language_tools/src/language_tools.rs @@ -1,3 +1,4 @@ +mod key_context_view; mod lsp_log; mod syntax_tree_view; @@ -12,4 +13,5 @@ pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView}; pub fn init(cx: &mut AppContext) { lsp_log::init(cx); syntax_tree_view::init(cx); + key_context_view::init(cx); } diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index a5f77ec55f1064..e57d5dbc4a2ca6 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -1237,21 +1237,29 @@ impl Render for LspLogToolbarItemView { view.show_rpc_trace_for_server(row.server_id, cx); }), ); - menu = menu.entry( - SERVER_CAPABILITIES, - None, - cx.handler_for(&log_view, move |view, cx| { - view.show_capabilities_for_server(row.server_id, cx); - }), - ); if server_selected && row.selected_entry == LogKind::Rpc { let selected_ix = menu.select_last(); + // Each language server has: + // 1. A title. + // 2. Server logs. + // 3. Server trace. + // 4. RPC messages. + // 5. Server capabilities + // Thus, if nth server's RPC is selected, the index of selected entry should match this formula + let _expected_index = ix * 5 + 3; debug_assert_eq!( - Some(ix * 4 + 3), + Some(_expected_index), selected_ix, "Could not scroll to a just added LSP menu item" ); } + menu = menu.entry( + SERVER_CAPABILITIES, + None, + cx.handler_for(&log_view, move |view, cx| { + view.show_capabilities_for_server(row.server_id, cx); + }), + ); } menu }) diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index e2c4903e191cef..b9c960c9c3dfa4 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -128,12 +128,14 @@ impl SyntaxTreeView { fn editor_updated(&mut self, did_reparse: bool, cx: &mut ViewContext) -> Option<()> { // Find which excerpt the cursor is in, and the position within that excerpted buffer. 
let editor_state = self.editor.as_mut()?; - let editor = &editor_state.editor.read(cx); - let selection_range = editor.selections.last::(cx).range(); - let multibuffer = editor.buffer().read(cx); - let (buffer, range, excerpt_id) = multibuffer - .range_to_buffer_ranges(selection_range, cx) - .pop()?; + let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| { + let selection_range = editor.selections.last::(cx).range(); + editor + .buffer() + .read(cx) + .range_to_buffer_ranges(selection_range, cx) + .pop() + })?; // If the cursor has moved into a different excerpt, retrieve a new syntax layer // from that buffer. diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index d6746575f39b7e..29c52ba301694e 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -47,6 +47,11 @@ log.workspace = true lsp.workspace = true node_runtime.workspace = true paths.workspace = true +pet.workspace = true +pet-core.workspace = true +pet-conda.workspace = true +pet-poetry.workspace = true +pet-reporter.workspace = true project.workspace = true regex.workspace = true rope.workspace = true diff --git a/crates/languages/src/cpp/config.toml b/crates/languages/src/cpp/config.toml index f9e7a26818b24e..e78bc8ea6c73a7 100644 --- a/crates/languages/src/cpp/config.toml +++ b/crates/languages/src/cpp/config.toml @@ -1,7 +1,7 @@ name = "C++" grammar = "cpp" path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "ipp", "inl", "cu", "cuh"] -line_comments = ["// "] +line_comments = ["// ", "/// ", "//! "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 0ebe18d0a8d25d..346c58fe251717 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,7 +7,9 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{ + LanguageRegistry, LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate, +}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -205,6 +207,7 @@ impl LspAdapter for JsonLspAdapter { async fn workspace_configuration( self: Arc, _: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 7e8c09c8ad068a..455b05b64c2043 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -3,7 +3,7 @@ use gpui::{AppContext, UpdateGlobal}; use json::json_task_context; pub use language::*; use node_runtime::NodeRuntime; -use python::PythonContextProvider; +use python::{PythonContextProvider, PythonToolchainProvider}; use rust_embed::RustEmbed; use settings::SettingsStore; use smol::stream::StreamExt; @@ -61,7 +61,14 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), load_queries($name), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: None, + toolchain_provider: None, + }) + }, ); }; ($name:literal, $adapters:expr) => { @@ -75,7 +82,14 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), 
config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), load_queries($name), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: None, + toolchain_provider: None, + }) + }, ); }; ($name:literal, $adapters:expr, $context_provider:expr) => { @@ -90,11 +104,33 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.grammar.clone(), config.matcher.clone(), move || { - Ok(( - config.clone(), - load_queries($name), - Some(Arc::new($context_provider)), - )) + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: Some(Arc::new($context_provider)), + toolchain_provider: None, + }) + }, + ); + }; + ($name:literal, $adapters:expr, $context_provider:expr, $toolchain_provider:expr) => { + let config = load_config($name); + // typeck helper + let adapters: Vec> = $adapters; + for adapter in adapters { + languages.register_lsp_adapter(config.name.clone(), adapter); + } + languages.register_language( + config.name.clone(), + config.grammar.clone(), + config.matcher.clone(), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: Some(Arc::new($context_provider)), + toolchain_provider: Some($toolchain_provider), + }) }, ); }; @@ -141,7 +177,8 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu vec![Arc::new(python::PythonLspAdapter::new( node_runtime.clone(), ))], - PythonContextProvider + PythonContextProvider, + Arc::new(PythonToolchainProvider::default()) as Arc ); language!( "rust", @@ -288,6 +325,15 @@ fn load_config(name: &str) -> LanguageConfig { .with_context(|| format!("failed to load config.toml for language {name:?}")) .unwrap(); + #[cfg(not(any(feature = "load-grammars", test)))] + { + config = LanguageConfig { + name: config.name, + matcher: config.matcher, + ..Default::default() + } + } + config } diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 4b5fe3d277cd39..e73e3c86829aca 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -3,9 +3,16 @@ use async_trait::async_trait; use collections::HashMap; use gpui::AppContext; use gpui::AsyncAppContext; +use language::LanguageName; +use language::LanguageToolchainStore; +use language::Toolchain; +use language::ToolchainList; +use language::ToolchainLister; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; +use pet_core::python_environment::PythonEnvironmentKind; +use pet_core::Configuration; use project::lsp_store::language_server_settings; use serde_json::Value; @@ -200,12 +207,35 @@ impl LspAdapter for PythonLspAdapter { async fn workspace_configuration( self: Arc, adapter: &Arc, + toolchains: Arc, cx: &mut AsyncAppContext, ) -> Result { - cx.update(|cx| { - language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) - .and_then(|s| s.settings.clone()) - .unwrap_or_default() + let toolchain = toolchains + .active_toolchain(adapter.worktree_id(), LanguageName::new("Python"), cx) + .await; + cx.update(move |cx| { + let mut user_settings = + language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) + .and_then(|s| s.settings.clone()) + .unwrap_or_default(); + + // If python.pythonPath is not set in user config, do so using our toolchain picker. 
+ if let Some(toolchain) = toolchain { + if user_settings.is_null() { + user_settings = Value::Object(serde_json::Map::default()); + } + let object = user_settings.as_object_mut().unwrap(); + if let Some(python) = object + .entry("python") + .or_insert(Value::Object(serde_json::Map::default())) + .as_object_mut() + { + python + .entry("pythonPath") + .or_insert(Value::String(toolchain.path.into())); + } + } + user_settings }) } } @@ -320,6 +350,83 @@ fn python_module_name_from_relative_path(relative_path: &str) -> String { .to_string() } +#[derive(Default)] +pub(crate) struct PythonToolchainProvider {} + +static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[ + // Prioritize non-Conda environments. + PythonEnvironmentKind::Poetry, + PythonEnvironmentKind::Pipenv, + PythonEnvironmentKind::VirtualEnvWrapper, + PythonEnvironmentKind::Venv, + PythonEnvironmentKind::VirtualEnv, + PythonEnvironmentKind::Conda, + PythonEnvironmentKind::Pyenv, + PythonEnvironmentKind::GlobalPaths, + PythonEnvironmentKind::Homebrew, +]; + +fn env_priority(kind: Option) -> usize { + if let Some(kind) = kind { + ENV_PRIORITY_LIST + .iter() + .position(|blessed_env| blessed_env == &kind) + .unwrap_or(ENV_PRIORITY_LIST.len()) + } else { + // Unknown toolchains are less useful than non-blessed ones. + ENV_PRIORITY_LIST.len() + 1 + } +} + +#[async_trait(?Send)] +impl ToolchainLister for PythonToolchainProvider { + async fn list(&self, worktree_root: PathBuf) -> ToolchainList { + let environment = pet_core::os_environment::EnvironmentApi::new(); + let locators = pet::locators::create_locators( + Arc::new(pet_conda::Conda::from(&environment)), + Arc::new(pet_poetry::Poetry::from(&environment)), + &environment, + ); + let mut config = Configuration::default(); + config.workspace_directories = Some(vec![worktree_root]); + let reporter = pet_reporter::collect::create_reporter(); + pet::find::find_and_report_envs(&reporter, config, &locators, &environment, None); + + let mut toolchains = reporter + .environments + .lock() + .ok() + .map_or(Vec::new(), |mut guard| std::mem::take(&mut guard)); + toolchains.sort_by(|lhs, rhs| { + env_priority(lhs.kind) + .cmp(&env_priority(rhs.kind)) + .then_with(|| lhs.executable.cmp(&rhs.executable)) + }); + let mut toolchains: Vec<_> = toolchains + .into_iter() + .filter_map(|toolchain| { + let name = if let Some(version) = &toolchain.version { + format!("Python {version} ({:?})", toolchain.kind?) + } else { + format!("{:?}", toolchain.kind?) 
+ } + .into(); + Some(Toolchain { + name, + path: toolchain.executable?.to_str()?.to_owned().into(), + language_name: LanguageName::new("Python"), + }) + }) + .collect(); + toolchains.dedup(); + ToolchainList { + toolchains, + default: None, + groups: Default::default(), + } + } +} + #[cfg(test)] mod tests { use gpui::{BorrowAppContext, Context, ModelContext, TestAppContext}; diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index 5b646427712f98..e5f1b4d423b153 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,5 +1,14 @@ (attribute attribute: (identifier) @property) (type (identifier) @type) +(generic_type (identifier) @type) + +; Type alias +(type_alias_statement "type" @keyword) + +; TypeVar with constraints in type parameters +(type + (tuple (identifier) @type) +) ; Function calls diff --git a/crates/languages/src/rust/config.toml b/crates/languages/src/rust/config.toml index 81b9c1e2d94d7f..96207904f5bc95 100644 --- a/crates/languages/src/rust/config.toml +++ b/crates/languages/src/rust/config.toml @@ -5,9 +5,9 @@ line_comments = ["// ", "/// ", "//! "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, - { start = "r#\"", end = "\"#", close = true, newline = true }, - { start = "r##\"", end = "\"##", close = true, newline = true }, - { start = "r###\"", end = "\"###", close = true, newline = true }, + { start = "r#\"", end = "\"#", close = true, newline = true, not_in = ["string", "comment"] }, + { start = "r##\"", end = "\"##", close = true, newline = true, not_in = ["string", "comment"] }, + { start = "r###\"", end = "\"###", close = true, newline = true, not_in = ["string", "comment"] }, { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] }, diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 4ed5c742a9fc8d..6d4416c7d95cc2 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -3,7 +3,7 @@ use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; use gpui::AsyncAppContext; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -111,6 +111,7 @@ impl LspAdapter for TailwindLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index cfd7e04bc64177..345a5f0694447d 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -5,7 +5,7 @@ use async_trait::async_trait; use collections::HashMap; use gpui::AsyncAppContext; use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion}; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -230,6 +230,7 @@ impl LspAdapter for TypeScriptLspAdapter { 
async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { @@ -325,6 +326,7 @@ impl LspAdapter for EsLintLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let workspace_root = delegate.worktree_root_path(); diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index ff8637dc28dbd8..ae65488a385d23 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -2,7 +2,7 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use collections::HashMap; use gpui::AsyncAppContext; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -183,6 +183,7 @@ impl LspAdapter for VtslsLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let tsdk_path = Self::tsdk_path(delegate).await; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 9f1c468b876b4a..d8f927b770ce2c 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -3,7 +3,8 @@ use async_trait::async_trait; use futures::StreamExt; use gpui::AsyncAppContext; use language::{ - language_settings::AllLanguageSettings, LanguageServerName, LspAdapter, LspAdapterDelegate, + language_settings::AllLanguageSettings, LanguageServerName, LanguageToolchainStore, LspAdapter, + LspAdapterDelegate, }; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; @@ -92,6 +93,7 @@ impl LspAdapter for YamlLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let location = SettingsLocation { diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 10e910036b1a5c..d514b89e52c948 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -234,6 +234,10 @@ impl<'a> MarkdownParser<'a> { text.push('\n'); } + // We want to ignore any inline HTML tags in the text but keep + // the text between them + Event::InlineHtml(_) => {} + Event::Text(t) => { text.push_str(t.as_ref()); @@ -626,6 +630,8 @@ impl<'a> MarkdownParser<'a> { // Otherwise we need to insert the block after all the nested items // that have been parsed so far items.extend(block); + } else { + self.cursor += 1; } } } @@ -847,6 +853,16 @@ mod tests { ); } + #[gpui::test] + async fn test_text_with_inline_html() { + let parsed = parse("This is a paragraph with an inline HTML tag.").await; + + assert_eq!( + parsed.children, + vec![p("This is a paragraph with an inline HTML tag.", 0..63),], + ); + } + #[gpui::test] async fn test_raw_links_detection() { let parsed = parse("Checkout this https://zed.dev link").await; @@ -1090,6 +1106,26 @@ Some other content ); } + #[gpui::test] + async fn test_list_item_with_inline_html() { + let parsed = parse( + "\ +* This is a list item with an inline HTML tag. 
+", + ) + .await; + + assert_eq!( + parsed.children, + vec![list_item( + 0..67, + 1, + Unordered, + vec![p("This is a list item with an inline HTML tag.", 4..44),], + ),], + ); + } + #[gpui::test] async fn test_nested_list_with_paragraph_inside() { let parsed = parse( diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 1aa60e2a3b71b9..7e8cc42dcf7333 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -301,8 +301,8 @@ impl MarkdownPreviewView { this.parse_markdown_from_active_editor(true, cx); } EditorEvent::SelectionsChanged { .. } => { - let editor = editor.read(cx); - let selection_range = editor.selections.last::(cx).range(); + let selection_range = + editor.update(cx, |editor, cx| editor.selections.last::(cx).range()); this.selected_block = this.get_block_index_under_cursor(selection_range); this.list_state.scroll_to_reveal_item(this.selected_block); cx.notify(); @@ -479,7 +479,7 @@ impl Render for MarkdownPreviewView { v_flex() .id("MarkdownPreview") .key_context("MarkdownPreview") - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .bg(cx.theme().colors().editor_background) .p_4() diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 1d82d06ad85705..18965fe0488ec9 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -194,9 +194,11 @@ impl PickerDelegate for OutlineViewDelegate { }) .collect(); - let editor = self.active_editor.read(cx); - let cursor_offset = editor.selections.newest::(cx).head(); - let buffer = editor.buffer().read(cx).snapshot(cx); + let (buffer, cursor_offset) = self.active_editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx).snapshot(cx); + let cursor_offset = editor.selections.newest::(cx).head(); + (buffer, cursor_offset) + }); selected_index = self .outline .items diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index 824ea70735d9ca..be7653db685e96 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -30,8 +30,10 @@ search.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +smallvec.workspace = true smol.workspace = true theme.workspace = true +ui.workspace = true util.workspace = true worktree.workspace = true workspace.workspace = true diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 25dd5cba8dcbfa..6ffac21021b06e 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -5,7 +5,7 @@ use std::{ cmp, hash::Hash, ops::Range, - path::{Path, PathBuf}, + path::{Path, PathBuf, MAIN_SEPARATOR_STR}, sync::{atomic::AtomicBool, Arc, OnceLock}, time::Duration, u32, @@ -17,31 +17,33 @@ use db::kvp::KEY_VALUE_STORE; use editor::{ display_map::ToDisplayPoint, items::{entry_git_aware_label_color, entry_label_color}, - scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor}, - AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, EditorMode, ExcerptId, ExcerptRange, - MultiBufferSnapshot, RangeToAnchorExt, + scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor, ScrollbarAutoHide}, + AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, EditorMode, EditorSettings, ExcerptId, + ExcerptRange, MultiBufferSnapshot, RangeToAnchorExt, ShowScrollbar, }; use file_icons::FileIcons; use fuzzy::{match_strings, 
StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, - AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, - EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, - KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, - SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, - VisualContext, WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, + AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, + Div, ElementId, EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, + IntoElement, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, Model, MouseButton, + MouseDownEvent, ParentElement, Pixels, Point, Render, SharedString, Stateful, + StatefulInteractiveElement as _, Styled, Subscription, Task, UniformListScrollHandle, View, + ViewContext, VisualContext, WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrev}; -use outline_panel_settings::{OutlinePanelDockPosition, OutlinePanelSettings}; +use outline_panel_settings::{OutlinePanelDockPosition, OutlinePanelSettings, ShowIndentGuides}; use project::{File, Fs, Item, Project}; use search::{BufferSearchBar, ProjectSearchView}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; use smol::channel; use theme::{SyntaxTheme, ThemeSettings}; +use ui::{IndentGuideColors, IndentGuideLayout}; use util::{debug_panic, RangeExt, ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, @@ -50,7 +52,8 @@ use workspace::{ ui::{ h_flex, v_flex, ActiveTheme, ButtonCommon, Clickable, Color, ContextMenu, FluentBuilder, HighlightedLabel, Icon, IconButton, IconButtonShape, IconName, IconSize, Label, - LabelCommon, ListItem, Selectable, Spacing, StyledExt, StyledTypography, Tooltip, + LabelCommon, ListItem, Scrollbar, ScrollbarState, Selectable, Spacing, StyledExt, + StyledTypography, Tooltip, }, OpenInTerminal, WeakItemHandle, Workspace, }; @@ -115,6 +118,11 @@ pub struct OutlinePanel { cached_entries: Vec, filter_editor: View, mode: ItemsDisplayMode, + show_scrollbar: bool, + vertical_scrollbar_state: ScrollbarState, + horizontal_scrollbar_state: ScrollbarState, + hide_scrollbar_task: Option>, + max_width_item_index: Option, } enum ItemsDisplayMode { @@ -254,14 +262,14 @@ impl SearchState { #[derive(Debug)] enum SelectedEntry { Invalidated(Option), - Valid(PanelEntry), + Valid(PanelEntry, usize), None, } impl SelectedEntry { fn invalidate(&mut self) { match std::mem::replace(self, SelectedEntry::None) { - Self::Valid(entry) => *self = Self::Invalidated(Some(entry)), + Self::Valid(entry, _) => *self = Self::Invalidated(Some(entry)), Self::None => *self = Self::Invalidated(None), other => *self = other, } @@ -623,6 +631,9 @@ impl OutlinePanel { let focus_handle = cx.focus_handle(); let focus_subscription = cx.on_focus(&focus_handle, Self::focus_in); + let focus_out_subscription = cx.on_focus_out(&focus_handle, |outline_panel, _, cx| { + outline_panel.hide_scrollbar(cx); + }); let workspace_subscription = cx.subscribe( &workspace .weak_handle() @@ -673,6 +684,8 @@ impl OutlinePanel { } }); + let scroll_handle = 
UniformListScrollHandle::new(); + let mut outline_panel = Self { mode: ItemsDisplayMode::Outline, active: false, @@ -680,7 +693,14 @@ impl OutlinePanel { workspace: workspace_handle, project, fs: workspace.app_state().fs.clone(), - scroll_handle: UniformListScrollHandle::new(), + show_scrollbar: !Self::should_autohide_scrollbar(cx), + hide_scrollbar_task: None, + vertical_scrollbar_state: ScrollbarState::new(scroll_handle.clone()) + .parent_view(cx.view()), + horizontal_scrollbar_state: ScrollbarState::new(scroll_handle.clone()) + .parent_view(cx.view()), + max_width_item_index: None, + scroll_handle, focus_handle, filter_editor, fs_entries: Vec::new(), @@ -704,6 +724,7 @@ impl OutlinePanel { settings_subscription, icons_subscription, focus_subscription, + focus_out_subscription, workspace_subscription, filter_update_subscription, ], @@ -1605,16 +1626,11 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; - let excerpt_range = range.context.to_point(&buffer_snapshot); - let label_element = Label::new(format!( - "Lines {}- {}", - excerpt_range.start.row + 1, - excerpt_range.end.row + 1, - )) - .single_line() - .color(color) - .into_any_element(); + let label = self.excerpt_label(buffer_id, range, cx)?; + let label_element = Label::new(label) + .single_line() + .color(color) + .into_any_element(); Some(self.entry_element( PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, range.clone())), @@ -1627,6 +1643,21 @@ impl OutlinePanel { )) } + fn excerpt_label( + &self, + buffer_id: BufferId, + range: &ExcerptRange, + cx: &AppContext, + ) -> Option { + let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; + let excerpt_range = range.context.to_point(&buffer_snapshot); + Some(format!( + "Lines {}- {}", + excerpt_range.start.row + 1, + excerpt_range.end.row + 1, + )) + } + fn render_outline( &self, buffer_id: BufferId, @@ -2409,11 +2440,9 @@ impl OutlinePanel { editor: &View, cx: &mut ViewContext, ) -> Option { - let selection = editor - .read(cx) - .selections - .newest::(cx) - .head(); + let selection = editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).head() + }); let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(cx)); let multi_buffer = editor.read(cx).buffer(); let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); @@ -2794,10 +2823,11 @@ impl OutlinePanel { else { return; }; - let new_cached_entries = new_cached_entries.await; + let (new_cached_entries, max_width_item_index) = new_cached_entries.await; outline_panel .update(&mut cx, |outline_panel, cx| { outline_panel.cached_entries = new_cached_entries; + outline_panel.max_width_item_index = max_width_item_index; if outline_panel.selected_entry.is_invalidated() { if let Some(new_selected_entry) = outline_panel.active_editor().and_then(|active_editor| { @@ -2820,12 +2850,10 @@ impl OutlinePanel { is_singleton: bool, query: Option, cx: &mut ViewContext<'_, Self>, - ) -> Task> { + ) -> Task<(Vec, Option)> { let project = self.project.clone(); cx.spawn(|outline_panel, mut cx| async move { - let mut entries = Vec::new(); - let mut match_candidates = Vec::new(); - let mut added_contexts = HashSet::default(); + let mut generation_state = GenerationState::default(); let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; @@ -2945,9 +2973,7 @@ impl OutlinePanel { folded_dirs, ); outline_panel.push_entry( - &mut entries, - &mut 
match_candidates, - &mut added_contexts, + &mut generation_state, track_matches, new_folded_dirs, folded_depth, @@ -2984,9 +3010,7 @@ impl OutlinePanel { .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, - &mut added_contexts, + &mut generation_state, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3010,9 +3034,7 @@ impl OutlinePanel { .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, - &mut added_contexts, + &mut generation_state, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3047,9 +3069,7 @@ impl OutlinePanel { && (should_add || (query.is_some() && folded_dirs_entry.is_none())) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, - &mut added_contexts, + &mut generation_state, track_matches, PanelEntry::Fs(entry.clone()), depth, @@ -3061,9 +3081,7 @@ impl OutlinePanel { ItemsDisplayMode::Search(_) => { if is_singleton || query.is_some() || (should_add && is_expanded) { outline_panel.add_search_entries( - &mut entries, - &mut match_candidates, - &mut added_contexts, + &mut generation_state, entry.clone(), depth, query.clone(), @@ -3089,15 +3107,13 @@ impl OutlinePanel { }; if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider { outline_panel.add_excerpt_entries( + &mut generation_state, buffer_id, entry_excerpts, depth, track_matches, is_singleton, query.as_deref(), - &mut entries, - &mut match_candidates, - &mut added_contexts, cx, ); } @@ -3106,14 +3122,12 @@ impl OutlinePanel { if is_singleton && matches!(entry, FsEntry::File(..) | FsEntry::ExternalFile(..)) - && !entries.iter().any(|item| { + && !generation_state.entries.iter().any(|item| { matches!(item.entry, PanelEntry::Outline(..) 
| PanelEntry::Search(_)) }) { outline_panel.push_entry( - &mut entries, - &mut match_candidates, - &mut added_contexts, + &mut generation_state, track_matches, PanelEntry::Fs(entry.clone()), 0, @@ -3130,9 +3144,7 @@ impl OutlinePanel { .map_or(true, |parent| parent.expanded); if parent_expanded || query.is_some() { outline_panel.push_entry( - &mut entries, - &mut match_candidates, - &mut added_contexts, + &mut generation_state, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3141,27 +3153,20 @@ impl OutlinePanel { } } }) else { - return Vec::new(); + return (Vec::new(), None); }; - outline_panel - .update(&mut cx, |outline_panel, _| { - if matches!(outline_panel.mode, ItemsDisplayMode::Search(_)) { - cleanup_fs_entries_without_search_children( - &outline_panel.collapsed_entries, - &mut entries, - &mut match_candidates, - &mut added_contexts, - ); - } - }) - .ok(); - let Some(query) = query else { - return entries; + return ( + generation_state.entries, + generation_state + .max_width_estimate_and_index + .map(|(_, index)| index), + ); }; + let mut matched_ids = match_strings( - &match_candidates, + &generation_state.match_candidates, &query, true, usize::MAX, @@ -3174,7 +3179,7 @@ impl OutlinePanel { .collect::>(); let mut id = 0; - entries.retain_mut(|cached_entry| { + generation_state.entries.retain_mut(|cached_entry| { let retain = match matched_ids.remove(&id) { Some(string_match) => { cached_entry.string_match = Some(string_match); @@ -3186,16 +3191,19 @@ impl OutlinePanel { retain }); - entries + ( + generation_state.entries, + generation_state + .max_width_estimate_and_index + .map(|(_, index)| index), + ) }) } #[allow(clippy::too_many_arguments)] fn push_entry( &self, - entries: &mut Vec, - match_candidates: &mut Vec, - added_contexts: &mut HashSet, + state: &mut GenerationState, track_matches: bool, entry: PanelEntry, depth: usize, @@ -3215,57 +3223,58 @@ impl OutlinePanel { }; if track_matches { - let id = entries.len(); + let id = state.entries.len(); match &entry { PanelEntry::Fs(fs_entry) => { if let Some(file_name) = self.relative_path(fs_entry, cx).as_deref().map(file_name) { - if added_contexts.insert(file_name.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: file_name.to_string(), - char_bag: file_name.chars().collect(), - }); - } + state.match_candidates.push(StringMatchCandidate { + id, + string: file_name.to_string(), + char_bag: file_name.chars().collect(), + }); } } PanelEntry::FoldedDirs(worktree_id, entries) => { let dir_names = self.dir_names_string(entries, *worktree_id, cx); { - if added_contexts.insert(dir_names.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: dir_names.clone(), - char_bag: dir_names.chars().collect(), - }); - } + state.match_candidates.push(StringMatchCandidate { + id, + string: dir_names.clone(), + char_bag: dir_names.chars().collect(), + }); } } PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Outline(_, _, outline) => { - if added_contexts.insert(outline.text.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: outline.text.clone(), - char_bag: outline.text.chars().collect(), - }); - } + state.match_candidates.push(StringMatchCandidate { + id, + string: outline.text.clone(), + char_bag: outline.text.chars().collect(), + }); } OutlineEntry::Excerpt(..) 
=> {} }, PanelEntry::Search(new_search_entry) => { - if added_contexts.insert(new_search_entry.render_data.context_text.clone()) { - match_candidates.push(StringMatchCandidate { - id, - char_bag: new_search_entry.render_data.context_text.chars().collect(), - string: new_search_entry.render_data.context_text.clone(), - }); - } + state.match_candidates.push(StringMatchCandidate { + id, + char_bag: new_search_entry.render_data.context_text.chars().collect(), + string: new_search_entry.render_data.context_text.clone(), + }); } } } - entries.push(CachedEntry { + + let width_estimate = self.width_estimate(depth, &entry, cx); + if Some(width_estimate) + > state + .max_width_estimate_and_index + .map(|(estimate, _)| estimate) + { + state.max_width_estimate_and_index = Some((width_estimate, state.entries.len())); + } + state.entries.push(CachedEntry { depth, entry, string_match: None, @@ -3400,15 +3409,13 @@ impl OutlinePanel { #[allow(clippy::too_many_arguments)] fn add_excerpt_entries( &self, + state: &mut GenerationState, buffer_id: BufferId, entries_to_add: &[ExcerptId], parent_depth: usize, track_matches: bool, is_singleton: bool, query: Option<&str>, - entries: &mut Vec, - match_candidates: &mut Vec, - added_contexts: &mut HashSet, cx: &mut ViewContext, ) { if let Some(excerpts) = self.excerpts.get(&buffer_id) { @@ -3418,9 +3425,7 @@ impl OutlinePanel { }; let excerpt_depth = parent_depth + 1; self.push_entry( - entries, - match_candidates, - added_contexts, + state, track_matches, PanelEntry::Outline(OutlineEntry::Excerpt( buffer_id, @@ -3434,8 +3439,7 @@ impl OutlinePanel { let mut outline_base_depth = excerpt_depth + 1; if is_singleton { outline_base_depth = 0; - entries.clear(); - match_candidates.clear(); + state.clear(); } else if query.is_none() && self .collapsed_entries @@ -3446,9 +3450,7 @@ impl OutlinePanel { for outline in excerpt.iter_outlines() { self.push_entry( - entries, - match_candidates, - added_contexts, + state, track_matches, PanelEntry::Outline(OutlineEntry::Outline( buffer_id, @@ -3466,9 +3468,7 @@ impl OutlinePanel { #[allow(clippy::too_many_arguments)] fn add_search_entries( &mut self, - entries: &mut Vec, - match_candidates: &mut Vec, - added_contexts: &mut HashSet, + state: &mut GenerationState, parent_entry: FsEntry, parent_depth: usize, filter_query: Option, @@ -3499,7 +3499,8 @@ impl OutlinePanel { || related_excerpts.contains(&match_range.end.excerpt_id) }); - let previous_search_matches = entries + let previous_search_matches = state + .entries .iter() .skip_while(|entry| { if let PanelEntry::Fs(entry) = &entry.entry { @@ -3554,9 +3555,7 @@ impl OutlinePanel { .collect::>(); for new_search_entry in new_search_entries { self.push_entry( - entries, - match_candidates, - added_contexts, + state, filter_query.is_some(), PanelEntry::Search(new_search_entry), depth, @@ -3603,7 +3602,7 @@ impl OutlinePanel { fn selected_entry(&self) -> Option<&PanelEntry> { match &self.selected_entry { SelectedEntry::Invalidated(entry) => entry.as_ref(), - SelectedEntry::Valid(entry) => Some(entry), + SelectedEntry::Valid(entry, _) => Some(entry), SelectedEntry::None => None, } } @@ -3612,135 +3611,443 @@ impl OutlinePanel { if focus { self.focus_handle.focus(cx); } - self.selected_entry = SelectedEntry::Valid(entry); + let ix = self + .cached_entries + .iter() + .enumerate() + .find(|(_, cached_entry)| &cached_entry.entry == &entry) + .map(|(i, _)| i) + .unwrap_or_default(); + + self.selected_entry = SelectedEntry::Valid(entry, ix); + self.autoscroll(cx); cx.notify(); } -} -fn 
cleanup_fs_entries_without_search_children( - collapsed_entries: &HashSet, - entries: &mut Vec, - string_match_candidates: &mut Vec, - added_contexts: &mut HashSet, -) { - let mut match_ids_to_remove = BTreeSet::new(); - let mut previous_entry = None::<&PanelEntry>; - for (id, entry) in entries.iter().enumerate().rev() { - let has_search_items = match (previous_entry, &entry.entry) { - (Some(PanelEntry::Outline(_)), _) => unreachable!(), - (_, PanelEntry::Outline(_)) => false, - (_, PanelEntry::Search(_)) => true, - (None, PanelEntry::FoldedDirs(_, _) | PanelEntry::Fs(_)) => false, - ( - Some(PanelEntry::Search(_)), - PanelEntry::FoldedDirs(_, _) | PanelEntry::Fs(FsEntry::Directory(..)), - ) => false, - (Some(PanelEntry::FoldedDirs(..)), PanelEntry::FoldedDirs(..)) => true, - ( - Some(PanelEntry::Search(_)), - PanelEntry::Fs(FsEntry::File(..) | FsEntry::ExternalFile(..)), - ) => true, - ( - Some(PanelEntry::Fs(previous_fs)), - PanelEntry::FoldedDirs(folded_worktree, folded_dirs), - ) => { - let expected_parent = folded_dirs.last().map(|dir_entry| dir_entry.path.as_ref()); - match previous_fs { - FsEntry::ExternalFile(..) => false, - FsEntry::File(file_worktree, file_entry, ..) => { - file_worktree == folded_worktree - && file_entry.path.parent() == expected_parent - } - FsEntry::Directory(directory_wortree, directory_entry) => { - directory_wortree == folded_worktree - && directory_entry.path.parent() == expected_parent - } - } - } - ( - Some(PanelEntry::FoldedDirs(folded_worktree, folded_dirs)), - PanelEntry::Fs(fs_entry), - ) => match fs_entry { - FsEntry::File(..) | FsEntry::ExternalFile(..) => false, - FsEntry::Directory(directory_wortree, maybe_parent_directory) => { - directory_wortree == folded_worktree - && Some(maybe_parent_directory.path.as_ref()) - == folded_dirs - .first() - .and_then(|dir_entry| dir_entry.path.parent()) - } - }, - (Some(PanelEntry::Fs(previous_entry)), PanelEntry::Fs(maybe_parent_entry)) => { - match (previous_entry, maybe_parent_entry) { - (FsEntry::ExternalFile(..), _) | (_, FsEntry::ExternalFile(..)) => false, - (FsEntry::Directory(..) 
| FsEntry::File(..), FsEntry::File(..)) => false, - ( - FsEntry::Directory(previous_worktree, previous_directory), - FsEntry::Directory(new_worktree, maybe_parent_directory), - ) => { - previous_worktree == new_worktree - && previous_directory.path.parent() - == Some(maybe_parent_directory.path.as_ref()) - } - ( - FsEntry::File(previous_worktree, previous_file, ..), - FsEntry::Directory(new_worktree, maybe_parent_directory), - ) => { - previous_worktree == new_worktree - && previous_file.path.parent() - == Some(maybe_parent_directory.path.as_ref()) - } - } + fn render_vertical_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) + || !(self.show_scrollbar || self.vertical_scrollbar_state.is_dragging()) + { + return None; + } + Some( + div() + .occlude() + .id("project-panel-vertical-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + cx.listener(|outline_panel, _, cx| { + if !outline_panel.vertical_scrollbar_state.is_dragging() + && !outline_panel.focus_handle.contains_focused(cx) + { + outline_panel.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .h_full() + .absolute() + .right_1() + .top_1() + .bottom_0() + .w(px(12.)) + .cursor_default() + .children(Scrollbar::vertical(self.vertical_scrollbar_state.clone())), + ) + } + + fn render_horizontal_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) + || !(self.show_scrollbar || self.horizontal_scrollbar_state.is_dragging()) + { + return None; + } + + let scroll_handle = self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|size| size.contents.width > size.item.width)? 
+ .contents + .width + .0 as f64; + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + + Some( + div() + .occlude() + .id("project-panel-horizontal-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + cx.listener(|outline_panel, _, cx| { + if !outline_panel.horizontal_scrollbar_state.is_dragging() + && !outline_panel.focus_handle.contains_focused(cx) + { + outline_panel.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .w_full() + .absolute() + .right_1() + .left_1() + .bottom_0() + .h(px(12.)) + .cursor_default() + .when(self.width.is_some(), |this| { + this.children(Scrollbar::horizontal( + self.horizontal_scrollbar_state.clone(), + )) + }), + ) + } + + fn should_show_scrollbar(cx: &AppContext) -> bool { + let show = OutlinePanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => true, + ShowScrollbar::Always => true, + ShowScrollbar::Never => false, + } + } + + fn should_autohide_scrollbar(cx: &AppContext) -> bool { + let show = OutlinePanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => cx + .try_global::() + .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0), + ShowScrollbar::Always => false, + ShowScrollbar::Never => true, + } + } + + fn hide_scrollbar(&mut self, cx: &mut ViewContext) { + const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1); + if !Self::should_autohide_scrollbar(cx) { + return; + } + self.hide_scrollbar_task = Some(cx.spawn(|panel, mut cx| async move { + cx.background_executor() + .timer(SCROLLBAR_SHOW_INTERVAL) + .await; + panel + .update(&mut cx, |panel, cx| { + panel.show_scrollbar = false; + cx.notify(); + }) + .log_err(); + })) + } + + fn width_estimate(&self, depth: usize, entry: &PanelEntry, cx: &AppContext) -> u64 { + let item_text_chars = match entry { + PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => self + .buffer_snapshot_for_id(*buffer_id, cx) + .and_then(|snapshot| { + Some(snapshot.file()?.path().file_name()?.to_string_lossy().len()) + }) + .unwrap_or_default(), + PanelEntry::Fs(FsEntry::Directory(_, directory)) => directory + .path + .file_name() + .map(|name| name.to_string_lossy().len()) + .unwrap_or_default(), + PanelEntry::Fs(FsEntry::File(_, file, _, _)) => file + .path + .file_name() + .map(|name| name.to_string_lossy().len()) + .unwrap_or_default(), + PanelEntry::FoldedDirs(_, dirs) => { + dirs.iter() + .map(|dir| { + dir.path + .file_name() + .map(|name| name.to_string_lossy().len()) + .unwrap_or_default() + }) + .sum::() + + dirs.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() } + PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, _, range)) => self + .excerpt_label(*buffer_id, range, cx) + .map(|label| label.len()) + .unwrap_or_default(), + PanelEntry::Outline(OutlineEntry::Outline(_, _, outline)) => outline.text.len(), + PanelEntry::Search(search) => search.render_data.context_text.len(), }; - if has_search_items { - previous_entry = Some(&entry.entry); + (item_text_chars + depth) as u64 + } 
+ + fn render_main_contents( + &mut self, + query: Option, + show_indent_guides: bool, + indent_size: f32, + cx: &mut ViewContext<'_, Self>, + ) -> Div { + let contents = if self.cached_entries.is_empty() { + let header = if self.updating_fs_entries { + "Loading outlines" + } else if query.is_some() { + "No matches for query" + } else { + "No outlines available" + }; + + v_flex() + .flex_1() + .justify_center() + .size_full() + .child(h_flex().justify_center().child(Label::new(header))) + .when_some(query.clone(), |panel, query| { + panel.child(h_flex().justify_center().child(Label::new(query))) + }) + .child( + h_flex() + .pt(Spacing::Small.rems(cx)) + .justify_center() + .child({ + let keystroke = match self.position(cx) { + DockPosition::Left => { + cx.keystroke_text_for(&workspace::ToggleLeftDock) + } + DockPosition::Bottom => { + cx.keystroke_text_for(&workspace::ToggleBottomDock) + } + DockPosition::Right => { + cx.keystroke_text_for(&workspace::ToggleRightDock) + } + }; + Label::new(format!("Toggle this panel with {keystroke}")) + }), + ) } else { - let collapsed_entries_to_check = match &entry.entry { - PanelEntry::FoldedDirs(worktree_id, entries) => entries - .iter() - .map(|entry| CollapsedEntry::Dir(*worktree_id, entry.id)) - .collect(), - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => { - vec![CollapsedEntry::Dir(*worktree_id, entry.id)] - } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - vec![CollapsedEntry::ExternalFile(*buffer_id)] - } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - vec![CollapsedEntry::File(*worktree_id, *buffer_id)] - } - PanelEntry::Search(_) | PanelEntry::Outline(_) => Vec::new(), + let list_contents = { + let items_len = self.cached_entries.len(); + let multi_buffer_snapshot = self + .active_editor() + .map(|editor| editor.read(cx).buffer().read(cx).snapshot(cx)); + uniform_list(cx.view().clone(), "entries", items_len, { + move |outline_panel, range, cx| { + let entries = outline_panel.cached_entries.get(range); + entries + .map(|entries| entries.to_vec()) + .unwrap_or_default() + .into_iter() + .filter_map(|cached_entry| match cached_entry.entry { + PanelEntry::Fs(entry) => Some(outline_panel.render_entry( + &entry, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )), + PanelEntry::FoldedDirs(worktree_id, entries) => { + Some(outline_panel.render_folded_dirs( + worktree_id, + &entries, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )) + } + PanelEntry::Outline(OutlineEntry::Excerpt( + buffer_id, + excerpt_id, + excerpt, + )) => outline_panel.render_excerpt( + buffer_id, + excerpt_id, + &excerpt, + cached_entry.depth, + cx, + ), + PanelEntry::Outline(OutlineEntry::Outline( + buffer_id, + excerpt_id, + outline, + )) => Some(outline_panel.render_outline( + buffer_id, + excerpt_id, + &outline, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )), + PanelEntry::Search(SearchEntry { + match_range, + render_data, + kind, + .. 
+ }) => Some(outline_panel.render_search_match( + multi_buffer_snapshot.as_ref(), + &match_range, + &render_data, + kind, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )), + }) + .collect() + } + }) + .with_sizing_behavior(ListSizingBehavior::Infer) + .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) + .with_width_from_item(self.max_width_item_index) + .track_scroll(self.scroll_handle.clone()) + .when(show_indent_guides, |list| { + list.with_decoration( + ui::indent_guides( + cx.view().clone(), + px(indent_size), + IndentGuideColors::panel(cx), + |outline_panel, range, _| { + let entries = outline_panel.cached_entries.get(range); + if let Some(entries) = entries { + entries.into_iter().map(|item| item.depth).collect() + } else { + smallvec::SmallVec::new() + } + }, + ) + .with_render_fn( + cx.view().clone(), + move |outline_panel, params, _| { + const LEFT_OFFSET: f32 = 14.; + + let indent_size = params.indent_size; + let item_height = params.item_height; + let active_indent_guide_ix = find_active_indent_guide_ix( + outline_panel, + ¶ms.indent_guides, + ); + + params + .indent_guides + .into_iter() + .enumerate() + .map(|(ix, layout)| { + let bounds = Bounds::new( + point( + px(layout.offset.x as f32) * indent_size + + px(LEFT_OFFSET), + px(layout.offset.y as f32) * item_height, + ), + size(px(1.), px(layout.length as f32) * item_height), + ); + ui::RenderedIndentGuide { + bounds, + layout, + is_active: active_indent_guide_ix == Some(ix), + hitbox: None, + } + }) + .collect() + }, + ), + ) + }) }; - if !collapsed_entries_to_check.is_empty() - && collapsed_entries_to_check - .iter() - .any(|collapsed_entry| collapsed_entries.contains(collapsed_entry)) - { - previous_entry = Some(&entry.entry); - continue; - } - match_ids_to_remove.insert(id); - previous_entry = None; + + v_flex() + .flex_shrink() + .size_full() + .child(list_contents.size_full().flex_shrink()) + .children(self.render_vertical_scrollbar(cx)) + .when_some(self.render_horizontal_scrollbar(cx), |this, scrollbar| { + this.pb_4().child(scrollbar) + }) } - } + .children(self.context_menu.as_ref().map(|(menu, position, _)| { + deferred( + anchored() + .position(*position) + .anchor(gpui::AnchorCorner::TopLeft) + .child(menu.clone()), + ) + .with_priority(1) + })); - if match_ids_to_remove.is_empty() { - return; + v_flex().w_full().flex_1().overflow_hidden().child(contents) } - string_match_candidates.retain(|candidate| { - let retain = !match_ids_to_remove.contains(&candidate.id); - if !retain { - added_contexts.remove(&candidate.string); - } - retain - }); - match_ids_to_remove.into_iter().rev().for_each(|id| { - entries.remove(id); - }); + fn render_filter_footer(&mut self, pinned: bool, cx: &mut ViewContext<'_, Self>) -> Div { + v_flex().flex_none().child(horizontal_separator(cx)).child( + h_flex() + .p_2() + .w_full() + .child(self.filter_editor.clone()) + .child( + div().child( + IconButton::new( + "outline-panel-menu", + if pinned { + IconName::Unpin + } else { + IconName::Pin + }, + ) + .tooltip(move |cx| { + Tooltip::text( + if pinned { + "Unpin Outline" + } else { + "Pin Active Outline" + }, + cx, + ) + }) + .shape(IconButtonShape::Square) + .on_click(cx.listener(|outline_panel, _, cx| { + outline_panel.toggle_active_editor_pin(&ToggleActiveEditorPin, cx); + })), + ), + ), + ) + } } fn workspace_active_editor( @@ -3893,14 +4200,34 @@ impl EventEmitter for OutlinePanel {} impl Render for OutlinePanel { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let 
project = self.project.read(cx); + let (is_local, is_via_ssh) = self + .project + .read_with(cx, |project, _| (project.is_local(), project.is_via_ssh())); let query = self.query(cx); let pinned = self.pinned; + let settings = OutlinePanelSettings::get_global(cx); + let indent_size = settings.indent_size; + let show_indent_guides = settings.indent_guides.show == ShowIndentGuides::Always; - let outline_panel = v_flex() + let search_query = match &self.mode { + ItemsDisplayMode::Search(search_query) => Some(search_query), + _ => None, + }; + + v_flex() .id("outline-panel") .size_full() + .overflow_hidden() .relative() + .on_hover(cx.listener(|this, hovered, cx| { + if *hovered { + this.show_scrollbar = true; + this.hide_scrollbar_task.take(); + cx.notify(); + } else if !this.focus_handle.contains_focused(cx) { + this.hide_scrollbar(cx); + } + })) .key_context(self.dispatch_context(cx)) .on_action(cx.listener(Self::open)) .on_action(cx.listener(Self::cancel)) @@ -3918,10 +4245,10 @@ impl Render for OutlinePanel { .on_action(cx.listener(Self::toggle_active_editor_pin)) .on_action(cx.listener(Self::unfold_directory)) .on_action(cx.listener(Self::fold_directory)) - .when(project.is_local(), |el| { + .when(is_local, |el| { el.on_action(cx.listener(Self::reveal_in_finder)) }) - .when(project.is_local() || project.is_via_ssh(), |el| { + .when(is_local || is_via_ssh, |el| { el.on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( @@ -3934,177 +4261,57 @@ impl Render for OutlinePanel { } }), ) - .track_focus(&self.focus_handle); - - if self.cached_entries.is_empty() { - let header = if self.updating_fs_entries { - "Loading outlines" - } else if query.is_some() { - "No matches for query" - } else { - "No outlines available" - }; - - outline_panel.child( - v_flex() - .justify_center() - .size_full() - .child(h_flex().justify_center().child(Label::new(header))) - .when_some(query.clone(), |panel, query| { - panel.child(h_flex().justify_center().child(Label::new(query))) - }) - .child( - h_flex() - .pt(Spacing::Small.rems(cx)) - .justify_center() - .child({ - let keystroke = match self.position(cx) { - DockPosition::Left => { - cx.keystroke_text_for(&workspace::ToggleLeftDock) - } - DockPosition::Bottom => { - cx.keystroke_text_for(&workspace::ToggleBottomDock) - } - DockPosition::Right => { - cx.keystroke_text_for(&workspace::ToggleRightDock) - } - }; - Label::new(format!("Toggle this panel with {keystroke}")) - }), - ), - ) - } else { - let search_query = match &self.mode { - ItemsDisplayMode::Search(search_query) => Some(search_query), - _ => None, - }; - outline_panel - .when_some(search_query, |outline_panel, search_state| { - outline_panel.child( - div() - .mx_2() - .child( - Label::new(format!("Searching: '{}'", search_state.query)) - .color(Color::Muted), - ) - .child(horizontal_separator(cx)), - ) - }) - .child({ - let items_len = self.cached_entries.len(); - let multi_buffer_snapshot = self - .active_editor() - .map(|editor| editor.read(cx).buffer().read(cx).snapshot(cx)); - uniform_list(cx.view().clone(), "entries", items_len, { - move |outline_panel, range, cx| { - let entries = outline_panel.cached_entries.get(range); - entries - .map(|entries| entries.to_vec()) - .unwrap_or_default() - .into_iter() - .filter_map(|cached_entry| match cached_entry.entry { - PanelEntry::Fs(entry) => Some(outline_panel.render_entry( - &entry, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), - PanelEntry::FoldedDirs(worktree_id, entries) => { - 
Some(outline_panel.render_folded_dirs( - worktree_id, - &entries, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )) - } - PanelEntry::Outline(OutlineEntry::Excerpt( - buffer_id, - excerpt_id, - excerpt, - )) => outline_panel.render_excerpt( - buffer_id, - excerpt_id, - &excerpt, - cached_entry.depth, - cx, - ), - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - outline, - )) => Some(outline_panel.render_outline( - buffer_id, - excerpt_id, - &outline, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), - PanelEntry::Search(SearchEntry { - match_range, - render_data, - kind, - .. - }) => Some(outline_panel.render_search_match( - multi_buffer_snapshot.as_ref(), - &match_range, - &render_data, - kind, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), - }) - .collect() - } - }) - .size_full() - .track_scroll(self.scroll_handle.clone()) - }) - } - .children(self.context_menu.as_ref().map(|(menu, position, _)| { - deferred( - anchored() - .position(*position) - .anchor(gpui::AnchorCorner::TopLeft) - .child(menu.clone()), - ) - .with_priority(1) - })) - .child( - v_flex().child(horizontal_separator(cx)).child( - h_flex().p_2().child(self.filter_editor.clone()).child( - div().child( - IconButton::new( - "outline-panel-menu", - if pinned { - IconName::Unpin - } else { - IconName::Pin - }, + .track_focus(&self.focus_handle(cx)) + .when_some(search_query, |outline_panel, search_state| { + outline_panel.child( + v_flex() + .child( + Label::new(format!("Searching: '{}'", search_state.query)) + .color(Color::Muted) + .mx_2(), ) - .tooltip(move |cx| { - Tooltip::text( - if pinned { - "Unpin Outline" - } else { - "Pin Active Outline" - }, - cx, - ) - }) - .shape(IconButtonShape::Square) - .on_click(cx.listener(|outline_panel, _, cx| { - outline_panel.toggle_active_editor_pin(&ToggleActiveEditorPin, cx); - })), - ), - ), - ), - ) + .child(horizontal_separator(cx)), + ) + }) + .child(self.render_main_contents(query, show_indent_guides, indent_size, cx)) + .child(self.render_filter_footer(pinned, cx)) } } +fn find_active_indent_guide_ix( + outline_panel: &OutlinePanel, + candidates: &[IndentGuideLayout], +) -> Option { + let SelectedEntry::Valid(_, target_ix) = &outline_panel.selected_entry else { + return None; + }; + let target_depth = outline_panel + .cached_entries + .get(*target_ix) + .map(|cached_entry| cached_entry.depth)?; + + let (target_ix, target_depth) = if let Some(target_depth) = outline_panel + .cached_entries + .get(target_ix + 1) + .filter(|cached_entry| cached_entry.depth > target_depth) + .map(|entry| entry.depth) + { + (target_ix + 1, target_depth.saturating_sub(1)) + } else { + (*target_ix, target_depth.saturating_sub(1)) + }; + + candidates + .iter() + .enumerate() + .find(|(_, guide)| { + guide.offset.y <= target_ix + && target_ix < guide.offset.y + guide.length + && guide.offset.x == target_depth + }) + .map(|(ix, _)| ix) +} + fn subscribe_for_editor_events( editor: &View, cx: &mut ViewContext, @@ -4168,6 +4375,21 @@ fn horizontal_separator(cx: &mut WindowContext) -> Div { div().mx_2().border_primary(cx).border_t_1() } +#[derive(Debug, Default)] +struct GenerationState { + entries: Vec, + match_candidates: Vec, + max_width_estimate_and_index: Option<(u64, usize)>, +} + +impl GenerationState { + fn clear(&mut self) { + self.entries.clear(); + self.match_candidates.clear(); + self.max_width_estimate_and_index = None; + } +} + #[cfg(test)] mod tests { use gpui::{TestAppContext, VisualTestContext, 
WindowHandle}; @@ -4374,6 +4596,117 @@ mod tests { }); } + #[gpui::test] + async fn test_item_filtering(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + populate_with_test_ra_project(&fs, "/rust-analyzer").await; + let project = Project::test(fs.clone(), ["/rust-analyzer".as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(Arc::new(rust_lang())) + }); + let workspace = add_outline_panel(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let outline_panel = outline_panel(&workspace, cx); + outline_panel.update(cx, |outline_panel, cx| outline_panel.set_active(true, cx)); + + workspace + .update(cx, |workspace, cx| { + ProjectSearchView::deploy_search(workspace, &workspace::DeploySearch::default(), cx) + }) + .unwrap(); + let search_view = workspace + .update(cx, |workspace, cx| { + workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()) + .expect("Project search view expected to appear after new search event trigger") + }) + .unwrap(); + + let query = "param_names_for_lifetime_elision_hints"; + perform_project_search(&search_view, query, cx); + search_view.update(cx, |search_view, cx| { + search_view + .results_editor() + .update(cx, |results_editor, cx| { + assert_eq!( + results_editor.display_text(cx).match_indices(query).count(), + 9 + ); + }); + }); + let all_matches = r#"/ + crates/ + ide/src/ + inlay_hints/ + fn_lifetime_fn.rs + search: match config.param_names_for_lifetime_elision_hints { + search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + search: Some(it) if config.param_names_for_lifetime_elision_hints => { + search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + inlay_hints.rs + search: pub param_names_for_lifetime_elision_hints: bool, + search: param_names_for_lifetime_elision_hints: self + static_index.rs + search: param_names_for_lifetime_elision_hints: false, + rust-analyzer/src/ + cli/ + analysis_stats.rs + search: param_names_for_lifetime_elision_hints: true, + config.rs + search: param_names_for_lifetime_elision_hints: self"#; + + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None,), + all_matches, + ); + }); + + let filter_text = "a"; + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.filter_editor.update(cx, |filter_editor, cx| { + filter_editor.set_text(filter_text, cx); + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None), + all_matches + .lines() + .filter(|item| item.contains(filter_text)) + .collect::>() + .join("\n"), + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.filter_editor.update(cx, |filter_editor, cx| { + filter_editor.set_text("", cx); + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None,), + all_matches, + ); + }); + } + #[gpui::test] async fn test_frontend_repo_structure(cx: &mut TestAppContext) { init_test(cx); diff --git 
a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs
index e19fc3c0084947..2759424c6aef35 100644
--- a/crates/outline_panel/src/outline_panel_settings.rs
+++ b/crates/outline_panel/src/outline_panel_settings.rs
@@ -1,3 +1,4 @@
+use editor::ShowScrollbar;
 use gpui::Pixels;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
@@ -10,6 +11,13 @@ pub enum OutlinePanelDockPosition {
     Right,
 }
 
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+pub enum ShowIndentGuides {
+    Always,
+    Never,
+}
+
 #[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
 pub struct OutlinePanelSettings {
     pub button: bool,
@@ -19,8 +27,37 @@ pub struct OutlinePanelSettings {
     pub folder_icons: bool,
     pub git_status: bool,
     pub indent_size: f32,
+    pub indent_guides: IndentGuidesSettings,
     pub auto_reveal_entries: bool,
     pub auto_fold_dirs: bool,
+    pub scrollbar: ScrollbarSettings,
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct ScrollbarSettings {
+    /// When to show the scrollbar in the outline panel.
+    ///
+    /// Default: inherits editor scrollbar settings
+    pub show: Option<ShowScrollbar>,
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct ScrollbarSettingsContent {
+    /// When to show the scrollbar in the outline panel.
+    ///
+    /// Default: inherits editor scrollbar settings
+    pub show: Option<Option<ShowScrollbar>>,
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct IndentGuidesSettings {
+    pub show: ShowIndentGuides,
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct IndentGuidesSettingsContent {
+    /// When to show indent guides in the outline panel.
+    pub show: Option<ShowIndentGuides>,
 }
 
 #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
@@ -64,6 +101,10 @@ pub struct OutlinePanelSettingsContent {
     ///
     /// Default: true
     pub auto_fold_dirs: Option<bool>,
+    /// Settings related to indent guides in the outline panel.
+ pub indent_guides: Option, + /// Scrollbar-related settings + pub scrollbar: Option, } impl Settings for OutlinePanelSettings { diff --git a/crates/picker/src/head.rs b/crates/picker/src/head.rs index 1a103b252f92fd..5ebcaf13a52e90 100644 --- a/crates/picker/src/head.rs +++ b/crates/picker/src/head.rs @@ -52,8 +52,8 @@ impl EmptyHead { } impl Render for EmptyHead { - fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { - div().track_focus(&self.focus_handle) + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div().track_focus(&self.focus_handle(cx)) } } diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index a9512606d27845..5ebbcd3330333e 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -108,7 +108,11 @@ pub trait PickerDelegate: Sized + 'static { fn should_dismiss(&self) -> bool { true } - fn confirm_completion(&self, _query: String) -> Option { + fn confirm_completion( + &mut self, + _query: String, + _: &mut ViewContext>, + ) -> Option { None } @@ -370,7 +374,7 @@ impl Picker { } fn confirm_completion(&mut self, _: &ConfirmCompletion, cx: &mut ViewContext) { - if let Some(new_query) = self.delegate.confirm_completion(self.query(cx)) { + if let Some(new_query) = self.delegate.confirm_completion(self.query(cx), cx) { self.set_query(new_query, cx); } else { cx.propagate() diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index d2d56696a696e3..d7b13c99926c45 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -14,14 +14,14 @@ use std::{ }; use util::paths::PathMatcher; -#[derive(Clone)] +#[derive(Debug, Clone)] pub enum Prettier { Real(RealPrettier), #[cfg(any(test, feature = "test-support"))] Test(TestPrettier), } -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct RealPrettier { default: bool, prettier_dir: PathBuf, @@ -29,7 +29,7 @@ pub struct RealPrettier { } #[cfg(any(test, feature = "test-support"))] -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct TestPrettier { prettier_dir: PathBuf, default: bool, @@ -329,11 +329,7 @@ impl Prettier { })? 
.context("prettier params calculation")?; - let response = local - .server - .request::(params) - .await - .context("prettier format request")?; + let response = local.server.request::(params).await?; let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(response.text, cx))?; Ok(diff_task.await) } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index c6234008e29a4a..51d3f385886aff 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -2,7 +2,7 @@ use crate::{ dap_store::DapStore, search::SearchQuery, worktree_store::{WorktreeStore, WorktreeStoreEvent}, - Item, NoRepositoryError, ProjectPath, + Item, ProjectPath, }; use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; use anyhow::{anyhow, Context as _, Result}; @@ -1147,7 +1147,7 @@ impl BufferStore { buffer: &Model, version: Option, cx: &AppContext, - ) -> Task> { + ) -> Task>> { let buffer = buffer.read(cx); let Some(file) = File::from_dyn(buffer.file()) else { return Task::ready(Err(anyhow!("buffer has no file"))); @@ -1159,7 +1159,7 @@ impl BufferStore { let blame_params = maybe!({ let (repo_entry, local_repo_entry) = match worktree.repo_for_path(&file.path) { Some(repo_for_path) => repo_for_path, - None => anyhow::bail!(NoRepositoryError {}), + None => return Ok(None), }; let relative_path = repo_entry @@ -1173,13 +1173,16 @@ impl BufferStore { None => buffer.as_rope().clone(), }; - anyhow::Ok((repo, relative_path, content)) + anyhow::Ok(Some((repo, relative_path, content))) }); cx.background_executor().spawn(async move { - let (repo, relative_path, content) = blame_params?; + let Some((repo, relative_path, content)) = blame_params? else { + return Ok(None); + }; repo.blame(&relative_path, content) .with_context(|| format!("Failed to blame {:?}", relative_path.0)) + .map(Some) }) } Worktree::Remote(worktree) => { @@ -2151,7 +2154,13 @@ fn is_not_found_error(error: &anyhow::Error) -> bool { .is_some_and(|err| err.kind() == io::ErrorKind::NotFound) } -fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBufferResponse { +fn serialize_blame_buffer_response(blame: Option) -> proto::BlameBufferResponse { + let Some(blame) = blame else { + return proto::BlameBufferResponse { + blame_response: None, + }; + }; + let entries = blame .entries .into_iter() @@ -2193,14 +2202,19 @@ fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBuff .collect::>(); proto::BlameBufferResponse { - entries, - messages, - permalinks, - remote_url: blame.remote_url, + blame_response: Some(proto::blame_buffer_response::BlameResponse { + entries, + messages, + permalinks, + remote_url: blame.remote_url, + }), } } -fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> git::blame::Blame { +fn deserialize_blame_buffer_response( + response: proto::BlameBufferResponse, +) -> Option { + let response = response.blame_response?; let entries = response .entries .into_iter() @@ -2241,10 +2255,10 @@ fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> gi }) .collect::>(); - Blame { + Some(Blame { entries, permalinks, messages, remote_url: response.remote_url, - } + }) } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 104977419b62b9..f107779c244e04 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -8,10 +8,11 @@ use crate::{ prettier_store::{self, PrettierStore, PrettierStoreEvent}, 
project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, + toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent}, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, CodeAction, Completion, CoreCompletion, Hover, InlayHint, Item as _, ProjectPath, - ProjectTransaction, ResolveState, Symbol, + ProjectTransaction, ResolveState, Symbol, ToolchainStore, }; use anyhow::{anyhow, Context as _, Result}; use async_trait::async_trait; @@ -29,6 +30,7 @@ use gpui::{ Task, WeakModel, }; use http_client::HttpClient; +use itertools::Itertools as _; use language::{ language_settings::{ language_settings, FormatOnSave, Formatter, LanguageSettings, SelectedFormatter, @@ -37,9 +39,9 @@ use language::{ proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageName, - LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LocalFile, LspAdapter, - LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LanguageToolchainStore, + LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, @@ -145,7 +147,6 @@ pub struct LocalLspStore { dap_store: Model, prettier_store: Model, current_lsp_settings: HashMap, - last_formatting_failure: Option, _subscription: gpui::Subscription, } @@ -564,9 +565,7 @@ impl LocalLspStore { })?; prettier_store::format_with_prettier(&prettier, &buffer.handle, cx) .await - .transpose() - .ok() - .flatten() + .transpose()? 
} Formatter::External { command, arguments } => { Self::format_via_external_command(buffer, command, arguments.as_deref(), cx) @@ -676,6 +675,7 @@ impl LocalLspStore { } } +#[derive(Debug)] pub struct FormattableBuffer { handle: Model, abs_path: Option, @@ -705,16 +705,18 @@ impl LspStoreMode { pub struct LspStore { mode: LspStoreMode, + last_formatting_failure: Option, downstream_client: Option<(AnyProtoClient, u64)>, nonce: u128, buffer_store: Model, worktree_store: Model, + toolchain_store: Option>, buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots pub languages: Arc, language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, pub language_server_statuses: BTreeMap, active_entry: Option, - _maintain_workspace_config: Task>, + _maintain_workspace_config: (Task>, watch::Sender<()>), _maintain_buffer_languages: Task<()>, next_diagnostic_group_id: usize, diagnostic_summaries: @@ -786,6 +788,7 @@ impl LspStore { pub fn init(client: &AnyProtoClient) { client.add_model_request_handler(Self::handle_multi_lsp_query); client.add_model_request_handler(Self::handle_restart_language_servers); + client.add_model_request_handler(Self::handle_cancel_language_server_work); client.add_model_message_handler(Self::handle_start_language_server); client.add_model_message_handler(Self::handle_update_language_server); client.add_model_message_handler(Self::handle_language_server_log); @@ -874,6 +877,7 @@ impl LspStore { worktree_store: Model, dap_store: Model, prettier_store: Model, + toolchain_store: Model, environment: Model, languages: Arc, http_client: Arc, @@ -887,9 +891,15 @@ impl LspStore { .detach(); cx.subscribe(&prettier_store, Self::on_prettier_store_event) .detach(); + cx.subscribe(&toolchain_store, Self::on_toolchain_store_event) + .detach(); cx.observe_global::(Self::on_settings_changed) .detach(); + let _maintain_workspace_config = { + let (sender, receiver) = watch::channel(); + (Self::maintain_workspace_config(receiver, cx), sender) + }; Self { mode: LspStoreMode::Local(LocalLspStore { supplementary_language_servers: Default::default(), @@ -899,7 +909,6 @@ impl LspStore { language_server_watcher_registrations: Default::default(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), buffers_being_formatted: Default::default(), - last_formatting_failure: None, prettier_store, dap_store, environment, @@ -910,9 +919,11 @@ impl LspStore { this.as_local_mut().unwrap().shutdown_language_servers(cx) }), }), + last_formatting_failure: None, downstream_client: None, buffer_store, worktree_store, + toolchain_store: Some(toolchain_store), languages: languages.clone(), language_server_ids: Default::default(), language_server_statuses: Default::default(), @@ -923,7 +934,7 @@ impl LspStore { diagnostics: Default::default(), active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } } @@ -946,9 +957,10 @@ impl LspStore { }) } - pub fn new_remote( + pub(super) fn new_remote( buffer_store: Model, worktree_store: Model, + toolchain_store: Option>, languages: Arc, upstream_client: AnyProtoClient, project_id: u64, @@ -958,13 +970,17 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - + let _maintain_workspace_config = { + let (sender, receiver) = watch::channel(); + (Self::maintain_workspace_config(receiver, cx), sender) + }; Self { mode: 
LspStoreMode::Remote(RemoteLspStore { upstream_client: Some(upstream_client), upstream_project_id: project_id, }), downstream_client: None, + last_formatting_failure: None, buffer_store, worktree_store, languages: languages.clone(), @@ -976,7 +992,8 @@ impl LspStore { diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), + toolchain_store, + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } } @@ -1067,6 +1084,22 @@ impl LspStore { } } + fn on_toolchain_store_event( + &mut self, + _: Model, + event: &ToolchainStoreEvent, + _: &mut ModelContext, + ) { + match event { + ToolchainStoreEvent::ToolchainActivated { .. } => { + self.request_workspace_config_refresh() + } + } + } + + fn request_workspace_config_refresh(&mut self) { + *self._maintain_workspace_config.1.borrow_mut() = (); + } // todo! pub fn prettier_store(&self) -> Option> { self.as_local().map(|local| local.prettier_store.clone()) @@ -3041,17 +3074,13 @@ impl LspStore { None } - fn maintain_workspace_config(cx: &mut ModelContext) -> Task> { - let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel(); - let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx); - - let settings_observation = cx.observe_global::(move |_, _| { - *settings_changed_tx.borrow_mut() = (); - }); - - cx.spawn(move |this, mut cx| async move { - while let Some(()) = settings_changed_rx.next().await { - let servers = this.update(&mut cx, |this, cx| { + pub(crate) async fn refresh_workspace_configurations( + this: &WeakModel, + mut cx: AsyncAppContext, + ) { + maybe!(async move { + let servers = this + .update(&mut cx, |this, cx| { this.language_server_ids .iter() .filter_map(|((worktree_id, _), server_id)| { @@ -3073,17 +3102,52 @@ impl LspStore { } }) .collect::>() - })?; + }) + .ok()?; + + let toolchain_store = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; + for (adapter, server, delegate) in servers { + let settings = adapter + .workspace_configuration(&delegate, toolchain_store.clone(), &mut cx) + .await + .ok()?; - for (adapter, server, delegate) in servers { - let settings = adapter.workspace_configuration(&delegate, &mut cx).await?; + server + .notify::( + lsp::DidChangeConfigurationParams { settings }, + ) + .ok(); + } + Some(()) + }) + .await; + } - server - .notify::( - lsp::DidChangeConfigurationParams { settings }, - ) - .ok(); - } + fn toolchain_store(&self, cx: &AppContext) -> Arc { + if let Some(toolchain_store) = self.toolchain_store.as_ref() { + toolchain_store.read(cx).as_language_toolchain_store() + } else { + Arc::new(EmptyToolchainStore) + } + } + fn maintain_workspace_config( + external_refresh_requests: watch::Receiver<()>, + cx: &mut ModelContext, + ) -> Task> { + let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel(); + let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx); + + let settings_observation = cx.observe_global::(move |_, _| { + *settings_changed_tx.borrow_mut() = (); + }); + + let mut joint_future = + futures::stream::select(settings_changed_rx, external_refresh_requests); + cx.spawn(move |this, cx| async move { + while let Some(()) = joint_future.next().await { + Self::refresh_workspace_configurations(&this, cx.clone()).await; } drop(settings_observation); @@ -3993,6 +4057,20 @@ impl LspStore { .or_default() .insert(server_id, summary); } + if let Some((downstream_client, 
project_id)) = &this.downstream_client { + downstream_client + .send(proto::UpdateDiagnosticSummary { + project_id: *project_id, + worktree_id: worktree_id.to_proto(), + summary: Some(proto::DiagnosticSummary { + path: project_path.path.to_string_lossy().to_string(), + language_server_id: server_id.0 as u64, + error_count: summary.error_count as u32, + warning_count: summary.warning_count as u32, + }), + }) + .log_err(); + } cx.emit(LspStoreEvent::DiagnosticsUpdated { language_server_id: LanguageServerId(message.language_server_id as usize), path: project_path, @@ -4053,7 +4131,7 @@ impl LspStore { LanguageServerProgress { title: payload.title, is_disk_based_diagnostics_progress: false, - is_cancellable: false, + is_cancellable: payload.is_cancellable.unwrap_or(false), message: payload.message, percentage: payload.percentage.map(|p| p as usize), last_update_at: cx.background_executor().now(), @@ -4069,7 +4147,7 @@ impl LspStore { LanguageServerProgress { title: None, is_disk_based_diagnostics_progress: false, - is_cancellable: false, + is_cancellable: payload.is_cancellable.unwrap_or(false), message: payload.message, percentage: payload.percentage.map(|p| p as usize), last_update_at: cx.background_executor().now(), @@ -4570,6 +4648,7 @@ impl LspStore { token, message: report.message, percentage: report.percentage, + is_cancellable: report.cancellable, }, ), }) @@ -4603,6 +4682,7 @@ impl LspStore { title: progress.title, message: progress.message, percentage: progress.percentage.map(|p| p as u32), + is_cancellable: Some(progress.is_cancellable), }), }) } @@ -4633,6 +4713,9 @@ impl LspStore { if progress.percentage.is_some() { entry.percentage = progress.percentage; } + if progress.is_cancellable != entry.is_cancellable { + entry.is_cancellable = progress.is_cancellable; + } cx.notify(); return true; } @@ -5103,22 +5186,52 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { this.update(&mut cx, |this, cx| { - let buffers: Vec<_> = envelope - .payload - .buffer_ids - .into_iter() - .flat_map(|buffer_id| { - this.buffer_store - .read(cx) - .get(BufferId::new(buffer_id).log_err()?) - }) - .collect(); - this.restart_language_servers_for_buffers(buffers, cx) + let buffers = this.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx); + this.restart_language_servers_for_buffers(buffers, cx); })?; Ok(proto::Ack {}) } + pub async fn handle_cancel_language_server_work( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + this.update(&mut cx, |this, cx| { + if let Some(work) = envelope.payload.work { + match work { + proto::cancel_language_server_work::Work::Buffers(buffers) => { + let buffers = + this.buffer_ids_to_buffers(buffers.buffer_ids.into_iter(), cx); + this.cancel_language_server_work_for_buffers(buffers, cx); + } + proto::cancel_language_server_work::Work::LanguageServerWork(work) => { + let server_id = LanguageServerId::from_proto(work.language_server_id); + this.cancel_language_server_work(server_id, work.token, cx); + } + } + } + })?; + + Ok(proto::Ack {}) + } + + fn buffer_ids_to_buffers( + &mut self, + buffer_ids: impl Iterator, + cx: &mut ModelContext, + ) -> Vec> { + buffer_ids + .into_iter() + .flat_map(|buffer_id| { + self.buffer_store + .read(cx) + .get(BufferId::new(buffer_id).log_err()?) 
+ }) + .collect::>() + } + async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -5164,9 +5277,9 @@ impl LspStore { .map(language::proto::serialize_transaction), }) } + pub fn last_formatting_failure(&self) -> Option<&str> { - self.as_local() - .and_then(|local| local.last_formatting_failure.as_deref()) + self.last_formatting_failure.as_deref() } pub fn environment_for_buffer( @@ -5237,23 +5350,16 @@ impl LspStore { cx.clone(), ) .await; - lsp_store.update(&mut cx, |lsp_store, _| { - let local = lsp_store.as_local_mut().unwrap(); - match &result { - Ok(_) => local.last_formatting_failure = None, - Err(error) => { - local.last_formatting_failure.replace(error.to_string()); - } - } + lsp_store.update_last_formatting_failure(&result); })?; result }) } else if let Some((client, project_id)) = self.upstream_client() { let buffer_store = self.buffer_store(); - cx.spawn(move |_, mut cx| async move { - let response = client + cx.spawn(move |lsp_store, mut cx| async move { + let result = client .request(proto::FormatBuffers { project_id, trigger: trigger as i32, @@ -5264,13 +5370,21 @@ impl LspStore { }) .collect::>()?, }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; + .await + .and_then(|result| result.transaction.context("missing transaction")); + + lsp_store.update(&mut cx, |lsp_store, _| { + lsp_store.update_last_formatting_failure(&result); + })?; + let transaction_response = result?; buffer_store .update(&mut cx, |buffer_store, cx| { - buffer_store.deserialize_project_transaction(response, push_to_history, cx) + buffer_store.deserialize_project_transaction( + transaction_response, + push_to_history, + cx, + ) })? .await }) @@ -5292,7 +5406,7 @@ impl LspStore { buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); } let trigger = FormatTrigger::from_proto(envelope.payload.trigger); - Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, FormatTarget::Buffer, cx)) + anyhow::Ok(this.format(buffers, false, trigger, FormatTarget::Buffer, cx)) })??; let project_transaction = format.await?; @@ -5529,6 +5643,9 @@ impl LspStore { let delegate = delegate.clone(); let adapter = adapter.clone(); let this = this.clone(); + let toolchains = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; let mut cx = cx.clone(); async move { let language_server = pending_server.await?; @@ -5536,7 +5653,7 @@ impl LspStore { let workspace_config = adapter .adapter .clone() - .workspace_configuration(&delegate, &mut cx) + .workspace_configuration(&delegate, toolchains.clone(), &mut cx) .await?; let mut initialization_options = adapter @@ -5861,7 +5978,6 @@ impl LspStore { let adapter = adapter.clone(); if let Some(this) = this.upgrade() { adapter.process_diagnostics(&mut params); - // Everything else has to be on the server, Can we make it on the client? 
this.update(&mut cx, |this, cx| { this.update_diagnostics( server_id, @@ -5876,17 +5992,21 @@ impl LspStore { } }) .detach(); - language_server .on_request::({ let adapter = adapter.adapter.clone(); let delegate = delegate.clone(); + let this = this.clone(); move |params, mut cx| { let adapter = adapter.clone(); let delegate = delegate.clone(); + let this = this.clone(); async move { - let workspace_config = - adapter.workspace_configuration(&delegate, &mut cx).await?; + let toolchains = + this.update(&mut cx, |this, cx| this.toolchain_store(cx))?; + let workspace_config = adapter + .workspace_configuration(&delegate, toolchains, &mut cx) + .await?; Ok(params .items .into_iter() @@ -6657,16 +6777,89 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - let servers = buffers - .into_iter() - .flat_map(|buffer| { - self.language_server_ids_for_buffer(buffer.read(cx), cx) - .into_iter() - }) - .collect::>(); + if let Some((client, project_id)) = self.upstream_client() { + let request = client.request(proto::CancelLanguageServerWork { + project_id, + work: Some(proto::cancel_language_server_work::Work::Buffers( + proto::cancel_language_server_work::Buffers { + buffer_ids: buffers + .into_iter() + .map(|b| b.read(cx).remote_id().to_proto()) + .collect(), + }, + )), + }); + cx.background_executor() + .spawn(request) + .detach_and_log_err(cx); + } else { + let servers = buffers + .into_iter() + .flat_map(|buffer| { + self.language_server_ids_for_buffer(buffer.read(cx), cx) + .into_iter() + }) + .collect::>(); - for server_id in servers { - self.cancel_language_server_work(server_id, None, cx); + for server_id in servers { + self.cancel_language_server_work(server_id, None, cx); + } + } + } + + pub(crate) fn cancel_language_server_work( + &mut self, + server_id: LanguageServerId, + token_to_cancel: Option, + cx: &mut ModelContext, + ) { + if let Some(local) = self.as_local() { + let status = self.language_server_statuses.get(&server_id); + let server = local.language_servers.get(&server_id); + if let Some((LanguageServerState::Running { server, .. }, status)) = server.zip(status) + { + for (token, progress) in &status.pending_work { + if let Some(token_to_cancel) = token_to_cancel.as_ref() { + if token != token_to_cancel { + continue; + } + } + if progress.is_cancellable { + server + .notify::( + WorkDoneProgressCancelParams { + token: lsp::NumberOrString::String(token.clone()), + }, + ) + .ok(); + } + + if progress.is_cancellable { + server + .notify::( + WorkDoneProgressCancelParams { + token: lsp::NumberOrString::String(token.clone()), + }, + ) + .ok(); + } + } + } + } else if let Some((client, project_id)) = self.upstream_client() { + let request = client.request(proto::CancelLanguageServerWork { + project_id, + work: Some( + proto::cancel_language_server_work::Work::LanguageServerWork( + proto::cancel_language_server_work::LanguageServerWork { + language_server_id: server_id.to_proto(), + token: token_to_cancel, + }, + ), + ), + }); + cx.background_executor() + .spawn(request) + .detach_and_log_err(cx); } } @@ -6797,47 +6990,6 @@ impl LspStore { } } - pub(crate) fn cancel_language_server_work( - &mut self, - server_id: LanguageServerId, - token_to_cancel: Option, - _cx: &mut ModelContext, - ) { - let Some(local) = self.as_local() else { - return; - }; - let status = self.language_server_statuses.get(&server_id); - let server = local.language_servers.get(&server_id); - if let Some((LanguageServerState::Running { server, .. 
}, status)) = server.zip(status) { - for (token, progress) in &status.pending_work { - if let Some(token_to_cancel) = token_to_cancel.as_ref() { - if token != token_to_cancel { - continue; - } - } - if progress.is_cancellable { - server - .notify::( - WorkDoneProgressCancelParams { - token: lsp::NumberOrString::String(token.clone()), - }, - ) - .ok(); - } - - if progress.is_cancellable { - server - .notify::( - WorkDoneProgressCancelParams { - token: lsp::NumberOrString::String(token.clone()), - }, - ) - .ok(); - } - } - } - } - pub fn wait_for_remote_buffer( &mut self, id: BufferId, @@ -7227,6 +7379,18 @@ impl LspStore { lsp_action, }) } + + fn update_last_formatting_failure(&mut self, formatting_result: &anyhow::Result) { + match &formatting_result { + Ok(_) => self.last_formatting_failure = None, + Err(error) => { + let error_string = format!("{error:#}"); + log::error!("Formatting failed: {error_string}"); + self.last_formatting_failure + .replace(error_string.lines().join(" ")); + } + } + } } impl EventEmitter for LspStore {} diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 282895a6066779..2888bdd4b675c4 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -12,6 +12,7 @@ pub mod search; mod task_inventory; pub mod task_store; pub mod terminals; +pub mod toolchain_store; pub mod worktree_store; #[cfg(test)] @@ -25,9 +26,7 @@ mod yarn; use anyhow::{anyhow, Context as _, Result}; use buffer_store::{BufferStore, BufferStoreEvent}; -use client::{ - proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore, -}; +use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore}; use clock::ReplicaId; use dap::{ @@ -55,8 +54,8 @@ use itertools::Itertools; use language::{ language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, DiagnosticEntry, Documentation, File as _, Language, - LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageName, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, + Toolchain, ToolchainList, Transaction, Unclipped, }; use lsp::{ CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer, LanguageServerId, @@ -112,7 +111,7 @@ pub use lsp_store::{ LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, SERVER_PROGRESS_THROTTLE_TIMEOUT, }; - +pub use toolchain_store::ToolchainStore; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; const MAX_SEARCH_RESULT_FILES: usize = 5_000; const MAX_SEARCH_RESULT_RANGES: usize = 10_000; @@ -164,13 +163,13 @@ pub struct Project { remotely_created_models: Arc>, terminals: Terminals, node: Option, - hosted_project_id: Option, search_history: SearchHistory, search_included_history: SearchHistory, search_excluded_history: SearchHistory, snippets: Model, environment: Model, settings_observer: Model, + toolchain_store: Option>, } #[derive(Default)] @@ -310,6 +309,13 @@ impl ProjectPath { path: self.path.to_string_lossy().to_string(), } } + + pub fn root_path(worktree_id: WorktreeId) -> Self { + Self { + worktree_id, + path: Path::new("").into(), + } + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -597,6 +603,7 @@ impl Project { LspStore::init(&client); SettingsObserver::init(&client); TaskStore::init(Some(&client)); + ToolchainStore::init(&client); } pub fn local( @@ -665,13 +672,16 @@ impl Project { }); cx.subscribe(&settings_observer, 
Self::on_settings_observer_event) .detach(); - + let toolchain_store = cx.new_model(|cx| { + ToolchainStore::local(languages.clone(), worktree_store.clone(), cx) + }); let lsp_store = cx.new_model(|cx| { LspStore::new_local( buffer_store.clone(), worktree_store.clone(), dap_store.clone(), prettier_store.clone(), + toolchain_store.clone(), environment.clone(), languages.clone(), client.http_client(), @@ -707,13 +717,14 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - hosted_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), + + toolchain_store: Some(toolchain_store), } }) } @@ -737,7 +748,7 @@ impl Project { let ssh_proto = ssh.read(cx).proto_client(); let worktree_store = - cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), 0)); + cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), SSH_PROJECT_ID)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -770,10 +781,14 @@ impl Project { .detach(); let environment = ProjectEnvironment::new(&worktree_store, None, cx); + let toolchain_store = Some(cx.new_model(|cx| { + ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx) + })); let lsp_store = cx.new_model(|cx| { LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), + toolchain_store.clone(), languages.clone(), ssh_proto.clone(), SSH_PROJECT_ID, @@ -836,13 +851,14 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - hosted_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), + + toolchain_store, }; let ssh = ssh.read(cx); @@ -858,11 +874,12 @@ impl Project { ssh_proto.add_model_message_handler(Self::handle_toast); ssh_proto.add_model_request_handler(Self::handle_language_server_prompt_request); ssh_proto.add_model_message_handler(Self::handle_hide_toast); - ssh_proto.add_model_request_handler(BufferStore::handle_update_buffer); + ssh_proto.add_model_request_handler(Self::handle_update_buffer_from_ssh); BufferStore::init(&ssh_proto); LspStore::init(&ssh_proto); SettingsObserver::init(&ssh_proto); TaskStore::init(Some(&ssh_proto)); + ToolchainStore::init(&ssh_proto); this }) @@ -960,6 +977,7 @@ impl Project { let mut lsp_store = LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), + None, languages.clone(), client.clone().into(), remote_id, @@ -1046,12 +1064,12 @@ impl Project { local_handles: Vec::new(), }, node: None, - hosted_project_id: None, search_history: Self::new_search_history(), search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), + toolchain_store: None, }; this.set_role(role, cx); for worktree in worktrees { @@ -1098,47 +1116,6 @@ impl Project { Ok(this) } - pub async fn hosted( - remote_id: ProjectId, - user_store: Model, - client: Arc, - languages: Arc, - fs: Arc, - cx: AsyncAppContext, - ) -> Result> { - client.authenticate_and_connect(true, &cx).await?; - - let subscriptions = [ - EntitySubscription::Project(client.subscribe_to_entity::(remote_id.0)?), - EntitySubscription::BufferStore( - 
client.subscribe_to_entity::(remote_id.0)?, - ), - EntitySubscription::WorktreeStore( - client.subscribe_to_entity::(remote_id.0)?, - ), - EntitySubscription::LspStore(client.subscribe_to_entity::(remote_id.0)?), - EntitySubscription::SettingsObserver( - client.subscribe_to_entity::(remote_id.0)?, - ), - ]; - let response = client - .request_envelope(proto::JoinHostedProject { - project_id: remote_id.0, - }) - .await?; - Self::from_join_project_response( - response, - subscriptions, - client, - true, - user_store, - languages, - fs, - cx, - ) - .await - } - fn new_search_history() -> SearchHistory { SearchHistory::new( Some(MAX_PROJECT_SEARCH_HISTORY_SIZE), @@ -1536,10 +1513,6 @@ impl Project { } } - pub fn hosted_project_id(&self) -> Option { - self.hosted_project_id - } - pub fn supports_terminal(&self, _cx: &AppContext) -> bool { if self.is_local() { return true; @@ -1613,7 +1586,7 @@ impl Project { } pub fn host(&self) -> Option<&Collaborator> { - self.collaborators.values().find(|c| c.replica_id == 0) + self.collaborators.values().find(|c| c.is_host) } pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool, cx: &mut AppContext) { @@ -2673,6 +2646,46 @@ impl Project { .map_err(|e| anyhow!(e)) } + pub fn available_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + if let Some(toolchain_store) = self.toolchain_store.as_ref() { + toolchain_store + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } else { + Task::ready(None) + } + } + pub fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut AppContext, + ) -> Task> { + let Some(toolchain_store) = self.toolchain_store.clone() else { + return Task::ready(None); + }; + toolchain_store.update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }) + } + pub fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let Some(toolchain_store) = self.toolchain_store.clone() else { + return Task::ready(None); + }; + toolchain_store + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } pub fn language_server_statuses<'a>( &'a self, cx: &'a AppContext, @@ -3364,7 +3377,7 @@ impl Project { } /// Returns the resolved version of `path`, that was found in `buffer`, if it exists. 
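/// For example, an absolute or `~`-prefixed path is checked against the filesystem and yields `ResolvedPath::AbsPath { .. }`, while a relative path is looked up in the worktrees containing `buffer` and yields `ResolvedPath::ProjectPath { .. }`; `None` is returned when nothing exists at the resolved location.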
- pub fn resolve_existing_file_path( + pub fn resolve_path_in_buffer( &self, path: &str, buffer: &Model, @@ -3372,47 +3385,56 @@ impl Project { ) -> Task> { let path_buf = PathBuf::from(path); if path_buf.is_absolute() || path.starts_with("~") { - self.resolve_abs_file_path(path, cx) + self.resolve_abs_path(path, cx) } else { self.resolve_path_in_worktrees(path_buf, buffer, cx) } } - pub fn abs_file_path_exists(&self, path: &str, cx: &mut ModelContext) -> Task { - let resolve_task = self.resolve_abs_file_path(path, cx); + pub fn resolve_abs_file_path( + &self, + path: &str, + cx: &mut ModelContext, + ) -> Task> { + let resolve_task = self.resolve_abs_path(path, cx); cx.background_executor().spawn(async move { let resolved_path = resolve_task.await; - resolved_path.is_some() + resolved_path.filter(|path| path.is_file()) }) } - fn resolve_abs_file_path( + pub fn resolve_abs_path( &self, path: &str, cx: &mut ModelContext, ) -> Task> { if self.is_local() { let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); - let fs = self.fs.clone(); cx.background_executor().spawn(async move { let path = expanded.as_path(); - let exists = fs.is_file(path).await; + let metadata = fs.metadata(path).await.ok().flatten(); - exists.then(|| ResolvedPath::AbsPath(expanded)) + metadata.map(|metadata| ResolvedPath::AbsPath { + path: expanded, + is_dir: metadata.is_dir, + }) }) } else if let Some(ssh_client) = self.ssh_client.as_ref() { let request = ssh_client .read(cx) .proto_client() - .request(proto::CheckFileExists { + .request(proto::GetPathMetadata { project_id: SSH_PROJECT_ID, path: path.to_string(), }); cx.background_executor().spawn(async move { let response = request.await.log_err()?; if response.exists { - Some(ResolvedPath::AbsPath(PathBuf::from(response.path))) + Some(ResolvedPath::AbsPath { + path: PathBuf::from(response.path), + is_dir: response.is_dir, + }) } else { None } @@ -3451,10 +3473,14 @@ impl Project { resolved.strip_prefix(root_entry_path).unwrap_or(&resolved); worktree.entry_for_path(stripped).map(|entry| { - ResolvedPath::ProjectPath(ProjectPath { + let project_path = ProjectPath { worktree_id: worktree.id(), path: entry.path.clone(), - }) + }; + ResolvedPath::ProjectPath { + project_path, + is_dir: entry.is_dir(), + } }) }) .ok()?; @@ -3684,12 +3710,31 @@ impl Project { worktree.get_local_repo(&root_entry)?.repo().clone().into() } + pub fn branches( + &self, + project_path: ProjectPath, + cx: &AppContext, + ) -> Task>> { + self.worktree_store().read(cx).branches(project_path, cx) + } + + pub fn update_or_create_branch( + &self, + repository: ProjectPath, + new_branch: String, + cx: &AppContext, + ) -> Task> { + self.worktree_store() + .read(cx) + .update_or_create_branch(repository, new_branch, cx) + } + pub fn blame_buffer( &self, buffer: &Model, version: Option, cx: &AppContext, - ) -> Task> { + ) -> Task>> { self.buffer_store.read(cx).blame_buffer(buffer, version, cx) } @@ -3764,7 +3809,7 @@ impl Project { .collaborators .remove(&old_peer_id) .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?; - let is_host = collaborator.replica_id == 0; + let is_host = collaborator.is_host; this.collaborators.insert(new_peer_id, collaborator); log::info!("peer {} became {}", old_peer_id, new_peer_id,); @@ -3872,6 +3917,13 @@ impl Project { anyhow::Ok(()) })??; + // We drop `this` to avoid holding a reference in this future for too + // long. 
+ // If we keep the reference, we might not drop the `Project` early + // enough when closing a window and it will only get releases on the + // next `flush_effects()` call. + drop(this); + let answer = rx.next().await; Ok(LanguageServerPromptResponse { @@ -3915,6 +3967,24 @@ impl Project { })? } + async fn handle_update_buffer_from_ssh( + this: Model, + envelope: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let buffer_store = this.read_with(&cx, |this, cx| { + if let Some(remote_id) = this.remote_id() { + let mut payload = envelope.payload.clone(); + payload.project_id = remote_id; + cx.background_executor() + .spawn(this.client.request(payload)) + .detach_and_log_err(cx); + } + this.buffer_store.clone() + })?; + BufferStore::handle_update_buffer(buffer_store, envelope, cx).await + } + async fn handle_update_buffer( this: Model, envelope: TypedEnvelope, @@ -4446,24 +4516,41 @@ fn resolve_path(base: &Path, path: &Path) -> PathBuf { /// or an AbsPath and that *exists*. #[derive(Debug, Clone)] pub enum ResolvedPath { - ProjectPath(ProjectPath), - AbsPath(PathBuf), + ProjectPath { + project_path: ProjectPath, + is_dir: bool, + }, + AbsPath { + path: PathBuf, + is_dir: bool, + }, } impl ResolvedPath { pub fn abs_path(&self) -> Option<&Path> { match self { - Self::AbsPath(path) => Some(path.as_path()), + Self::AbsPath { path, .. } => Some(path.as_path()), _ => None, } } pub fn project_path(&self) -> Option<&ProjectPath> { match self { - Self::ProjectPath(path) => Some(&path), + Self::ProjectPath { project_path, .. } => Some(&project_path), _ => None, } } + + pub fn is_file(&self) -> bool { + !self.is_dir() + } + + pub fn is_dir(&self) -> bool { + match self { + Self::ProjectPath { is_dir, .. } => *is_dir, + Self::AbsPath { is_dir, .. } => *is_dir, + } + } } impl Item for Buffer { @@ -4515,17 +4602,6 @@ impl Completion { } } -#[derive(Debug)] -pub struct NoRepositoryError {} - -impl std::fmt::Display for NoRepositoryError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "no git repository for worktree found") - } -} - -impl std::error::Error for NoRepositoryError {} - pub fn sort_worktree_entries(entries: &mut [Entry]) { entries.sort_by(|entry_a, entry_b| { compare_paths( diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 8cbab1e6085ab4..f421f2c0013aec 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -111,6 +111,16 @@ impl GitSettings { _ => None, } } + + pub fn show_inline_commit_summary(&self) -> bool { + match self.inline_blame { + Some(InlineBlameSettings { + show_commit_summary, + .. + }) => show_commit_summary, + _ => false, + } + } } #[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)] @@ -141,12 +151,21 @@ pub struct InlineBlameSettings { /// /// Default: 0 pub min_column: Option, + /// Whether to show commit summary as part of the inline blame. 
+ /// + /// Default: false + #[serde(default = "false_value")] + pub show_commit_summary: bool, } const fn true_value() -> bool { true } +const fn false_value() -> bool { + false +} + #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] pub struct BinarySettings { pub path: Option, diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs new file mode 100644 index 00000000000000..a3f27d731b6ae4 --- /dev/null +++ b/crates/project/src/toolchain_store.rs @@ -0,0 +1,416 @@ +use std::sync::Arc; + +use anyhow::{bail, Result}; + +use async_trait::async_trait; +use collections::BTreeMap; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task, + WeakModel, +}; +use language::{LanguageName, LanguageRegistry, LanguageToolchainStore, Toolchain, ToolchainList}; +use rpc::{proto, AnyProtoClient, TypedEnvelope}; +use settings::WorktreeId; +use util::ResultExt as _; + +use crate::worktree_store::WorktreeStore; + +pub struct ToolchainStore(ToolchainStoreInner); +enum ToolchainStoreInner { + Local(Model, #[allow(dead_code)] Subscription), + Remote(Model), +} + +impl EventEmitter for ToolchainStore {} +impl ToolchainStore { + pub fn init(client: &AnyProtoClient) { + client.add_model_request_handler(Self::handle_activate_toolchain); + client.add_model_request_handler(Self::handle_list_toolchains); + client.add_model_request_handler(Self::handle_active_toolchain); + } + + pub fn local( + languages: Arc, + worktree_store: Model, + cx: &mut ModelContext, + ) -> Self { + let model = cx.new_model(|_| LocalToolchainStore { + languages, + worktree_store, + active_toolchains: Default::default(), + }); + let subscription = cx.subscribe(&model, |_, _, e: &ToolchainStoreEvent, cx| { + cx.emit(e.clone()) + }); + Self(ToolchainStoreInner::Local(model, subscription)) + } + pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut AppContext) -> Self { + Self(ToolchainStoreInner::Remote( + cx.new_model(|_| RemoteToolchainStore { client, project_id }), + )) + } + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => local.update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }), + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .activate_toolchain(worktree_id, toolchain, cx) + } + } + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => { + local + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } + } + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => { + local + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } + } + } + async fn handle_activate_toolchain( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + this.update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let 
Some(toolchain) = envelope.payload.toolchain else { + bail!("Missing `toolchain` in payload"); + }; + let toolchain = Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + language_name, + }; + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + Ok(this.activate_toolchain(worktree_id, toolchain, cx)) + })?? + .await; + Ok(proto::Ack {}) + } + async fn handle_active_toolchain( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let toolchain = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.active_toolchain(worktree_id, language_name, cx) + })? + .await; + + Ok(proto::ActiveToolchainResponse { + toolchain: toolchain.map(|toolchain| proto::Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + }), + }) + } + + async fn handle_list_toolchains( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let toolchains = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.list_toolchains(worktree_id, language_name, cx) + })? + .await; + let has_values = toolchains.is_some(); + let groups = if let Some(toolchains) = &toolchains { + toolchains + .groups + .iter() + .filter_map(|group| { + Some(proto::ToolchainGroup { + start_index: u64::try_from(group.0).ok()?, + name: String::from(group.1.as_ref()), + }) + }) + .collect() + } else { + vec![] + }; + let toolchains = if let Some(toolchains) = toolchains { + toolchains + .toolchains + .into_iter() + .map(|toolchain| proto::Toolchain { + name: toolchain.name.to_string(), + path: toolchain.path.to_string(), + }) + .collect::>() + } else { + vec![] + }; + + Ok(proto::ListToolchainsResponse { + has_values, + toolchains, + groups, + }) + } + pub(crate) fn as_language_toolchain_store(&self) -> Arc { + match &self.0 { + ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())), + ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())), + } + } +} + +struct LocalToolchainStore { + languages: Arc, + worktree_store: Model, + active_toolchains: BTreeMap<(WorktreeId, LanguageName), Toolchain>, +} + +#[async_trait(?Send)] +impl language::LanguageToolchainStore for LocalStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option { + self.0 + .update(cx, |this, cx| { + this.active_toolchain(worktree_id, language_name, cx) + }) + .ok()? + .await + } +} + +#[async_trait(?Send)] +impl language::LanguageToolchainStore for RemoteStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option { + self.0 + .update(cx, |this, cx| { + this.active_toolchain(worktree_id, language_name, cx) + }) + .ok()? 
+ .await + } +} + +pub(crate) struct EmptyToolchainStore; +#[async_trait(?Send)] +impl language::LanguageToolchainStore for EmptyToolchainStore { + async fn active_toolchain( + self: Arc, + _: WorktreeId, + _: LanguageName, + _: &mut AsyncAppContext, + ) -> Option { + None + } +} +struct LocalStore(WeakModel); +struct RemoteStore(WeakModel); + +#[derive(Clone)] +pub(crate) enum ToolchainStoreEvent { + ToolchainActivated, +} + +impl EventEmitter for LocalToolchainStore {} + +impl LocalToolchainStore { + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(move |this, mut cx| async move { + this.update(&mut cx, |this, cx| { + this.active_toolchains.insert( + (worktree_id, toolchain.language_name.clone()), + toolchain.clone(), + ); + cx.emit(ToolchainStoreEvent::ToolchainActivated); + }) + .ok(); + Some(()) + }) + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let registry = self.languages.clone(); + let Some(root) = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + else { + return Task::ready(None); + }; + cx.spawn(|_| async move { + let language = registry.language_for_name(&language_name.0).await.ok()?; + let toolchains = language.toolchain_lister()?.list(root.to_path_buf()).await; + Some(toolchains) + }) + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + _: &AppContext, + ) -> Task> { + Task::ready( + self.active_toolchains + .get(&(worktree_id, language_name)) + .cloned(), + ) + } +} +struct RemoteToolchainStore { + client: AnyProtoClient, + project_id: u64, +} + +impl RemoteToolchainStore { + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let _ = client + .request(proto::ActivateToolchain { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: toolchain.language_name.into(), + toolchain: Some(proto::Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + }), + }) + .await + .log_err()?; + Some(()) + }) + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let response = client + .request(proto::ListToolchains { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: language_name.clone().into(), + }) + .await + .log_err()?; + if !response.has_values { + return None; + } + let toolchains = response + .toolchains + .into_iter() + .map(|toolchain| Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + }) + .collect(); + let groups = response + .groups + .into_iter() + .filter_map(|group| { + Some((usize::try_from(group.start_index).ok()?, group.name.into())) + }) + .collect(); + Some(ToolchainList { + toolchains, + default: None, + groups, + }) + }) + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let response = 
client + .request(proto::ActiveToolchain { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: language_name.clone().into(), + }) + .await + .log_err()?; + + response.toolchain.map(|toolchain| Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + }) + }) + } +} diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index df190d03f39239..dc67eedbc11bfa 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -73,6 +73,8 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_copy_project_entry); client.add_model_request_handler(Self::handle_delete_project_entry); client.add_model_request_handler(Self::handle_expand_project_entry); + client.add_model_request_handler(Self::handle_git_branches); + client.add_model_request_handler(Self::handle_update_branch); } pub fn local(retain_worktrees: bool, fs: Arc) -> Self { @@ -127,6 +129,13 @@ impl WorktreeStore { .find(|worktree| worktree.read(cx).id() == id) } + pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option> { + self.worktree_for_id(repository.worktree_id, cx)? + .read(cx) + .git_entry(repository.path)? + .branch() + } + pub fn worktree_for_entry( &self, entry_id: ProjectEntryId, @@ -836,6 +845,131 @@ impl WorktreeStore { Ok(()) } + pub fn branches( + &self, + project_path: ProjectPath, + cx: &AppContext, + ) -> Task>> { + let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree found for ProjectPath"))); + }; + + match worktree.read(cx) { + Worktree::Local(local_worktree) => { + let branches = util::maybe!({ + let worktree_error = |error| { + format!( + "{} for worktree {}", + error, + local_worktree.abs_path().to_string_lossy() + ) + }; + + let entry = local_worktree + .git_entry(project_path.path) + .with_context(|| worktree_error("No git entry found"))?; + + let repo = local_worktree + .get_local_repo(&entry) + .with_context(|| worktree_error("No repository found"))? 
+ .repo() + .clone(); + + repo.branches() + }); + + Task::ready(branches) + } + Worktree::Remote(remote_worktree) => { + let request = remote_worktree.client().request(proto::GitBranches { + project_id: remote_worktree.project_id(), + repository: Some(proto::ProjectPath { + worktree_id: project_path.worktree_id.to_proto(), + path: project_path.path.to_string_lossy().to_string(), // Root path + }), + }); + + cx.background_executor().spawn(async move { + let response = request.await?; + + let branches = response + .branches + .into_iter() + .map(|proto_branch| git::repository::Branch { + is_head: proto_branch.is_head, + name: proto_branch.name.into(), + unix_timestamp: proto_branch + .unix_timestamp + .map(|timestamp| timestamp as i64), + }) + .collect(); + + Ok(branches) + }) + } + } + } + + pub fn update_or_create_branch( + &self, + repository: ProjectPath, + new_branch: String, + cx: &AppContext, + ) -> Task> { + let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree found for ProjectPath"))); + }; + + match worktree.read(cx) { + Worktree::Local(local_worktree) => { + let result = util::maybe!({ + let worktree_error = |error| { + format!( + "{} for worktree {}", + error, + local_worktree.abs_path().to_string_lossy() + ) + }; + + let entry = local_worktree + .git_entry(repository.path) + .with_context(|| worktree_error("No git entry found"))?; + + let repo = local_worktree + .get_local_repo(&entry) + .with_context(|| worktree_error("No repository found"))? + .repo() + .clone(); + + if !repo.branch_exits(&new_branch)? { + repo.create_branch(&new_branch)?; + } + + repo.change_branch(&new_branch)?; + + Ok(()) + }); + + Task::ready(result) + } + Worktree::Remote(remote_worktree) => { + let request = remote_worktree.client().request(proto::UpdateGitBranch { + project_id: remote_worktree.project_id(), + repository: Some(proto::ProjectPath { + worktree_id: repository.worktree_id.to_proto(), + path: repository.path.to_string_lossy().to_string(), // Root path + }), + branch_name: new_branch, + }); + + cx.background_executor().spawn(async move { + request.await?; + Ok(()) + }) + } + } + } + async fn filter_paths( fs: &Arc, mut input: Receiver, @@ -917,6 +1051,61 @@ impl WorktreeStore { .ok_or_else(|| anyhow!("invalid request"))?; Worktree::handle_expand_entry(worktree, envelope.payload, cx).await } + + pub async fn handle_git_branches( + this: Model, + branches: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let project_path = branches + .payload + .repository + .clone() + .context("Invalid GitBranches call")?; + let project_path = ProjectPath { + worktree_id: WorktreeId::from_proto(project_path.worktree_id), + path: Path::new(&project_path.path).into(), + }; + + let branches = this + .read_with(&cx, |this, cx| this.branches(project_path, cx))? 
+ .await?; + + Ok(proto::GitBranchesResponse { + branches: branches + .into_iter() + .map(|branch| proto::Branch { + is_head: branch.is_head, + name: branch.name.to_string(), + unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64), + }) + .collect(), + }) + } + + pub async fn handle_update_branch( + this: Model, + update_branch: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let project_path = update_branch + .payload + .repository + .clone() + .context("Invalid GitBranches call")?; + let project_path = ProjectPath { + worktree_id: WorktreeId::from_proto(project_path.worktree_id), + path: Path::new(&project_path.path).into(), + }; + let new_branch = update_branch.payload.branch_name; + + this.read_with(&cx, |this, cx| { + this.update_or_create_branch(project_path, new_branch, cx) + })? + .await?; + + Ok(proto::Ack {}) + } } #[derive(Clone, Debug)] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 50c9d2d1269c6a..6532e3e97761b9 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -30,7 +30,7 @@ use project::{ relativize_path, Entry, EntryKind, Fs, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId, }; -use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings}; +use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings, ShowIndentGuides}; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::{ @@ -2821,6 +2821,17 @@ impl ProjectPanel { return None; } + let scroll_handle = self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|size| size.contents.width > size.item.width)? + .contents + .width + .0 as f64; + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + Some( div() .occlude() @@ -3032,7 +3043,8 @@ impl Render for ProjectPanel { let has_worktree = !self.visible_entries.is_empty(); let project = self.project.read(cx); let indent_size = ProjectPanelSettings::get_global(cx).indent_size; - let indent_guides = ProjectPanelSettings::get_global(cx).indent_guides; + let show_indent_guides = + ProjectPanelSettings::get_global(cx).indent_guides.show == ShowIndentGuides::Always; let is_local = project.is_local(); if has_worktree { @@ -3125,7 +3137,7 @@ impl Render for ProjectPanel { } }), ) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( uniform_list(cx.view().clone(), "entries", item_count, { |this, range, cx| { @@ -3136,7 +3148,7 @@ impl Render for ProjectPanel { items } }) - .when(indent_guides, |list| { + .when(show_indent_guides, |list| { list.with_decoration( ui::indent_guides( cx.view().clone(), @@ -3257,7 +3269,7 @@ impl Render for ProjectPanel { .id("empty-project_panel") .size_full() .p_4() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( Button::new("open_project", "Open a project") .full_width() diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 16980c00d18b98..c8417945856bc2 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -11,6 +11,13 @@ pub enum ProjectPanelDockPosition { Right, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ShowIndentGuides { + Always, + Never, +} + #[derive(Deserialize, Debug, Clone, 
Copy, PartialEq)] pub struct ProjectPanelSettings { pub button: bool, @@ -20,12 +27,23 @@ pub struct ProjectPanelSettings { pub folder_icons: bool, pub git_status: bool, pub indent_size: f32, - pub indent_guides: bool, + pub indent_guides: IndentGuidesSettings, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, pub scrollbar: ScrollbarSettings, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct IndentGuidesSettings { + pub show: ShowIndentGuides, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct IndentGuidesSettingsContent { + /// When to show indent guides in the project panel. + pub show: Option, +} + #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. pub show: Option, @@ -72,10 +90,6 @@ pub struct ProjectPanelSettingsContent { /// /// Default: 20 pub indent_size: Option, - /// Whether to show indent guides in the project panel. - /// - /// Default: true - pub indent_guides: Option, /// Whether to reveal it in the project panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. @@ -89,6 +103,8 @@ pub struct ProjectPanelSettingsContent { pub auto_fold_dirs: Option, /// Scrollbar-related settings pub scrollbar: Option, + /// Settings related to indent guides in the project panel. + pub indent_guides: Option, } impl Settings for ProjectPanelSettings { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 5635eb880022ea..90fbc397f138db 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -196,8 +196,6 @@ message Envelope { GetImplementation get_implementation = 162; GetImplementationResponse get_implementation_response = 163; - JoinHostedProject join_hosted_project = 164; - CountLanguageModelTokens count_language_model_tokens = 230; CountLanguageModelTokensResponse count_language_model_tokens_response = 231; GetCachedEmbeddings get_cached_embeddings = 189; @@ -261,9 +259,6 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CheckFileExists check_file_exists = 255; - CheckFileExistsResponse check_file_exists_response = 256; - ShutdownRemoteServer shutdown_remote_server = 257; RemoveWorktree remove_worktree = 258; @@ -281,12 +276,30 @@ message Envelope { FlushBufferedMessages flush_buffered_messages = 267; LanguageServerPromptRequest language_server_prompt_request = 268; - LanguageServerPromptResponse language_server_prompt_response = 269; // current max - } + LanguageServerPromptResponse language_server_prompt_response = 269; + GitBranches git_branches = 270; + GitBranchesResponse git_branches_response = 271; + + UpdateGitBranch update_git_branch = 272; + + ListToolchains list_toolchains = 273; + ListToolchainsResponse list_toolchains_response = 274; + ActivateToolchain activate_toolchain = 275; + ActiveToolchain active_toolchain = 276; + ActiveToolchainResponse active_toolchain_response = 277; + GetPathMetadata get_path_metadata = 278; + GetPathMetadataResponse get_path_metadata_response = 279; + + GetPanicFiles get_panic_files = 280; + GetPanicFilesResponse get_panic_files_response = 281; + + CancelLanguageServerWork cancel_language_server_work = 282; // current max + } reserved 87 to 88; reserved 158 to 161; + reserved 164; reserved 166 to 169; reserved 177 to 185; reserved 188; @@ -297,6 +310,7 @@ message Envelope { reserved 221;
reserved 224 to 229; reserved 247 to 254; + reserved 255 to 256; } // Messages @@ -518,11 +532,6 @@ message JoinProject { uint64 project_id = 1; } -message JoinHostedProject { - uint64 project_id = 1; -} - - message ListRemoteDirectory { uint64 dev_server_id = 1; string path = 2; @@ -1250,12 +1259,14 @@ message LspWorkStart { optional string title = 4; optional string message = 2; optional uint32 percentage = 3; + optional bool is_cancellable = 5; } message LspWorkProgress { string token = 1; optional string message = 2; optional uint32 percentage = 3; + optional bool is_cancellable = 4; } message LspWorkEnd { @@ -1289,13 +1300,7 @@ message UpdateChannels { repeated ChannelMessageId latest_channel_message_ids = 8; repeated ChannelBufferVersion latest_channel_buffer_versions = 9; - repeated HostedProject hosted_projects = 10; - repeated uint64 deleted_hosted_projects = 11; - - reserved 12; - reserved 13; - reserved 14; - reserved 15; + reserved 10 to 15; } message UpdateUserChannels { @@ -1324,13 +1329,6 @@ message ChannelParticipants { repeated uint64 participant_user_ids = 2; } -message HostedProject { - uint64 project_id = 1; - uint64 channel_id = 2; - string name = 3; - ChannelVisibility visibility = 4; -} - message JoinChannel { uint64 channel_id = 1; } @@ -1730,6 +1728,7 @@ message Collaborator { PeerId peer_id = 1; uint32 replica_id = 2; uint64 user_id = 3; + bool is_host = 4; } message User { @@ -2125,10 +2124,16 @@ message CommitPermalink { } message BlameBufferResponse { - repeated BlameEntry entries = 1; - repeated CommitMessage messages = 2; - repeated CommitPermalink permalinks = 3; - optional string remote_url = 4; + message BlameResponse { + repeated BlameEntry entries = 1; + repeated CommitMessage messages = 2; + repeated CommitPermalink permalinks = 3; + optional string remote_url = 4; + } + + optional BlameResponse blame_response = 5; + + reserved 1 to 4; } message MultiLspQuery { @@ -2367,14 +2372,15 @@ message UpdateUserSettings { } } -message CheckFileExists { +message GetPathMetadata { uint64 project_id = 1; string path = 2; } -message CheckFileExistsResponse { +message GetPathMetadataResponse { bool exists = 1; string path = 2; + bool is_dir = 3; } message ShutdownRemoteServer {} @@ -2407,7 +2413,6 @@ message GetPermalinkToLine { message GetPermalinkToLineResponse { string permalink = 1; } - message FlushBufferedMessages {} message FlushBufferedMessagesResponse {} @@ -2432,3 +2437,88 @@ message LanguageServerPromptRequest { message LanguageServerPromptResponse { optional uint64 action_response = 1; } + +message ListToolchains { + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; +} + +message Toolchain { + string name = 1; + string path = 2; +} + +message ToolchainGroup { + uint64 start_index = 1; + string name = 2; +} + +message ListToolchainsResponse { + repeated Toolchain toolchains = 1; + bool has_values = 2; + repeated ToolchainGroup groups = 3; +} + +message ActivateToolchain { + uint64 project_id = 1; + uint64 worktree_id = 2; + Toolchain toolchain = 3; + string language_name = 4; +} + +message ActiveToolchain { + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; +} + +message ActiveToolchainResponse { + optional Toolchain toolchain = 1; +} + +message Branch { + bool is_head = 1; + string name = 2; + optional uint64 unix_timestamp = 3; +} + +message GitBranches { + uint64 project_id = 1; + ProjectPath repository = 2; +} + +message GitBranchesResponse { + repeated Branch branches = 1; +} + +message 
UpdateGitBranch { + uint64 project_id = 1; + string branch_name = 2; + ProjectPath repository = 3; +} + +message GetPanicFiles { +} + +message GetPanicFilesResponse { + repeated string file_contents = 2; +} + +message CancelLanguageServerWork { + uint64 project_id = 1; + + oneof work { + Buffers buffers = 2; + LanguageServerWork language_server_work = 3; + } + + message Buffers { + repeated uint64 buffer_ids = 2; + } + + message LanguageServerWork { + uint64 language_server_id = 1; + optional string token = 2; + } +} diff --git a/crates/proto/src/error.rs b/crates/proto/src/error.rs index 8a87d6fdc9e21a..680056fc1c2cbb 100644 --- a/crates/proto/src/error.rs +++ b/crates/proto/src/error.rs @@ -104,7 +104,19 @@ impl ErrorExt for anyhow::Error { if let Some(rpc_error) = self.downcast_ref::() { rpc_error.to_proto() } else { - ErrorCode::Internal.message(format!("{}", self)).to_proto() + ErrorCode::Internal + .message( + format!("{self:#}") + .lines() + .fold(String::new(), |mut message, line| { + if !message.is_empty() { + message.push(' '); + } + message.push_str(line); + message + }), + ) + .to_proto() } } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 7a31e7cc7a691a..ca0403ed72d052 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -228,7 +228,6 @@ messages!( (JoinChannelChat, Foreground), (JoinChannelChatResponse, Foreground), (JoinProject, Foreground), - (JoinHostedProject, Foreground), (JoinProjectResponse, Foreground), (JoinRoom, Foreground), (JoinRoomResponse, Foreground), @@ -344,8 +343,6 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CheckFileExists, Background), - (CheckFileExistsResponse, Background), (ShutdownRemoteServer, Foreground), (RemoveWorktree, Foreground), (LanguageServerLog, Foreground), @@ -357,6 +354,19 @@ messages!( (FlushBufferedMessages, Foreground), (LanguageServerPromptRequest, Foreground), (LanguageServerPromptResponse, Foreground), + (GitBranches, Background), + (GitBranchesResponse, Background), + (UpdateGitBranch, Background), + (ListToolchains, Foreground), + (ListToolchainsResponse, Foreground), + (ActivateToolchain, Foreground), + (ActiveToolchain, Foreground), + (ActiveToolchainResponse, Foreground), + (GetPathMetadata, Background), + (GetPathMetadataResponse, Background), + (GetPanicFiles, Background), + (GetPanicFilesResponse, Background), + (CancelLanguageServerWork, Foreground), ); request_messages!( @@ -408,7 +418,6 @@ request_messages!( (JoinChannel, JoinRoomResponse), (JoinChannelBuffer, JoinChannelBufferResponse), (JoinChannelChat, JoinChannelChatResponse), - (JoinHostedProject, JoinProjectResponse), (JoinProject, JoinProjectResponse), (JoinRoom, JoinRoomResponse), (LeaveChannelBuffer, Ack), @@ -466,13 +475,20 @@ request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CheckFileExists, CheckFileExistsResponse), (ShutdownRemoteServer, Ack), (RemoveWorktree, Ack), (OpenServerSettings, OpenBufferResponse), (GetPermalinkToLine, GetPermalinkToLineResponse), (FlushBufferedMessages, Ack), (LanguageServerPromptRequest, LanguageServerPromptResponse), + (GitBranches, GitBranchesResponse), + (UpdateGitBranch, Ack), + (ListToolchains, ListToolchainsResponse), + (ActivateToolchain, Ack), + (ActiveToolchain, ActiveToolchainResponse), + (GetPathMetadata, GetPathMetadataResponse), + (GetPanicFiles, GetPanicFilesResponse), + 
(CancelLanguageServerWork, Ack), ); entity_messages!( @@ -544,13 +560,19 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CheckFileExists, LanguageServerLog, Toast, HideToast, OpenServerSettings, GetPermalinkToLine, - LanguageServerPromptRequest + LanguageServerPromptRequest, + GitBranches, + UpdateGitBranch, + ListToolchains, + ActivateToolchain, + ActiveToolchain, + GetPathMetadata, + CancelLanguageServerWork, ); entity_messages!( diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index ed81fbb345ddee..f176d56c5134a9 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -149,7 +149,7 @@ impl Render for DisconnectedOverlay { }; div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .elevation_3(cx) .on_action(cx.listener(Self::cancel)) .occlude() diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b3770f7534b151..a4aa0e39b42db9 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -1,7 +1,7 @@ pub mod disconnected_overlay; mod remote_servers; mod ssh_connections; -pub use ssh_connections::open_ssh_project; +pub use ssh_connections::{is_connecting_over_ssh, open_ssh_project}; use disconnected_overlay::DisconnectedOverlay; use fuzzy::{StringMatch, StringMatchCandidate}; diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index faf58f312fafa7..1b83120eb36faf 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -738,7 +738,8 @@ impl RemoteServerProjects { }; let project = project.clone(); let server = server.clone(); - cx.spawn(|remote_server_projects, mut cx| async move { + cx.emit(DismissEvent); + cx.spawn(|_, mut cx| async move { let result = open_ssh_project( server.into(), project.paths.into_iter().map(PathBuf::from).collect(), @@ -757,10 +758,6 @@ impl RemoteServerProjects { ) .await .ok(); - } else { - remote_server_projects - .update(&mut cx, |_, cx| cx.emit(DismissEvent)) - .ok(); } }) .detach(); @@ -1207,7 +1204,7 @@ impl RemoteServerProjects { Modal::new("remote-projects", Some(self.scroll_handle.clone())) .header( ModalHeader::new() - .child(Headline::new("Remote Projects (alpha)").size(HeadlineSize::XSmall)), + .child(Headline::new("Remote Projects (beta)").size(HeadlineSize::XSmall)), ) .section( Section::new().padded(false).child( @@ -1269,7 +1266,7 @@ impl Render for RemoteServerProjects { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { self.selectable_items.reset(); div() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .elevation_3(cx) .w(rems(34.)) .key_context("RemoteServerModal") diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 55204e14b947dd..84618a2f491aa9 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -14,7 +14,7 @@ use gpui::{AppContext, Model}; use language::CursorShape; use markdown::{Markdown, MarkdownStyle}; use release_channel::{AppVersion, ReleaseChannel}; -use remote::ssh_session::ServerBinary; +use remote::ssh_session::{ServerBinary, ServerVersion}; use remote::{SshConnectionOptions, SshPlatform, SshRemoteClient}; use schemars::JsonSchema; use serde::{Deserialize, 
Serialize}; @@ -446,7 +446,7 @@ impl remote::SshClientDelegate for SshClientDelegate { platform: SshPlatform, upload_binary_over_ssh: bool, cx: &mut AsyncAppContext, - ) -> oneshot::Receiver> { + ) -> oneshot::Receiver> { let (tx, rx) = oneshot::channel(); let this = self.clone(); cx.spawn(|mut cx| async move { @@ -491,7 +491,7 @@ impl SshClientDelegate { platform: SshPlatform, upload_binary_via_ssh: bool, cx: &mut AsyncAppContext, - ) -> Result<(ServerBinary, SemanticVersion)> { + ) -> Result<(ServerBinary, ServerVersion)> { let (version, release_channel) = cx.update(|cx| { let version = AppVersion::global(cx); let channel = ReleaseChannel::global(cx); @@ -505,7 +505,10 @@ impl SshClientDelegate { let result = self.build_local(cx, platform, version).await?; // Fall through to a remote binary if we're not able to compile a local binary if let Some((path, version)) = result { - return Ok((ServerBinary::LocalBinary(path), version)); + return Ok(( + ServerBinary::LocalBinary(path), + ServerVersion::Semantic(version), + )); } } @@ -540,9 +543,12 @@ impl SshClientDelegate { ) })?; - Ok((ServerBinary::LocalBinary(binary_path), version)) + Ok(( + ServerBinary::LocalBinary(binary_path), + ServerVersion::Semantic(version), + )) } else { - let (request_url, request_body) = AutoUpdater::get_remote_server_release_url( + let (release, request_body) = AutoUpdater::get_remote_server_release_url( platform.os, platform.arch, release_channel, @@ -560,9 +566,14 @@ impl SshClientDelegate { ) })?; + let version = release + .version + .parse::() + .map(ServerVersion::Semantic) + .unwrap_or_else(|_| ServerVersion::Commit(release.version)); Ok(( ServerBinary::ReleaseUrl { - url: request_url, + url: release.url, body: request_body, }, version, @@ -631,7 +642,7 @@ impl SshClientDelegate { self.update_status( Some(&format!( - "Building remote server binary from source for {}", + "Building remote server binary from source for {} with Docker", &triple )), cx, @@ -678,6 +689,10 @@ impl SshClientDelegate { } } +pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &AppContext) -> bool { + workspace.active_modal::(cx).is_some() +} + pub fn connect_over_ssh( unique_identifier: String, connection_options: SshConnectionOptions, diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 422937ed234de3..a69f0330ff2cd7 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -227,6 +227,20 @@ pub enum ServerBinary { ReleaseUrl { url: String, body: String }, } +pub enum ServerVersion { + Semantic(SemanticVersion), + Commit(String), +} + +impl std::fmt::Display for ServerVersion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Semantic(version) => write!(f, "{}", version), + Self::Commit(commit) => write!(f, "{}", commit), + } + } +} + pub trait SshClientDelegate: Send + Sync { fn ask_password( &self, @@ -243,7 +257,7 @@ pub trait SshClientDelegate: Send + Sync { platform: SshPlatform, upload_binary_over_ssh: bool, cx: &mut AsyncAppContext, - ) -> oneshot::Receiver>; + ) -> oneshot::Receiver>; fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext); } @@ -1009,7 +1023,7 @@ impl SshRemoteClient { server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server")); let connection: Arc = Arc::new(fake::FakeRemoteConnection { connection_options: opts.clone(), - server_cx: fake::SendableCx::new(server_cx.to_async()), + server_cx: fake::SendableCx::new(server_cx), server_channel: 
server_client.clone(), }); @@ -1221,9 +1235,11 @@ impl RemoteConnection for SshRemoteConnection { delegate.set_status(Some("Starting proxy"), cx); let mut start_proxy_command = format!( - "RUST_LOG={} RUST_BACKTRACE={} {:?} proxy --identifier {}", + "RUST_LOG={} {} {:?} proxy --identifier {}", std::env::var("RUST_LOG").unwrap_or_default(), - std::env::var("RUST_BACKTRACE").unwrap_or_default(), + std::env::var("RUST_BACKTRACE") + .map(|b| { format!("RUST_BACKTRACE={}", b) }) + .unwrap_or_default(), remote_binary_path, unique_identifier, ); @@ -1272,6 +1288,7 @@ impl SshRemoteConnection { ) -> Result { use futures::AsyncWriteExt as _; use futures::{io::BufReader, AsyncBufReadExt as _}; + use smol::net::unix::UnixStream; use smol::{fs::unix::PermissionsExt as _, net::unix::UnixListener}; use util::ResultExt as _; @@ -1288,6 +1305,9 @@ impl SshRemoteConnection { let listener = UnixListener::bind(&askpass_socket).context("failed to create askpass socket")?; + let (askpass_kill_master_tx, askpass_kill_master_rx) = oneshot::channel::(); + let mut kill_tx = Some(askpass_kill_master_tx); + let askpass_task = cx.spawn({ let delegate = delegate.clone(); |mut cx| async move { @@ -1311,6 +1331,11 @@ impl SshRemoteConnection { .log_err() { stream.write_all(password.as_bytes()).await.log_err(); + } else { + if let Some(kill_tx) = kill_tx.take() { + kill_tx.send(stream).log_err(); + break; + } } } } @@ -1331,6 +1356,7 @@ impl SshRemoteConnection { // the connection and keep it open, allowing other ssh commands to reuse it // via a control socket. let socket_path = temp_dir.path().join("ssh.sock"); + let mut master_process = process::Command::new("ssh") .stdin(Stdio::null()) .stdout(Stdio::piped()) @@ -1353,20 +1379,28 @@ impl SshRemoteConnection { // Wait for this ssh process to close its stdout, indicating that authentication // has completed. - let stdout = master_process.stdout.as_mut().unwrap(); + let mut stdout = master_process.stdout.take().unwrap(); let mut output = Vec::new(); let connection_timeout = Duration::from_secs(10); let result = select_biased! { _ = askpass_opened_rx.fuse() => { - // If the askpass script has opened, that means the user is typing - // their password, in which case we don't want to timeout anymore, - // since we know a connection has been established. - stdout.read_to_end(&mut output).await?; - Ok(()) + select_biased! { + stream = askpass_kill_master_rx.fuse() => { + master_process.kill().ok(); + drop(stream); + Err(anyhow!("SSH connection canceled")) + } + // If the askpass script has opened, that means the user is typing + // their password, in which case we don't want to timeout anymore, + // since we know a connection has been established. + result = stdout.read_to_end(&mut output).fuse() => { + result?; + Ok(()) + } + } } - result = stdout.read_to_end(&mut output).fuse() => { - result?; + _ = stdout.read_to_end(&mut output).fuse() => { Ok(()) } _ = futures::FutureExt::fuse(smol::Timer::after(connection_timeout)) => { @@ -1702,44 +1736,63 @@ impl SshRemoteConnection { } } - if self.is_binary_in_use(dst_path).await? { - log::info!("server binary is opened by another process. not updating"); - delegate.set_status( - Some("Skipping update of remote development server, since it's still in use"), - cx, - ); - return Ok(()); + if cfg!(not(debug_assertions)) { + // When we're not in dev mode, we don't want to switch out the binary if it's + // still open. + // In dev mode, that's fine, since we often kill Zed processes with Ctrl-C and want + // to still replace the binary. 
+ if self.is_binary_in_use(dst_path).await? { + log::info!("server binary is opened by another process. not updating"); + delegate.set_status( + Some("Skipping update of remote development server, since it's still in use"), + cx, + ); + return Ok(()); + } } let upload_binary_over_ssh = self.socket.connection_options.upload_binary_over_ssh; - let (binary, version) = delegate + let (binary, new_server_version) = delegate .get_server_binary(platform, upload_binary_over_ssh, cx) .await??; - let mut remote_version = None; if cfg!(not(debug_assertions)) { - if let Ok(installed_version) = + let installed_version = if let Ok(version_output) = run_cmd(self.socket.ssh_command(dst_path).arg("version")).await { - if let Ok(version) = installed_version.trim().parse::() { - remote_version = Some(version); + if let Ok(version) = version_output.trim().parse::() { + Some(ServerVersion::Semantic(version)) } else { - log::warn!("failed to parse version of remote server: {installed_version:?}",); + Some(ServerVersion::Commit(version_output.trim().to_string())) } - } + } else { + None + }; - if let Some(remote_version) = remote_version { - if remote_version == version { - log::info!("remote development server present and matching client version"); - return Ok(()); - } else if remote_version > version { - let error = anyhow!("The version of the remote server ({}) is newer than the Zed version ({}). Please update Zed.", remote_version, version); - return Err(error); - } else { - log::info!( - "remote development server has older version: {}. updating...", - remote_version - ); + if let Some(installed_version) = installed_version { + use ServerVersion::*; + match (installed_version, new_server_version) { + (Semantic(installed), Semantic(new)) if installed == new => { + log::info!("remote development server present and matching client version"); + return Ok(()); + } + (Semantic(installed), Semantic(new)) if installed > new => { + let error = anyhow!("The version of the remote server ({}) is newer than the Zed version ({}). Please update Zed.", installed, new); + return Err(error); + } + (Commit(installed), Commit(new)) if installed == new => { + log::info!( + "remote development server present and matching client version {}", + installed + ); + return Ok(()); + } + (installed, _) => { + log::info!( + "remote development server has version: {}. updating...", + installed + ); + } } } } @@ -1964,77 +2017,97 @@ impl ChannelClient { mut incoming_rx: mpsc::UnboundedReceiver, cx: &AsyncAppContext, ) -> Task> { - cx.spawn(|cx| { - async move { - let peer_id = PeerId { owner_id: 0, id: 0 }; - while let Some(incoming) = incoming_rx.next().await { - let Some(this) = this.upgrade() else { - return anyhow::Ok(()); - }; - if let Some(ack_id) = incoming.ack_id { - let mut buffer = this.buffer.lock(); - while buffer.front().is_some_and(|msg| msg.id <= ack_id) { - buffer.pop_front(); - } + cx.spawn(|cx| async move { + let peer_id = PeerId { owner_id: 0, id: 0 }; + while let Some(incoming) = incoming_rx.next().await { + let Some(this) = this.upgrade() else { + return anyhow::Ok(()); + }; + if let Some(ack_id) = incoming.ack_id { + let mut buffer = this.buffer.lock(); + while buffer.front().is_some_and(|msg| msg.id <= ack_id) { + buffer.pop_front(); } - if let Some(proto::envelope::Payload::FlushBufferedMessages(_)) = - &incoming.payload + } + if let Some(proto::envelope::Payload::FlushBufferedMessages(_)) = &incoming.payload + { + log::debug!( + "{}:ssh message received. 
name:FlushBufferedMessages", + this.name + ); { - log::debug!("{}:ssh message received. name:FlushBufferedMessages", this.name); - { - let buffer = this.buffer.lock(); - for envelope in buffer.iter() { - this.outgoing_tx.lock().unbounded_send(envelope.clone()).ok(); - } + let buffer = this.buffer.lock(); + for envelope in buffer.iter() { + this.outgoing_tx + .lock() + .unbounded_send(envelope.clone()) + .ok(); } - let mut envelope = proto::Ack{}.into_envelope(0, Some(incoming.id), None); - envelope.id = this.next_message_id.fetch_add(1, SeqCst); - this.outgoing_tx.lock().unbounded_send(envelope).ok(); - continue; } + let mut envelope = proto::Ack {}.into_envelope(0, Some(incoming.id), None); + envelope.id = this.next_message_id.fetch_add(1, SeqCst); + this.outgoing_tx.lock().unbounded_send(envelope).ok(); + continue; + } - this.max_received.store(incoming.id, SeqCst); + this.max_received.store(incoming.id, SeqCst); - if let Some(request_id) = incoming.responding_to { - let request_id = MessageId(request_id); - let sender = this.response_channels.lock().remove(&request_id); - if let Some(sender) = sender { - let (tx, rx) = oneshot::channel(); - if incoming.payload.is_some() { - sender.send((incoming, tx)).ok(); - } - rx.await.ok(); + if let Some(request_id) = incoming.responding_to { + let request_id = MessageId(request_id); + let sender = this.response_channels.lock().remove(&request_id); + if let Some(sender) = sender { + let (tx, rx) = oneshot::channel(); + if incoming.payload.is_some() { + sender.send((incoming, tx)).ok(); } - } else if let Some(envelope) = - build_typed_envelope(peer_id, Instant::now(), incoming) - { - let type_name = envelope.payload_type_name(); - if let Some(future) = ProtoMessageHandlerSet::handle_message( - &this.message_handlers, - envelope, - this.clone().into(), - cx.clone(), - ) { - log::debug!("{}:ssh message received. name:{type_name}", this.name); - cx.foreground_executor().spawn(async move { + rx.await.ok(); + } + } else if let Some(envelope) = + build_typed_envelope(peer_id, Instant::now(), incoming) + { + let type_name = envelope.payload_type_name(); + if let Some(future) = ProtoMessageHandlerSet::handle_message( + &this.message_handlers, + envelope, + this.clone().into(), + cx.clone(), + ) { + log::debug!("{}:ssh message received. name:{type_name}", this.name); + cx.foreground_executor() + .spawn(async move { match future.await { Ok(_) => { - log::debug!("{}:ssh message handled. name:{type_name}", this.name); + log::debug!( + "{}:ssh message handled. name:{type_name}", + this.name + ); } Err(error) => { log::error!( - "{}:error handling message. type:{type_name}, error:{error}", this.name, + "{}:error handling message. 
type:{}, error:{}", + this.name, + type_name, + format!("{error:#}").lines().fold( + String::new(), + |mut message, line| { + if !message.is_empty() { + message.push(' '); + } + message.push_str(line); + message + } + ) ); } } - }).detach() - } else { - log::error!("{}:unhandled ssh message name:{type_name}", this.name); - } + }) + .detach() + } else { + log::error!("{}:unhandled ssh message name:{type_name}", this.name); } } - anyhow::Ok(()) } + anyhow::Ok(()) }) } @@ -2222,12 +2295,12 @@ mod fake { }, select_biased, FutureExt, SinkExt, StreamExt, }; - use gpui::{AsyncAppContext, SemanticVersion, Task}; + use gpui::{AsyncAppContext, Task, TestAppContext}; use rpc::proto::Envelope; use super::{ - ChannelClient, RemoteConnection, ServerBinary, SshClientDelegate, SshConnectionOptions, - SshPlatform, + ChannelClient, RemoteConnection, ServerBinary, ServerVersion, SshClientDelegate, + SshConnectionOptions, SshPlatform, }; pub(super) struct FakeRemoteConnection { @@ -2237,15 +2310,19 @@ mod fake { } pub(super) struct SendableCx(AsyncAppContext); - // safety: you can only get the other cx on the main thread. impl SendableCx { - pub(super) fn new(cx: AsyncAppContext) -> Self { - Self(cx) + // SAFETY: When run in test mode, GPUI is always single threaded. + pub(super) fn new(cx: &TestAppContext) -> Self { + Self(cx.to_async()) } + + // SAFETY: Enforce that we're on the main thread by requiring a valid AsyncAppContext fn get(&self, _: &AsyncAppContext) -> AsyncAppContext { self.0.clone() } } + + // SAFETY: There is no way to access a SendableCx from a different thread, see [`SendableCx::new`] and [`SendableCx::get`] unsafe impl Send for SendableCx {} unsafe impl Sync for SendableCx {} @@ -2347,7 +2424,7 @@ mod fake { _: SshPlatform, _: bool, _: &mut AsyncAppContext, - ) -> oneshot::Receiver> { + ) -> oneshot::Receiver> { unreachable!() } diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index d2623d5f472d64..92ddbee09473f7 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -22,9 +22,10 @@ debug-embed = ["dep:rust-embed"] test-support = ["fs/test-support"] [dependencies] -async-watch.workspace = true anyhow.workspace = true +async-watch.workspace = true backtrace = "0.3" +chrono.workspace = true clap.workspace = true client.workspace = true env_logger.workspace = true @@ -39,8 +40,10 @@ languages.workspace = true log.workspace = true lsp.workspace = true node_runtime.workspace = true -project.workspace = true paths = { workspace = true } +project.workspace = true +proto.workspace = true +release_channel.workspace = true remote.workspace = true reqwest_client.workspace = true rpc.workspace = true @@ -50,6 +53,7 @@ serde_json.workspace = true settings.workspace = true shellexpand.workspace = true smol.workspace = true +telemetry_events.workspace = true util.workspace = true worktree.workspace = true diff --git a/crates/remote_server/build.rs b/crates/remote_server/build.rs index 11a8969a4474f9..fae18897739476 100644 --- a/crates/remote_server/build.rs +++ b/crates/remote_server/build.rs @@ -1,3 +1,5 @@ +use std::process::Command; + const ZED_MANIFEST: &str = include_str!("../zed/Cargo.toml"); fn main() { @@ -7,4 +9,23 @@ fn main() { "cargo:rustc-env=ZED_PKG_VERSION={}", zed_cargo_toml.package.unwrap().version.unwrap() ); + + // If we're building this for nightly, we want to set the ZED_COMMIT_SHA + if let Some(release_channel) = std::env::var("ZED_RELEASE_CHANNEL").ok() { + if release_channel.as_str() == "nightly" { + // Populate git sha 
environment variable if git is available
+            println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
+            if let Some(output) = Command::new("git")
+                .args(["rev-parse", "HEAD"])
+                .output()
+                .ok()
+                .filter(|output| output.status.success())
+            {
+                let git_sha = String::from_utf8_lossy(&output.stdout);
+                let git_sha = git_sha.trim();
+
+                println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
+            }
+        }
+    }
 }
diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs
index c7a708b4e69777..1165f2c42f958a 100644
--- a/crates/remote_server/src/headless_project.rs
+++ b/crates/remote_server/src/headless_project.rs
@@ -1,6 +1,6 @@
 use anyhow::{anyhow, Result};
 use fs::Fs;
-use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, PromptLevel};
+use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, PromptLevel};
 use http_client::HttpClient;
 use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
 use node_runtime::NodeRuntime;
@@ -11,7 +11,7 @@ use project::{
     search::SearchQuery,
     task_store::TaskStore,
     worktree_store::WorktreeStore,
-    LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId,
+    LspStore, LspStoreEvent, PrettierStore, ProjectPath, ToolchainStore, WorktreeId,
 };
 use remote::ssh_session::ChannelClient;
 use rpc::{
@@ -113,12 +113,15 @@ impl HeadlessProject {
             observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
             observer
         });
+        let toolchain_store =
+            cx.new_model(|cx| ToolchainStore::local(languages.clone(), worktree_store.clone(), cx));
         let lsp_store = cx.new_model(|cx| {
             let mut lsp_store = LspStore::new_local(
                 buffer_store.clone(),
                 worktree_store.clone(),
                 dap_store.clone(),
                 prettier_store.clone(),
+                toolchain_store.clone(),
                 environment,
                 languages.clone(),
                 http_client,
@@ -149,10 +152,11 @@ impl HeadlessProject {
         session.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle());
         session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
         session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
+        session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
         session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);
         client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory);
-        client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists);
+        client.add_request_handler(cx.weak_model(), Self::handle_get_path_metadata);
         client.add_request_handler(cx.weak_model(), Self::handle_shutdown_remote_server);
         client.add_request_handler(cx.weak_model(), Self::handle_ping);
@@ -172,6 +176,7 @@ impl HeadlessProject {
         SettingsObserver::init(&client);
         LspStore::init(&client);
         TaskStore::init(Some(&client));
+        ToolchainStore::init(&client);
         HeadlessProject {
             session: client,
@@ -526,18 +531,20 @@ impl HeadlessProject {
         Ok(proto::ListRemoteDirectoryResponse { entries })
     }
-    pub async fn handle_check_file_exists(
+    pub async fn handle_get_path_metadata(
         this: Model<Self>,
-        envelope: TypedEnvelope<proto::CheckFileExists>,
+        envelope: TypedEnvelope<proto::GetPathMetadata>,
         cx: AsyncAppContext,
-    ) -> Result<proto::CheckFileExistsResponse> {
+    ) -> Result<proto::GetPathMetadataResponse> {
         let fs = cx.read_model(&this, |this, _| this.fs.clone())?;
         let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
-        let exists = fs.is_file(&PathBuf::from(expanded.clone())).await;
+        let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?;
+        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);
-        Ok(proto::CheckFileExistsResponse {
-            exists,
+        Ok(proto::GetPathMetadataResponse {
+            exists: metadata.is_some(),
+            is_dir,
             path: expanded,
         })
     }
diff --git
a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 72ac438e603f9e..132bd36b7b2c6d 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -72,7 +72,12 @@ fn main() { } }, Some(Commands::Version) => { - println!("{}", env!("ZED_PKG_VERSION")); + if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { + println!("{}", build_sha); + } else { + println!("{}", env!("ZED_PKG_VERSION")); + } + std::process::exit(0); } None => { diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index f7420ef5b091b7..2554aa48d2eb3d 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -26,7 +26,29 @@ use std::{ #[gpui::test] async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + "project2": { + "README.md": "# project 2", + }, + }), + ) + .await; + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], + ); + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -128,7 +150,22 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test #[gpui::test] async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, _) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; project .update(cx, |project, cx| { @@ -193,7 +230,22 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes #[gpui::test] async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; cx.update_global(|settings_store: &mut SettingsStore, cx| { settings_store.set_user_settings( @@ -304,7 +356,22 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo #[gpui::test] async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; fs.insert_tree( "/code/project1/.zed", @@ -461,9 +528,190 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext }) } +#[gpui::test] +async fn 
test_remote_cancel_language_server_work( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; + + fs.insert_tree( + "/code/project1/.zed", + json!({ + "settings.json": r#" + { + "languages": {"Rust":{"language_servers":["rust-analyzer"]}}, + "lsp": { + "rust-analyzer": { + "binary": { + "path": "~/.cargo/bin/rust-analyzer" + } + } + } + }"# + }), + ) + .await; + + cx.update_model(&project, |project, _| { + project.languages().register_test_language(LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".into()], + ..Default::default() + }, + ..Default::default() + }); + project.languages().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + ..Default::default() + }, + ) + }); + + let mut fake_lsp = server_cx.update(|cx| { + headless.read(cx).languages.register_fake_language_server( + LanguageServerName("rust-analyzer".into()), + Default::default(), + None, + ) + }); + + cx.run_until_parked(); + + let worktree_id = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap() + .0 + .read_with(cx, |worktree, _| worktree.id()); + + cx.run_until_parked(); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let mut fake_lsp = fake_lsp.next().await.unwrap(); + + // Cancelling all language server work for a given buffer + { + // Two operations, one cancellable and one not. 
+ fake_lsp + .start_progress_with( + "another-token", + lsp::WorkDoneProgressBegin { + cancellable: Some(false), + ..Default::default() + }, + ) + .await; + + let progress_token = "the-progress-token"; + fake_lsp + .start_progress_with( + progress_token, + lsp::WorkDoneProgressBegin { + cancellable: Some(true), + ..Default::default() + }, + ) + .await; + + cx.executor().run_until_parked(); + + project.update(cx, |project, cx| { + project.cancel_language_server_work_for_buffers([buffer.clone()], cx) + }); + + cx.executor().run_until_parked(); + + // Verify the cancellation was received on the server side + let cancel_notification = fake_lsp + .receive_notification::() + .await; + assert_eq!( + cancel_notification.token, + lsp::NumberOrString::String(progress_token.into()) + ); + } + + // Cancelling work by server_id and token + { + let server_id = fake_lsp.server.server_id(); + let progress_token = "the-progress-token"; + + fake_lsp + .start_progress_with( + progress_token, + lsp::WorkDoneProgressBegin { + cancellable: Some(true), + ..Default::default() + }, + ) + .await; + + cx.executor().run_until_parked(); + + project.update(cx, |project, cx| { + project.cancel_language_server_work(server_id, Some(progress_token.into()), cx) + }); + + cx.executor().run_until_parked(); + + // Verify the cancellation was received on the server side + let cancel_notification = fake_lsp + .receive_notification::() + .await; + assert_eq!( + cancel_notification.token, + lsp::NumberOrString::String(progress_token.into()) + ); + } +} + #[gpui::test] async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -522,8 +770,26 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont } #[gpui::test] -async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; +async fn test_remote_resolve_path_in_buffer( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -542,10 +808,11 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut let path = project .update(cx, |project, cx| { - project.resolve_existing_file_path("/code/project1/README.md", &buffer, cx) + project.resolve_path_in_buffer("/code/project1/README.md", &buffer, cx) }) .await .unwrap(); + assert!(path.is_file()); assert_eq!( path.abs_path().unwrap().to_string_lossy(), "/code/project1/README.md" @@ -553,20 +820,100 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut let path = project .update(cx, |project, cx| { - project.resolve_existing_file_path("../README.md", 
&buffer, cx) + project.resolve_path_in_buffer("../README.md", &buffer, cx) }) .await .unwrap(); - + assert!(path.is_file()); assert_eq!( path.project_path().unwrap().clone(), ProjectPath::from((worktree_id, "README.md")) ); + + let path = project + .update(cx, |project, cx| { + project.resolve_path_in_buffer("../src", &buffer, cx) + }) + .await + .unwrap(); + assert_eq!( + path.project_path().unwrap().clone(), + ProjectPath::from((worktree_id, "src")) + ); + assert!(path.is_dir()); +} + +#[gpui::test] +async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/README.md", cx) + }) + .await + .unwrap(); + + assert!(path.is_file()); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/README.md" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/src", cx) + }) + .await + .unwrap(); + + assert!(path.is_dir()); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/src" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/DOESNOTEXIST", cx) + }) + .await; + assert!(path.is_none()); } #[gpui::test(iterations = 10)] async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -597,7 +944,25 @@ async fn test_adding_then_removing_then_adding_worktrees( cx: &mut TestAppContext, server_cx: &mut TestAppContext, ) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + "project2": { + "README.md": "# project 2", + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (_worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -636,9 +1001,25 @@ async fn test_adding_then_removing_then_adding_worktrees( #[gpui::test] async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let buffer = project.update(cx, |project, cx| project.open_server_settings(cx)); cx.executor().run_until_parked(); + let buffer = buffer.await.unwrap(); cx.update(|cx| { @@ 
-651,7 +1032,22 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test #[gpui::test(iterations = 20)] async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { @@ -690,19 +1086,8 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) ); } -fn init_logger() { - if std::env::var("RUST_LOG").is_ok() { - env_logger::try_init().ok(); - } -} - -async fn init_test( - cx: &mut TestAppContext, - server_cx: &mut TestAppContext, -) -> (Model, Model, Arc) { - init_logger(); - - let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); +#[gpui::test] +async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", @@ -710,32 +1095,109 @@ async fn init_test( "project1": { ".git": {}, "README.md": "# project 1", - "src": { - "lib.rs": "fn one() -> usize { 1 }" - } - }, - "project2": { - "README.md": "# project 2", }, }), ) .await; - fs.set_index_for_repo( - Path::new("/code/project1/.git"), - &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], - ); - server_cx.update(HeadlessProject::init); + let (project, headless_project) = init_test(&fs, cx, server_cx).await; + let branches = ["main", "dev", "feature-1"]; + fs.insert_branches(Path::new("/code/project1/.git"), &branches); + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + let root_path = ProjectPath::root_path(worktree_id); + // Give the worktree a bit of time to index the file system + cx.run_until_parked(); + + let remote_branches = project + .update(cx, |project, cx| project.branches(root_path.clone(), cx)) + .await + .unwrap(); + + let new_branch = branches[2]; + + let remote_branches = remote_branches + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&remote_branches, &branches); + + cx.update(|cx| { + project.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx.update(|cx| { + project.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), "totally-new-branch"); +} + +pub async fn 
init_test( + server_fs: &Arc, + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) -> (Model, Model) { + let server_fs = server_fs.clone(); + init_logger(); + + let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); let http_client = Arc::new(BlockedHttpClient); let node_runtime = NodeRuntime::unavailable(); let languages = Arc::new(LanguageRegistry::new(cx.executor())); + server_cx.update(HeadlessProject::init); let headless = server_cx.new_model(|cx| { client::init_settings(cx); HeadlessProject::new( crate::HeadlessAppState { session: ssh_server_client, - fs: fs.clone(), + fs: server_fs.clone(), http_client, node_runtime, languages, @@ -752,13 +1214,21 @@ async fn init_test( |_, cx| cx.on_release(|_, _| drop(headless)) }) .detach(); - (project, headless, fs) + (project, headless) +} + +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } } fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model { cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); + if !cx.has_global::() { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + } }); let client = cx.update(|cx| { @@ -773,6 +1243,7 @@ fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model< let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); let languages = Arc::new(LanguageRegistry::test(cx.executor())); let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { Project::init(&client, cx); language::init(cx); diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index f6f98a41c19f03..a4add3354e0859 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -1,12 +1,13 @@ use crate::headless_project::HeadlessAppState; use crate::HeadlessProject; use anyhow::{anyhow, Context, Result}; -use client::ProxySettings; +use chrono::Utc; +use client::{telemetry, ProxySettings}; use fs::{Fs, RealFs}; use futures::channel::mpsc; use futures::{select, select_biased, AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt, SinkExt}; use git::GitHostingProviderRegistry; -use gpui::{AppContext, Context as _, ModelContext, UpdateGlobal as _}; +use gpui::{AppContext, Context as _, Model, ModelContext, UpdateGlobal as _}; use http_client::{read_proxy_from_env, Uri}; use language::LanguageRegistry; use node_runtime::{NodeBinaryOptions, NodeRuntime}; @@ -21,19 +22,23 @@ use remote::{ }; use reqwest_client::ReqwestClient; use rpc::proto::{self, Envelope, SSH_PROJECT_ID}; +use rpc::{AnyProtoClient, TypedEnvelope}; use settings::{watch_config_file, Settings, SettingsStore}; use smol::channel::{Receiver, Sender}; use smol::io::AsyncReadExt; use smol::Async; use smol::{net::unix::UnixListener, stream::StreamExt as _}; +use std::ffi::OsStr; use std::ops::ControlFlow; +use std::{env, thread}; use std::{ io::Write, mem, path::{Path, PathBuf}, sync::Arc, }; +use telemetry_events::LocationData; use util::ResultExt; fn init_logging_proxy() { @@ -131,16 +136,97 @@ fn init_panic_hook() { backtrace.drain(0..=ix); } + let thread = thread::current(); + let thread_name = thread.name().unwrap_or(""); + log::error!( "panic occurred: {}\nBacktrace:\n{}", - payload, - backtrace.join("\n") + &payload, + (&backtrace).join("\n") ); + let panic_data = telemetry_events::Panic { + thread: thread_name.into(), + payload: payload.clone(), + location_data: info.location().map(|location| LocationData { + file: location.file().into(), + line: location.line(), + }), + 
app_version: format!( + "remote-server-{}", + option_env!("ZED_COMMIT_SHA").unwrap_or(&env!("ZED_PKG_VERSION")) + ), + release_channel: release_channel::RELEASE_CHANNEL.display_name().into(), + os_name: telemetry::os_name(), + os_version: Some(telemetry::os_version()), + architecture: env::consts::ARCH.into(), + panicked_on: Utc::now().timestamp_millis(), + backtrace, + system_id: None, // Set on SSH client + installation_id: None, // Set on SSH client + session_id: "".to_string(), // Set on SSH client + }; + + if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() { + let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string(); + let panic_file_path = paths::logs_dir().join(format!("zed-{timestamp}.panic")); + let panic_file = std::fs::OpenOptions::new() + .append(true) + .create(true) + .open(&panic_file_path) + .log_err(); + if let Some(mut panic_file) = panic_file { + writeln!(&mut panic_file, "{panic_data_json}").log_err(); + panic_file.flush().log_err(); + } + } + std::process::abort(); })); } +fn handle_panic_requests(project: &Model, client: &Arc) { + let client: AnyProtoClient = client.clone().into(); + client.add_request_handler( + project.downgrade(), + |_, _: TypedEnvelope, _cx| async move { + let mut children = smol::fs::read_dir(paths::logs_dir()).await?; + let mut panic_files = Vec::new(); + while let Some(child) = children.next().await { + let child = child?; + let child_path = child.path(); + + if child_path.extension() != Some(OsStr::new("panic")) { + continue; + } + let filename = if let Some(filename) = child_path.file_name() { + filename.to_string_lossy() + } else { + continue; + }; + + if !filename.starts_with("zed") { + continue; + } + + let file_contents = smol::fs::read_to_string(&child_path) + .await + .context("error reading panic file")?; + + panic_files.push(file_contents); + + // We've done what we can, delete the file + std::fs::remove_file(child_path) + .context("error removing panic") + .log_err(); + } + anyhow::Ok(proto::GetPanicFilesResponse { + file_contents: panic_files, + }) + }, + ); +} + struct ServerListeners { stdin: UnixListener, stdout: UnixListener, @@ -368,7 +454,7 @@ pub fn execute_run( HeadlessProject::new( HeadlessAppState { - session, + session: session.clone(), fs, http_client, node_runtime, @@ -378,6 +464,8 @@ pub fn execute_run( ) }); + handle_panic_requests(&project, &session); + mem::forget(project); }); log::info!("gpui app is shut down. 
quitting."); diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index 001bf157d54c05..f035878d332654 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -21,13 +21,16 @@ client.workspace = true collections.workspace = true command_palette_hooks.workspace = true editor.workspace = true +feature_flags.workspace = true futures.workspace = true gpui.workspace = true image.workspace = true language.workspace = true log.workspace = true markdown_preview.workspace = true +menu.workspace = true multi_buffer.workspace = true +nbformat.workspace = true project.workspace = true runtimelib.workspace = true schemars.workspace = true diff --git a/crates/repl/src/notebook.rs b/crates/repl/src/notebook.rs new file mode 100644 index 00000000000000..9c6738f79979e8 --- /dev/null +++ b/crates/repl/src/notebook.rs @@ -0,0 +1,4 @@ +mod cell; +mod notebook_ui; +pub use cell::*; +pub use notebook_ui::*; diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs new file mode 100644 index 00000000000000..f86f969a96323f --- /dev/null +++ b/crates/repl/src/notebook/cell.rs @@ -0,0 +1,730 @@ +#![allow(unused, dead_code)] +use std::sync::Arc; + +use editor::{Editor, EditorMode, MultiBuffer}; +use futures::future::Shared; +use gpui::{prelude::*, AppContext, Hsla, Task, TextStyleRefinement, View}; +use language::{Buffer, Language, LanguageRegistry}; +use markdown_preview::{markdown_parser::parse_markdown, markdown_renderer::render_markdown_block}; +use nbformat::v4::{CellId, CellMetadata, CellType}; +use settings::Settings as _; +use theme::ThemeSettings; +use ui::{prelude::*, IconButtonShape}; +use util::ResultExt; + +use crate::{ + notebook::{CODE_BLOCK_INSET, GUTTER_WIDTH}, + outputs::{plain::TerminalOutput, user_error::ErrorView, Output}, +}; + +#[derive(Copy, Clone, PartialEq, PartialOrd)] +pub enum CellPosition { + First, + Middle, + Last, +} + +pub enum CellControlType { + RunCell, + RerunCell, + ClearCell, + CellOptions, + CollapseCell, + ExpandCell, +} + +impl CellControlType { + fn icon_name(&self) -> IconName { + match self { + CellControlType::RunCell => IconName::Play, + CellControlType::RerunCell => IconName::ArrowCircle, + CellControlType::ClearCell => IconName::ListX, + CellControlType::CellOptions => IconName::Ellipsis, + CellControlType::CollapseCell => IconName::ChevronDown, + CellControlType::ExpandCell => IconName::ChevronRight, + } + } +} + +pub struct CellControl { + button: IconButton, +} + +impl CellControl { + fn new(id: impl Into, control_type: CellControlType) -> Self { + let icon_name = control_type.icon_name(); + let id = id.into(); + let button = IconButton::new(id, icon_name) + .icon_size(IconSize::Small) + .shape(IconButtonShape::Square); + Self { button } + } +} + +impl Clickable for CellControl { + fn on_click(self, handler: impl Fn(&gpui::ClickEvent, &mut WindowContext) + 'static) -> Self { + let button = self.button.on_click(handler); + Self { button } + } + + fn cursor_style(self, _cursor_style: gpui::CursorStyle) -> Self { + self + } +} + +/// A notebook cell +#[derive(Clone)] +pub enum Cell { + Code(View), + Markdown(View), + Raw(View), +} + +fn convert_outputs(outputs: &Vec, cx: &mut WindowContext) -> Vec { + outputs + .into_iter() + .map(|output| match output { + nbformat::v4::Output::Stream { text, .. 
} => Output::Stream { + content: cx.new_view(|cx| TerminalOutput::from(&text.0, cx)), + }, + nbformat::v4::Output::DisplayData(display_data) => { + Output::new(&display_data.data, None, cx) + } + nbformat::v4::Output::ExecuteResult(execute_result) => { + Output::new(&execute_result.data, None, cx) + } + nbformat::v4::Output::Error(error) => Output::ErrorOutput(ErrorView { + ename: error.ename.clone(), + evalue: error.evalue.clone(), + traceback: cx.new_view(|cx| TerminalOutput::from(&error.traceback.join("\n"), cx)), + }), + }) + .collect() +} + +impl Cell { + pub fn load( + cell: &nbformat::v4::Cell, + languages: &Arc, + notebook_language: Shared>>>, + cx: &mut WindowContext, + ) -> Self { + match cell { + nbformat::v4::Cell::Markdown { + id, + metadata, + source, + attachments: _, + } => { + let source = source.join(""); + + let view = cx.new_view(|cx| { + let markdown_parsing_task = { + let languages = languages.clone(); + let source = source.clone(); + + cx.spawn(|this, mut cx| async move { + let parsed_markdown = cx + .background_executor() + .spawn(async move { + parse_markdown(&source, None, Some(languages)).await + }) + .await; + + this.update(&mut cx, |cell: &mut MarkdownCell, _| { + cell.parsed_markdown = Some(parsed_markdown); + }) + .log_err(); + }) + }; + + MarkdownCell { + markdown_parsing_task, + languages: languages.clone(), + id: id.clone(), + metadata: metadata.clone(), + source: source.clone(), + parsed_markdown: None, + selected: false, + cell_position: None, + } + }); + + Cell::Markdown(view) + } + nbformat::v4::Cell::Code { + id, + metadata, + execution_count, + source, + outputs, + } => Cell::Code(cx.new_view(|cx| { + let text = source.join(""); + + let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx)); + let multi_buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + + let editor_view = cx.new_view(|cx| { + let mut editor = Editor::new( + EditorMode::AutoHeight { max_lines: 1024 }, + multi_buffer, + None, + false, + cx, + ); + + let theme = ThemeSettings::get_global(cx); + + let refinement = TextStyleRefinement { + font_family: Some(theme.buffer_font.family.clone()), + font_size: Some(theme.buffer_font_size.into()), + color: Some(cx.theme().colors().editor_foreground), + background_color: Some(gpui::transparent_black()), + ..Default::default() + }; + + editor.set_text(text, cx); + editor.set_show_gutter(false, cx); + editor.set_text_style_refinement(refinement); + + // editor.set_read_only(true); + editor + }); + + let buffer = buffer.clone(); + let language_task = cx.spawn(|this, mut cx| async move { + let language = notebook_language.await; + + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(language.clone(), cx); + }); + }); + + CodeCell { + id: id.clone(), + metadata: metadata.clone(), + execution_count: *execution_count, + source: source.join(""), + editor: editor_view, + outputs: convert_outputs(outputs, cx), + selected: false, + language_task, + cell_position: None, + } + })), + nbformat::v4::Cell::Raw { + id, + metadata, + source, + } => Cell::Raw(cx.new_view(|_| RawCell { + id: id.clone(), + metadata: metadata.clone(), + source: source.join(""), + selected: false, + cell_position: None, + })), + } + } +} + +pub trait RenderableCell: Render { + const CELL_TYPE: CellType; + + fn id(&self) -> &CellId; + fn cell_type(&self) -> CellType; + fn metadata(&self) -> &CellMetadata; + fn source(&self) -> &String; + fn selected(&self) -> bool; + fn set_selected(&mut self, selected: bool) -> &mut Self; + fn 
selected_bg_color(&self, cx: &ViewContext) -> Hsla { + if self.selected() { + let mut color = cx.theme().colors().icon_accent; + color.fade_out(0.9); + color + } else { + // TODO: this is wrong + cx.theme().colors().tab_bar_background + } + } + fn control(&self, _cx: &ViewContext) -> Option { + None + } + + fn cell_position_spacer( + &self, + is_first: bool, + cx: &ViewContext, + ) -> Option { + let cell_position = self.cell_position(); + + if (cell_position == Some(&CellPosition::First) && is_first) + || (cell_position == Some(&CellPosition::Last) && !is_first) + { + Some(div().flex().w_full().h(Spacing::XLarge.px(cx))) + } else { + None + } + } + + fn gutter(&self, cx: &ViewContext) -> impl IntoElement { + let is_selected = self.selected(); + + div() + .relative() + .h_full() + .w(px(GUTTER_WIDTH)) + .child( + div() + .w(px(GUTTER_WIDTH)) + .flex() + .flex_none() + .justify_center() + .h_full() + .child( + div() + .flex_none() + .w(px(1.)) + .h_full() + .when(is_selected, |this| this.bg(cx.theme().colors().icon_accent)) + .when(!is_selected, |this| this.bg(cx.theme().colors().border)), + ), + ) + .when_some(self.control(cx), |this, control| { + this.child( + div() + .absolute() + .top(px(CODE_BLOCK_INSET - 2.0)) + .left_0() + .flex() + .flex_none() + .w(px(GUTTER_WIDTH)) + .h(px(GUTTER_WIDTH + 12.0)) + .items_center() + .justify_center() + .bg(cx.theme().colors().tab_bar_background) + .child(control.button), + ) + }) + } + + fn cell_position(&self) -> Option<&CellPosition>; + fn set_cell_position(&mut self, position: CellPosition) -> &mut Self; +} + +pub trait RunnableCell: RenderableCell { + fn execution_count(&self) -> Option; + fn set_execution_count(&mut self, count: i32) -> &mut Self; + fn run(&mut self, cx: &mut ViewContext) -> (); +} + +pub struct MarkdownCell { + id: CellId, + metadata: CellMetadata, + source: String, + parsed_markdown: Option, + markdown_parsing_task: Task<()>, + selected: bool, + cell_position: Option, + languages: Arc, +} + +impl RenderableCell for MarkdownCell { + const CELL_TYPE: CellType = CellType::Markdown; + + fn id(&self) -> &CellId { + &self.id + } + + fn cell_type(&self) -> CellType { + CellType::Markdown + } + + fn metadata(&self) -> &CellMetadata { + &self.metadata + } + + fn source(&self) -> &String { + &self.source + } + + fn selected(&self) -> bool { + self.selected + } + + fn set_selected(&mut self, selected: bool) -> &mut Self { + self.selected = selected; + self + } + + fn control(&self, _: &ViewContext) -> Option { + None + } + + fn cell_position(&self) -> Option<&CellPosition> { + self.cell_position.as_ref() + } + + fn set_cell_position(&mut self, cell_position: CellPosition) -> &mut Self { + self.cell_position = Some(cell_position); + self + } +} + +impl Render for MarkdownCell { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let Some(parsed) = self.parsed_markdown.as_ref() else { + return div(); + }; + + let mut markdown_render_context = + markdown_preview::markdown_renderer::RenderContext::new(None, cx); + + v_flex() + .size_full() + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(true, cx)) + .child( + h_flex() + .w_full() + .pr_6() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter(cx)) + .child( + v_flex() + .size_full() + .flex_1() + .p_3() + .font_ui(cx) + .text_size(TextSize::Default.rems(cx)) + // + .children(parsed.children.iter().map(|child| { + 
div().relative().child(div().relative().child( + render_markdown_block(child, &mut markdown_render_context), + )) + })), + ), + ) + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(false, cx)) + } +} + +pub struct CodeCell { + id: CellId, + metadata: CellMetadata, + execution_count: Option, + source: String, + editor: View, + outputs: Vec, + selected: bool, + cell_position: Option, + language_task: Task<()>, +} + +impl CodeCell { + pub fn is_dirty(&self, cx: &AppContext) -> bool { + self.editor.read(cx).buffer().read(cx).is_dirty(cx) + } + pub fn has_outputs(&self) -> bool { + !self.outputs.is_empty() + } + + pub fn clear_outputs(&mut self) { + self.outputs.clear(); + } + + fn output_control(&self) -> Option { + if self.has_outputs() { + Some(CellControlType::ClearCell) + } else { + None + } + } + + pub fn gutter_output(&self, cx: &ViewContext) -> impl IntoElement { + let is_selected = self.selected(); + + div() + .relative() + .h_full() + .w(px(GUTTER_WIDTH)) + .child( + div() + .w(px(GUTTER_WIDTH)) + .flex() + .flex_none() + .justify_center() + .h_full() + .child( + div() + .flex_none() + .w(px(1.)) + .h_full() + .when(is_selected, |this| this.bg(cx.theme().colors().icon_accent)) + .when(!is_selected, |this| this.bg(cx.theme().colors().border)), + ), + ) + .when(self.has_outputs(), |this| { + this.child( + div() + .absolute() + .top(px(CODE_BLOCK_INSET - 2.0)) + .left_0() + .flex() + .flex_none() + .w(px(GUTTER_WIDTH)) + .h(px(GUTTER_WIDTH + 12.0)) + .items_center() + .justify_center() + .bg(cx.theme().colors().tab_bar_background) + .child(IconButton::new("control", IconName::Ellipsis)), + ) + }) + } +} + +impl RenderableCell for CodeCell { + const CELL_TYPE: CellType = CellType::Code; + + fn id(&self) -> &CellId { + &self.id + } + + fn cell_type(&self) -> CellType { + CellType::Code + } + + fn metadata(&self) -> &CellMetadata { + &self.metadata + } + + fn source(&self) -> &String { + &self.source + } + + fn control(&self, cx: &ViewContext) -> Option { + let cell_control = if self.has_outputs() { + CellControl::new("rerun-cell", CellControlType::RerunCell) + } else { + CellControl::new("run-cell", CellControlType::RunCell) + .on_click(cx.listener(move |this, _, cx| this.run(cx))) + }; + + Some(cell_control) + } + + fn selected(&self) -> bool { + self.selected + } + + fn set_selected(&mut self, selected: bool) -> &mut Self { + self.selected = selected; + self + } + + fn cell_position(&self) -> Option<&CellPosition> { + self.cell_position.as_ref() + } + + fn set_cell_position(&mut self, cell_position: CellPosition) -> &mut Self { + self.cell_position = Some(cell_position); + self + } +} + +impl RunnableCell for CodeCell { + fn run(&mut self, cx: &mut ViewContext) { + println!("Running code cell: {}", self.id); + } + + fn execution_count(&self) -> Option { + self.execution_count + .and_then(|count| if count > 0 { Some(count) } else { None }) + } + + fn set_execution_count(&mut self, count: i32) -> &mut Self { + self.execution_count = Some(count); + self + } +} + +impl Render for CodeCell { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + v_flex() + .size_full() + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(true, cx)) + // Editor portion + .child( + h_flex() + .w_full() + .pr_6() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter(cx)) + .child( + 
div().py_1p5().w_full().child( + div() + .flex() + .size_full() + .flex_1() + .py_3() + .px_5() + .rounded_lg() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_background) + .child(div().w_full().child(self.editor.clone())), + ), + ), + ) + // Output portion + .child( + h_flex() + .w_full() + .pr_6() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter_output(cx)) + .child( + div().py_1p5().w_full().child( + div() + .flex() + .size_full() + .flex_1() + .py_3() + .px_5() + .rounded_lg() + .border_1() + // .border_color(cx.theme().colors().border) + // .bg(cx.theme().colors().editor_background) + .child(div().w_full().children(self.outputs.iter().map( + |output| { + let content = match output { + Output::Plain { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Markdown { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Stream { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Image { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::Message(message) => Some( + div().child(message.clone()).into_any_element(), + ), + Output::Table { content, .. } => { + Some(content.clone().into_any_element()) + } + Output::ErrorOutput(error_view) => { + error_view.render(cx) + } + Output::ClearOutputWaitMarker => None, + }; + + div() + // .w_full() + // .mt_3() + // .p_3() + // .rounded_md() + // .bg(cx.theme().colors().editor_background) + // .border(px(1.)) + // .border_color(cx.theme().colors().border) + // .shadow_sm() + .children(content) + }, + ))), + ), + ), + ) + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(false, cx)) + } +} + +pub struct RawCell { + id: CellId, + metadata: CellMetadata, + source: String, + selected: bool, + cell_position: Option, +} + +impl RenderableCell for RawCell { + const CELL_TYPE: CellType = CellType::Raw; + + fn id(&self) -> &CellId { + &self.id + } + + fn cell_type(&self) -> CellType { + CellType::Raw + } + + fn metadata(&self) -> &CellMetadata { + &self.metadata + } + + fn source(&self) -> &String { + &self.source + } + + fn selected(&self) -> bool { + self.selected + } + + fn set_selected(&mut self, selected: bool) -> &mut Self { + self.selected = selected; + self + } + + fn cell_position(&self) -> Option<&CellPosition> { + self.cell_position.as_ref() + } + + fn set_cell_position(&mut self, cell_position: CellPosition) -> &mut Self { + self.cell_position = Some(cell_position); + self + } +} + +impl Render for RawCell { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + v_flex() + .size_full() + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(true, cx)) + .child( + h_flex() + .w_full() + .pr_2() + .rounded_sm() + .items_start() + .gap(Spacing::Large.rems(cx)) + .bg(self.selected_bg_color(cx)) + .child(self.gutter(cx)) + .child( + div() + .flex() + .size_full() + .flex_1() + .p_3() + .font_ui(cx) + .text_size(TextSize::Default.rems(cx)) + .child(self.source.clone()), + ), + ) + // TODO: Move base cell render into trait impl so we don't have to repeat this + .children(self.cell_position_spacer(false, cx)) + } +} diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs new file mode 100644 index 00000000000000..36d6e293856898 --- /dev/null +++ 
b/crates/repl/src/notebook/notebook_ui.rs
@@ -0,0 +1,672 @@
+#![allow(unused, dead_code)]
+use std::{path::PathBuf, sync::Arc};
+
+use client::proto::ViewId;
+use collections::HashMap;
+use feature_flags::{FeatureFlagAppExt as _, NotebookFeatureFlag};
+use futures::FutureExt;
+use gpui::{
+    actions, list, prelude::*, AppContext, EventEmitter, FocusHandle, FocusableView,
+    ListScrollEvent, ListState, Model, Task,
+};
+use language::LanguageRegistry;
+use project::{Project, ProjectEntryId, ProjectPath};
+use ui::{prelude::*, Tooltip};
+use workspace::item::ItemEvent;
+use workspace::{Item, ItemHandle, ProjectItem, ToolbarItemLocation};
+use workspace::{ToolbarItemEvent, ToolbarItemView};
+
+use super::{Cell, CellPosition, RenderableCell};
+
+use nbformat::v4::CellId;
+use nbformat::v4::Metadata as NotebookMetadata;
+
+pub(crate) const DEFAULT_NOTEBOOK_FORMAT: i32 = 4;
+pub(crate) const DEFAULT_NOTEBOOK_FORMAT_MINOR: i32 = 0;
+
+actions!(
+    notebook,
+    [
+        OpenNotebook,
+        RunAll,
+        ClearOutputs,
+        MoveCellUp,
+        MoveCellDown,
+        AddMarkdownBlock,
+        AddCodeBlock,
+    ]
+);
+
+pub(crate) const MAX_TEXT_BLOCK_WIDTH: f32 = 9999.0;
+pub(crate) const SMALL_SPACING_SIZE: f32 = 8.0;
+pub(crate) const MEDIUM_SPACING_SIZE: f32 = 12.0;
+pub(crate) const LARGE_SPACING_SIZE: f32 = 16.0;
+pub(crate) const GUTTER_WIDTH: f32 = 19.0;
+pub(crate) const CODE_BLOCK_INSET: f32 = MEDIUM_SPACING_SIZE;
+pub(crate) const CONTROL_SIZE: f32 = 20.0;
+
+pub fn init(cx: &mut AppContext) {
+    if cx.has_flag::<NotebookFeatureFlag>() || std::env::var("LOCAL_NOTEBOOK_DEV").is_ok() {
+        workspace::register_project_item::<NotebookEditor>(cx);
+    }
+
+    cx.observe_flag::<NotebookFeatureFlag, _>({
+        move |is_enabled, cx| {
+            if is_enabled {
+                workspace::register_project_item::<NotebookEditor>(cx);
+            } else {
+                // todo: there is no way to unregister a project item, so if the feature flag
+                // gets turned off they need to restart Zed.
+ } + } + }) + .detach(); +} + +pub struct NotebookEditor { + languages: Arc, + + focus_handle: FocusHandle, + project: Model, + path: ProjectPath, + + remote_id: Option, + cell_list: ListState, + + metadata: NotebookMetadata, + nbformat: i32, + nbformat_minor: i32, + selected_cell_index: usize, + cell_order: Vec, + cell_map: HashMap, +} + +impl NotebookEditor { + pub fn new( + project: Model, + notebook_item: Model, + cx: &mut ViewContext, + ) -> Self { + let focus_handle = cx.focus_handle(); + + let notebook = notebook_item.read(cx).notebook.clone(); + + let languages = project.read(cx).languages().clone(); + + let metadata = notebook.metadata; + let nbformat = notebook.nbformat; + let nbformat_minor = notebook.nbformat_minor; + + let language_name = metadata + .language_info + .as_ref() + .map(|l| l.name.clone()) + .or(metadata + .kernelspec + .as_ref() + .and_then(|spec| spec.language.clone())); + + let notebook_language = if let Some(language_name) = language_name { + cx.spawn(|_, _| { + let languages = languages.clone(); + async move { languages.language_for_name(&language_name).await.ok() } + }) + .shared() + } else { + Task::ready(None).shared() + }; + + let languages = project.read(cx).languages().clone(); + let notebook_language = cx + .spawn(|_, _| { + // todo: pull from notebook metadata + const TODO: &'static str = "Python"; + let languages = languages.clone(); + async move { languages.language_for_name(TODO).await.ok() } + }) + .shared(); + + let mut cell_order = vec![]; + let mut cell_map = HashMap::default(); + + for (index, cell) in notebook.cells.iter().enumerate() { + let cell_id = cell.id(); + cell_order.push(cell_id.clone()); + cell_map.insert( + cell_id.clone(), + Cell::load(cell, &languages, notebook_language.clone(), cx), + ); + } + + let view = cx.view().downgrade(); + let cell_count = cell_order.len(); + let cell_order_for_list = cell_order.clone(); + let cell_map_for_list = cell_map.clone(); + + let cell_list = ListState::new( + cell_count, + gpui::ListAlignment::Top, + // TODO: This is a totally random number, + // not sure what this should be + px(3000.), + move |ix, cx| { + let cell_order_for_list = cell_order_for_list.clone(); + let cell_id = cell_order_for_list[ix].clone(); + if let Some(view) = view.upgrade() { + let cell_id = cell_id.clone(); + if let Some(cell) = cell_map_for_list.clone().get(&cell_id) { + view.update(cx, |view, cx| { + view.render_cell(ix, cell, cx).into_any_element() + }) + } else { + div().into_any() + } + } else { + div().into_any() + } + }, + ); + + Self { + languages: languages.clone(), + focus_handle, + project, + path: notebook_item.read(cx).project_path.clone(), + remote_id: None, + cell_list, + selected_cell_index: 0, + metadata, + nbformat, + nbformat_minor, + cell_order: cell_order.clone(), + cell_map: cell_map.clone(), + } + } + + fn has_outputs(&self, cx: &ViewContext) -> bool { + self.cell_map.values().any(|cell| { + if let Cell::Code(code_cell) = cell { + code_cell.read(cx).has_outputs() + } else { + false + } + }) + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.cell_map.values().any(|cell| { + if let Cell::Code(code_cell) = cell { + code_cell.read(cx).is_dirty(cx) + } else { + false + } + }) + } + + fn clear_outputs(&mut self, cx: &mut ViewContext) { + for cell in self.cell_map.values() { + if let Cell::Code(code_cell) = cell { + code_cell.update(cx, |cell, _cx| { + cell.clear_outputs(); + }); + } + } + } + + fn run_cells(&mut self, cx: &mut ViewContext) { + println!("Cells would all run here, if that was 
implemented!"); + } + + fn open_notebook(&mut self, _: &OpenNotebook, _cx: &mut ViewContext) { + println!("Open notebook triggered"); + } + + fn move_cell_up(&mut self, cx: &mut ViewContext) { + println!("Move cell up triggered"); + } + + fn move_cell_down(&mut self, cx: &mut ViewContext) { + println!("Move cell down triggered"); + } + + fn add_markdown_block(&mut self, cx: &mut ViewContext) { + println!("Add markdown block triggered"); + } + + fn add_code_block(&mut self, cx: &mut ViewContext) { + println!("Add code block triggered"); + } + + fn cell_count(&self) -> usize { + self.cell_map.len() + } + + fn selected_index(&self) -> usize { + self.selected_cell_index + } + + pub fn set_selected_index( + &mut self, + index: usize, + jump_to_index: bool, + cx: &mut ViewContext, + ) { + // let previous_index = self.selected_cell_index; + self.selected_cell_index = index; + let current_index = self.selected_cell_index; + + // in the future we may have some `on_cell_change` event that we want to fire here + + if jump_to_index { + self.jump_to_cell(current_index, cx); + } + } + + pub fn select_next(&mut self, _: &menu::SelectNext, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + let index = self.selected_index(); + let ix = if index == count - 1 { + count - 1 + } else { + index + 1 + }; + self.set_selected_index(ix, true, cx); + cx.notify(); + } + } + + pub fn select_previous(&mut self, _: &menu::SelectPrev, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + let index = self.selected_index(); + let ix = if index == 0 { 0 } else { index - 1 }; + self.set_selected_index(ix, true, cx); + cx.notify(); + } + } + + pub fn select_first(&mut self, _: &menu::SelectFirst, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + self.set_selected_index(0, true, cx); + cx.notify(); + } + } + + pub fn select_last(&mut self, _: &menu::SelectLast, cx: &mut ViewContext) { + let count = self.cell_count(); + if count > 0 { + self.set_selected_index(count - 1, true, cx); + cx.notify(); + } + } + + fn jump_to_cell(&mut self, index: usize, _cx: &mut ViewContext) { + self.cell_list.scroll_to_reveal_item(index); + } + + fn button_group(cx: &ViewContext) -> Div { + v_flex() + .gap(Spacing::Small.rems(cx)) + .items_center() + .w(px(CONTROL_SIZE + 4.0)) + .overflow_hidden() + .rounded(px(5.)) + .bg(cx.theme().colors().title_bar_background) + .p_px() + .border_1() + .border_color(cx.theme().colors().border) + } + + fn render_notebook_control( + id: impl Into, + icon: IconName, + _cx: &ViewContext, + ) -> IconButton { + let id: ElementId = ElementId::Name(id.into()); + IconButton::new(id, icon).width(px(CONTROL_SIZE).into()) + } + + fn render_notebook_controls(&self, cx: &ViewContext) -> impl IntoElement { + let has_outputs = self.has_outputs(cx); + + v_flex() + .max_w(px(CONTROL_SIZE + 4.0)) + .items_center() + .gap(Spacing::XXLarge.rems(cx)) + .justify_between() + .flex_none() + .h_full() + .py(Spacing::XLarge.px(cx)) + .child( + v_flex() + .gap(Spacing::Large.rems(cx)) + .child( + Self::button_group(cx) + .child( + Self::render_notebook_control("run-all-cells", IconName::Play, cx) + .tooltip(move |cx| { + Tooltip::for_action("Execute all cells", &RunAll, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(RunAll)); + }), + ) + .child( + Self::render_notebook_control( + "clear-all-outputs", + IconName::ListX, + cx, + ) + .disabled(!has_outputs) + .tooltip(move |cx| { + Tooltip::for_action("Clear all outputs", &ClearOutputs, cx) + }) + 
.on_click(|_, cx| { + cx.dispatch_action(Box::new(ClearOutputs)); + }), + ), + ) + .child( + Self::button_group(cx) + .child( + Self::render_notebook_control( + "move-cell-up", + IconName::ArrowUp, + cx, + ) + .tooltip(move |cx| { + Tooltip::for_action("Move cell up", &MoveCellUp, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(MoveCellUp)); + }), + ) + .child( + Self::render_notebook_control( + "move-cell-down", + IconName::ArrowDown, + cx, + ) + .tooltip(move |cx| { + Tooltip::for_action("Move cell down", &MoveCellDown, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(MoveCellDown)); + }), + ), + ) + .child( + Self::button_group(cx) + .child( + Self::render_notebook_control( + "new-markdown-cell", + IconName::Plus, + cx, + ) + .tooltip(move |cx| { + Tooltip::for_action("Add markdown block", &AddMarkdownBlock, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(AddMarkdownBlock)); + }), + ) + .child( + Self::render_notebook_control("new-code-cell", IconName::Code, cx) + .tooltip(move |cx| { + Tooltip::for_action("Add code block", &AddCodeBlock, cx) + }) + .on_click(|_, cx| { + cx.dispatch_action(Box::new(AddCodeBlock)); + }), + ), + ), + ) + .child( + v_flex() + .gap(Spacing::Large.rems(cx)) + .items_center() + .child(Self::render_notebook_control( + "more-menu", + IconName::Ellipsis, + cx, + )) + .child( + Self::button_group(cx) + .child(IconButton::new("repl", IconName::ReplNeutral)), + ), + ) + } + + fn cell_position(&self, index: usize) -> CellPosition { + match index { + 0 => CellPosition::First, + index if index == self.cell_count() - 1 => CellPosition::Last, + _ => CellPosition::Middle, + } + } + + fn render_cell( + &self, + index: usize, + cell: &Cell, + cx: &mut ViewContext, + ) -> impl IntoElement { + let cell_position = self.cell_position(index); + + let is_selected = index == self.selected_cell_index; + + match cell { + Cell::Code(cell) => { + cell.update(cx, |cell, _cx| { + cell.set_selected(is_selected) + .set_cell_position(cell_position); + }); + cell.clone().into_any_element() + } + Cell::Markdown(cell) => { + cell.update(cx, |cell, _cx| { + cell.set_selected(is_selected) + .set_cell_position(cell_position); + }); + cell.clone().into_any_element() + } + Cell::Raw(cell) => { + cell.update(cx, |cell, _cx| { + cell.set_selected(is_selected) + .set_cell_position(cell_position); + }); + cell.clone().into_any_element() + } + } + } +} + +impl Render for NotebookEditor { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div() + .key_context("notebook") + .track_focus(&self.focus_handle) + .on_action(cx.listener(|this, &OpenNotebook, cx| this.open_notebook(&OpenNotebook, cx))) + .on_action(cx.listener(|this, &ClearOutputs, cx| this.clear_outputs(cx))) + .on_action(cx.listener(|this, &RunAll, cx| this.run_cells(cx))) + .on_action(cx.listener(|this, &MoveCellUp, cx| this.move_cell_up(cx))) + .on_action(cx.listener(|this, &MoveCellDown, cx| this.move_cell_down(cx))) + .on_action(cx.listener(|this, &AddMarkdownBlock, cx| this.add_markdown_block(cx))) + .on_action(cx.listener(|this, &AddCodeBlock, cx| this.add_code_block(cx))) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .flex() + .items_start() + .size_full() + .overflow_hidden() + .px(Spacing::XLarge.px(cx)) + .gap(Spacing::XLarge.px(cx)) + .bg(cx.theme().colors().tab_bar_background) + .child( + v_flex() + .id("notebook-cells") + .flex_1() + 
.size_full() + .overflow_y_scroll() + .child(list(self.cell_list.clone()).size_full()), + ) + .child(self.render_notebook_controls(cx)) + } +} + +impl FocusableView for NotebookEditor { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } +} + +pub struct NotebookItem { + path: PathBuf, + project_path: ProjectPath, + notebook: nbformat::v4::Notebook, +} + +impl project::Item for NotebookItem { + fn try_open( + project: &Model, + path: &ProjectPath, + cx: &mut AppContext, + ) -> Option>>> { + let path = path.clone(); + let project = project.clone(); + + if path.path.extension().unwrap_or_default() == "ipynb" { + Some(cx.spawn(|mut cx| async move { + let abs_path = project + .read_with(&cx, |project, cx| project.absolute_path(&path, cx))? + .ok_or_else(|| anyhow::anyhow!("Failed to find the absolute path"))?; + + let file_content = std::fs::read_to_string(abs_path.clone())?; + let notebook = nbformat::parse_notebook(&file_content); + + let notebook = match notebook { + Ok(nbformat::Notebook::V4(notebook)) => notebook, + Ok(nbformat::Notebook::Legacy(legacy_notebook)) => { + // todo!(): Decide if we want to mutate the notebook by including Cell IDs + // and any other conversions + let notebook = nbformat::upgrade_legacy_notebook(legacy_notebook)?; + notebook + } + Err(e) => { + anyhow::bail!("Failed to parse notebook: {:?}", e); + } + }; + + cx.new_model(|_| NotebookItem { + path: abs_path, + project_path: path, + notebook, + }) + })) + } else { + None + } + } + + fn entry_id(&self, _: &AppContext) -> Option { + None + } + + fn project_path(&self, _: &AppContext) -> Option { + Some(self.project_path.clone()) + } +} + +impl EventEmitter<()> for NotebookEditor {} + +// pub struct NotebookControls { +// pane_focused: bool, +// active_item: Option>, +// // subscription: Option, +// } + +// impl NotebookControls { +// pub fn new() -> Self { +// Self { +// pane_focused: false, +// active_item: Default::default(), +// // subscription: Default::default(), +// } +// } +// } + +// impl EventEmitter for NotebookControls {} + +// impl Render for NotebookControls { +// fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { +// div().child("notebook controls") +// } +// } + +// impl ToolbarItemView for NotebookControls { +// fn set_active_pane_item( +// &mut self, +// active_pane_item: Option<&dyn workspace::ItemHandle>, +// cx: &mut ViewContext, +// ) -> workspace::ToolbarItemLocation { +// cx.notify(); +// self.active_item = None; + +// let Some(item) = active_pane_item else { +// return ToolbarItemLocation::Hidden; +// }; + +// ToolbarItemLocation::PrimaryLeft +// } + +// fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext) { +// self.pane_focused = pane_focused; +// } +// } + +impl Item for NotebookEditor { + type Event = (); + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + let path = self.path.path.clone(); + + path.file_stem() + .map(|stem| stem.to_string_lossy().into_owned()) + .map(SharedString::from) + } + + fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + Some(IconName::Book.into()) + } + + fn show_toolbar(&self) -> bool { + false + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + // self.is_dirty(cx) + false + } +} + +// TODO: Implement this to allow us to persist to the database, etc: +// impl SerializableItem for NotebookEditor {} + +impl ProjectItem for NotebookEditor { + type Item = NotebookItem; + + fn for_project_item( + project: Model, + item: Model, + cx: &mut ViewContext, + ) -> Self + 
where + Self: Sized, + { + Self::new(project, item, cx) + } +} diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index 95fcb98ae3a8a5..b705a155681922 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -56,7 +56,7 @@ use table::TableView; pub mod plain; use plain::TerminalOutput; -mod user_error; +pub(crate) mod user_error; use user_error::ErrorView; use workspace::Workspace; @@ -201,7 +201,7 @@ impl Output { ) } - fn render( + pub fn render( &self, workspace: WeakView, diff --git a/crates/repl/src/outputs/image.rs b/crates/repl/src/outputs/image.rs index 15881aa91594f2..648f4aa82c0850 100644 --- a/crates/repl/src/outputs/image.rs +++ b/crates/repl/src/outputs/image.rs @@ -16,7 +16,7 @@ pub struct ImageView { impl ImageView { pub fn from(base64_encoded_data: &str) -> Result { - let bytes = BASE64_STANDARD.decode(base64_encoded_data)?; + let bytes = BASE64_STANDARD.decode(base64_encoded_data.trim())?; let format = image::guess_format(&bytes)?; let mut data = image::load_from_memory_with_format(&bytes, format)?.into_rgba8(); diff --git a/crates/repl/src/repl.rs b/crates/repl/src/repl.rs index b5b791665bca57..75a3da645649b8 100644 --- a/crates/repl/src/repl.rs +++ b/crates/repl/src/repl.rs @@ -1,6 +1,7 @@ mod components; mod jupyter_settings; mod kernels; +pub mod notebook; mod outputs; mod repl_editor; mod repl_sessions_ui; diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 6c86257f30456e..e07958d0e4e4ba 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; use editor::Editor; -use gpui::{prelude::*, AppContext, Entity, View, WeakView, WindowContext}; +use gpui::{prelude::*, Entity, View, WeakView, WindowContext}; use language::{BufferSnapshot, Language, LanguageName, Point}; use crate::repl_store::ReplStore; @@ -103,7 +103,7 @@ pub enum SessionSupport { Unsupported, } -pub fn session(editor: WeakView, cx: &mut AppContext) -> SessionSupport { +pub fn session(editor: WeakView, cx: &mut WindowContext) -> SessionSupport { let store = ReplStore::global(cx); let entity_id = editor.entity_id(); @@ -311,17 +311,21 @@ fn language_supported(language: &Arc) -> bool { } } -fn get_language(editor: WeakView, cx: &mut AppContext) -> Option> { - let editor = editor.upgrade()?; - let selection = editor.read(cx).selections.newest::(cx); - let buffer = editor.read(cx).buffer().read(cx).snapshot(cx); - buffer.language_at(selection.head()).cloned() +fn get_language(editor: WeakView, cx: &mut WindowContext) -> Option> { + editor + .update(cx, |editor, cx| { + let selection = editor.selections.newest::(cx); + let buffer = editor.buffer().read(cx).snapshot(cx); + buffer.language_at(selection.head()).cloned() + }) + .ok() + .flatten() } #[cfg(test)] mod tests { use super::*; - use gpui::Context; + use gpui::{AppContext, Context}; use indoc::indoc; use language::{Buffer, Language, LanguageConfig, LanguageRegistry}; diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 309ceaf0bfc7b6..13f59797323299 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -14,6 +14,7 @@ path = "src/rope.rs" [dependencies] arrayvec = "0.7.1" log.workspace = true +rayon.workspace = true smallvec.workspace = true sum_tree.workspace = true unicode-segmentation.workspace = true diff --git a/crates/rope/benches/rope_benchmark.rs b/crates/rope/benches/rope_benchmark.rs index 1f95559d771b7b..01811c0c86740c 100644 --- 
a/crates/rope/benches/rope_benchmark.rs +++ b/crates/rope/benches/rope_benchmark.rs @@ -171,6 +171,25 @@ fn rope_benchmarks(c: &mut Criterion) { }); } group.finish(); + + let mut group = c.benchmark_group("point_to_offset"); + for size in sizes.iter() { + group.throughput(Throughput::Bytes(*size as u64)); + group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { + let rope = generate_random_rope(rng.clone(), *size); + + b.iter_batched( + || generate_random_rope_points(rng.clone(), &rope), + |offsets| { + for offset in offsets.iter() { + black_box(rope.point_to_offset(*offset)); + } + }, + BatchSize::SmallInput, + ); + }); + } + group.finish(); } criterion_group!(benches, rope_benchmarks); diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs new file mode 100644 index 00000000000000..0490c5a9cd4117 --- /dev/null +++ b/crates/rope/src/chunk.rs @@ -0,0 +1,878 @@ +use crate::{OffsetUtf16, Point, PointUtf16, TextSummary, Unclipped}; +use arrayvec::ArrayString; +use std::{cmp, ops::Range}; +use sum_tree::Bias; +use unicode_segmentation::GraphemeCursor; +use util::debug_panic; + +pub(crate) const MIN_BASE: usize = if cfg!(test) { 6 } else { 64 }; +pub(crate) const MAX_BASE: usize = MIN_BASE * 2; + +#[derive(Clone, Debug, Default)] +pub struct Chunk { + chars: u128, + chars_utf16: u128, + newlines: u128, + pub text: ArrayString, +} + +impl Chunk { + #[inline(always)] + pub fn new(text: &str) -> Self { + let mut this = Chunk::default(); + this.push_str(text); + this + } + + #[inline(always)] + pub fn push_str(&mut self, text: &str) { + for (char_ix, c) in text.char_indices() { + let ix = self.text.len() + char_ix; + self.chars |= 1 << ix; + self.chars_utf16 |= 1 << ix; + self.chars_utf16 |= (c.len_utf16() as u128) << ix; + self.newlines |= ((c == '\n') as u128) << ix; + } + self.text.push_str(text); + } + + #[inline(always)] + pub fn append(&mut self, slice: ChunkSlice) { + if slice.is_empty() { + return; + }; + + let base_ix = self.text.len(); + self.chars |= slice.chars << base_ix; + self.chars_utf16 |= slice.chars_utf16 << base_ix; + self.newlines |= slice.newlines << base_ix; + self.text.push_str(&slice.text); + } + + #[inline(always)] + pub fn as_slice(&self) -> ChunkSlice { + ChunkSlice { + chars: self.chars, + chars_utf16: self.chars_utf16, + newlines: self.newlines, + text: &self.text, + } + } + + #[inline(always)] + pub fn slice(&self, range: Range) -> ChunkSlice { + self.as_slice().slice(range) + } +} + +#[derive(Clone, Copy, Debug)] +pub struct ChunkSlice<'a> { + chars: u128, + chars_utf16: u128, + newlines: u128, + text: &'a str, +} + +impl<'a> Into for ChunkSlice<'a> { + fn into(self) -> Chunk { + Chunk { + chars: self.chars, + chars_utf16: self.chars_utf16, + newlines: self.newlines, + text: self.text.try_into().unwrap(), + } + } +} + +impl<'a> ChunkSlice<'a> { + #[inline(always)] + pub fn is_empty(self) -> bool { + self.text.is_empty() + } + + #[inline(always)] + pub fn is_char_boundary(self, offset: usize) -> bool { + self.text.is_char_boundary(offset) + } + + #[inline(always)] + pub fn split_at(self, mid: usize) -> (ChunkSlice<'a>, ChunkSlice<'a>) { + if mid == MAX_BASE { + let left = self; + let right = ChunkSlice { + chars: 0, + chars_utf16: 0, + newlines: 0, + text: "", + }; + (left, right) + } else { + let mask = if mid == MAX_BASE { + u128::MAX + } else { + (1u128 << mid) - 1 + }; + let (left_text, right_text) = self.text.split_at(mid); + let left = ChunkSlice { + chars: self.chars & mask, + chars_utf16: self.chars_utf16 & mask, + newlines: 
self.newlines & mask, + text: left_text, + }; + let right = ChunkSlice { + chars: self.chars >> mid, + chars_utf16: self.chars_utf16 >> mid, + newlines: self.newlines >> mid, + text: right_text, + }; + (left, right) + } + } + + #[inline(always)] + pub fn slice(self, range: Range) -> Self { + let mask = if range.end == MAX_BASE { + u128::MAX + } else { + (1u128 << range.end) - 1 + }; + if range.start == MAX_BASE { + Self { + chars: 0, + chars_utf16: 0, + newlines: 0, + text: "", + } + } else { + Self { + chars: (self.chars & mask) >> range.start, + chars_utf16: (self.chars_utf16 & mask) >> range.start, + newlines: (self.newlines & mask) >> range.start, + text: &self.text[range], + } + } + } + + #[inline(always)] + pub fn text_summary(&self) -> TextSummary { + let (longest_row, longest_row_chars) = self.longest_row(); + TextSummary { + len: self.len(), + len_utf16: self.len_utf16(), + lines: self.lines(), + first_line_chars: self.first_line_chars(), + last_line_chars: self.last_line_chars(), + last_line_len_utf16: self.last_line_len_utf16(), + longest_row, + longest_row_chars, + } + } + + /// Get length in bytes + #[inline(always)] + pub fn len(&self) -> usize { + self.text.len() + } + + /// Get length in UTF-16 code units + #[inline(always)] + pub fn len_utf16(&self) -> OffsetUtf16 { + OffsetUtf16(self.chars_utf16.count_ones() as usize) + } + + /// Get point representing number of lines and length of last line + #[inline(always)] + pub fn lines(&self) -> Point { + let row = self.newlines.count_ones(); + let column = self.newlines.leading_zeros() - (u128::BITS - self.text.len() as u32); + Point::new(row, column) + } + + /// Get number of chars in first line + #[inline(always)] + pub fn first_line_chars(&self) -> u32 { + if self.newlines == 0 { + self.chars.count_ones() + } else { + let mask = (1u128 << self.newlines.trailing_zeros()) - 1; + (self.chars & mask).count_ones() + } + } + + /// Get number of chars in last line + #[inline(always)] + pub fn last_line_chars(&self) -> u32 { + if self.newlines == 0 { + self.chars.count_ones() + } else { + let mask = !(u128::MAX >> self.newlines.leading_zeros()); + (self.chars & mask).count_ones() + } + } + + /// Get number of UTF-16 code units in last line + #[inline(always)] + pub fn last_line_len_utf16(&self) -> u32 { + if self.newlines == 0 { + self.chars_utf16.count_ones() + } else { + let mask = !(u128::MAX >> self.newlines.leading_zeros()); + (self.chars_utf16 & mask).count_ones() + } + } + + /// Get the longest row in the chunk and its length in characters. 
+ #[inline(always)] + pub fn longest_row(&self) -> (u32, u32) { + let mut chars = self.chars; + let mut newlines = self.newlines; + let mut row = 0; + let mut longest_row = 0; + let mut longest_row_chars = 0; + while newlines > 0 { + let newline_ix = newlines.trailing_zeros(); + let row_chars = (chars & ((1 << newline_ix) - 1)).count_ones() as u8; + if row_chars > longest_row_chars { + longest_row = row; + longest_row_chars = row_chars; + } + + newlines >>= newline_ix; + newlines >>= 1; + chars >>= newline_ix; + chars >>= 1; + row += 1; + } + + let row_chars = chars.count_ones() as u8; + if row_chars > longest_row_chars { + (row, row_chars as u32) + } else { + (longest_row, longest_row_chars as u32) + } + } + + #[inline(always)] + pub fn offset_to_point(&self, offset: usize) -> Point { + let mask = if offset == MAX_BASE { + u128::MAX + } else { + (1u128 << offset) - 1 + }; + let row = (self.newlines & mask).count_ones(); + let newline_ix = u128::BITS - (self.newlines & mask).leading_zeros(); + let column = (offset - newline_ix as usize) as u32; + Point::new(row, column) + } + + #[inline(always)] + pub fn point_to_offset(&self, point: Point) -> usize { + if point.row > self.lines().row { + debug_panic!( + "point {:?} extends beyond rows for string {:?}", + point, + self.text + ); + return self.len(); + } + + let row_offset_range = self.offset_range_for_row(point.row); + if point.column > row_offset_range.len() as u32 { + debug_panic!( + "point {:?} extends beyond row for string {:?}", + point, + self.text + ); + row_offset_range.end + } else { + row_offset_range.start + point.column as usize + } + } + + #[inline(always)] + pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { + let mask = if offset == MAX_BASE { + u128::MAX + } else { + (1u128 << offset) - 1 + }; + OffsetUtf16((self.chars_utf16 & mask).count_ones() as usize) + } + + #[inline(always)] + pub fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize { + if target.0 == 0 { + 0 + } else { + let ix = nth_set_bit(self.chars_utf16, target.0) + 1; + if ix == MAX_BASE { + MAX_BASE + } else { + let utf8_additional_len = cmp::min( + (self.chars_utf16 >> ix).trailing_zeros() as usize, + self.text.len() - ix, + ); + ix + utf8_additional_len + } + } + } + + #[inline(always)] + pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { + let mask = if offset == MAX_BASE { + u128::MAX + } else { + (1u128 << offset) - 1 + }; + let row = (self.newlines & mask).count_ones(); + let newline_ix = u128::BITS - (self.newlines & mask).leading_zeros(); + let column = if newline_ix as usize == MAX_BASE { + 0 + } else { + ((self.chars_utf16 & mask) >> newline_ix).count_ones() + }; + PointUtf16::new(row, column) + } + + #[inline(always)] + pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 { + self.offset_to_point_utf16(self.point_to_offset(point)) + } + + #[inline(always)] + pub fn point_utf16_to_offset(&self, point: PointUtf16, clip: bool) -> usize { + let lines = self.lines(); + if point.row > lines.row { + if !clip { + debug_panic!( + "point {:?} is beyond this chunk's extent {:?}", + point, + self.text + ); + } + return self.len(); + } + + let row_offset_range = self.offset_range_for_row(point.row); + let line = self.slice(row_offset_range.clone()); + if point.column > line.last_line_len_utf16() { + if !clip { + debug_panic!( + "point {:?} is beyond the end of the line in chunk {:?}", + point, + self.text + ); + } + return line.len(); + } + + let mut offset = row_offset_range.start; + if point.column > 0 { + 
offset += line.offset_utf16_to_offset(OffsetUtf16(point.column as usize)); + if !self.text.is_char_boundary(offset) { + offset -= 1; + while !self.text.is_char_boundary(offset) { + offset -= 1; + } + if !clip { + debug_panic!( + "point {:?} is within character in chunk {:?}", + point, + self.text, + ); + } + } + } + offset + } + + #[inline(always)] + pub fn unclipped_point_utf16_to_point(&self, point: Unclipped) -> Point { + let max_point = self.lines(); + if point.0.row > max_point.row { + return max_point; + } + + let row_offset_range = self.offset_range_for_row(point.0.row); + let line = self.slice(row_offset_range.clone()); + if point.0.column == 0 { + Point::new(point.0.row, 0) + } else if point.0.column >= line.len_utf16().0 as u32 { + Point::new(point.0.row, line.len() as u32) + } else { + let mut column = line.offset_utf16_to_offset(OffsetUtf16(point.0.column as usize)); + while !line.text.is_char_boundary(column) { + column -= 1; + } + Point::new(point.0.row, column as u32) + } + } + + #[inline(always)] + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + let max_point = self.lines(); + if point.row > max_point.row { + return max_point; + } + + let line = self.slice(self.offset_range_for_row(point.row)); + if point.column == 0 { + point + } else if point.column >= line.len() as u32 { + Point::new(point.row, line.len() as u32) + } else { + let mut column = point.column as usize; + let bytes = line.text.as_bytes(); + if bytes[column - 1] < 128 && bytes[column] < 128 { + return Point::new(point.row, column as u32); + } + + let mut grapheme_cursor = GraphemeCursor::new(column, bytes.len(), true); + loop { + if line.is_char_boundary(column) + && grapheme_cursor.is_boundary(line.text, 0).unwrap_or(false) + { + break; + } + + match bias { + Bias::Left => column -= 1, + Bias::Right => column += 1, + } + grapheme_cursor.set_cursor(column); + } + Point::new(point.row, column as u32) + } + } + + #[inline(always)] + pub fn clip_point_utf16(&self, point: Unclipped, bias: Bias) -> PointUtf16 { + let max_point = self.lines(); + if point.0.row > max_point.row { + PointUtf16::new(max_point.row, self.last_line_len_utf16()) + } else { + let line = self.slice(self.offset_range_for_row(point.0.row)); + let column = line.clip_offset_utf16(OffsetUtf16(point.0.column as usize), bias); + PointUtf16::new(point.0.row, column.0 as u32) + } + } + + #[inline(always)] + pub fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 { + if target == OffsetUtf16::default() { + OffsetUtf16::default() + } else if target >= self.len_utf16() { + self.len_utf16() + } else { + let mut offset = self.offset_utf16_to_offset(target); + while !self.text.is_char_boundary(offset) { + if bias == Bias::Left { + offset -= 1; + } else { + offset += 1; + } + } + self.offset_to_offset_utf16(offset) + } + } + + #[inline(always)] + fn offset_range_for_row(&self, row: u32) -> Range { + let row_start = if row > 0 { + nth_set_bit(self.newlines, row as usize) + 1 + } else { + 0 + }; + let row_len = if row_start == MAX_BASE { + 0 + } else { + cmp::min( + (self.newlines >> row_start).trailing_zeros(), + (self.text.len() - row_start) as u32, + ) + }; + row_start..row_start + row_len as usize + } +} + +/// Finds the n-th bit that is set to 1. 
+#[inline(always)] +fn nth_set_bit(v: u128, n: usize) -> usize { + let low = v as u64; + let high = (v >> 64) as u64; + + let low_count = low.count_ones() as usize; + if n > low_count { + 64 + nth_set_bit_u64(high, (n - low_count) as u64) as usize + } else { + nth_set_bit_u64(low, n as u64) as usize + } +} + +#[inline(always)] +fn nth_set_bit_u64(v: u64, mut n: u64) -> u64 { + let v = v.reverse_bits(); + let mut s: u64 = 64; + + // Parallel bit count intermediates + let a = v - ((v >> 1) & (u64::MAX / 3)); + let b = (a & (u64::MAX / 5)) + ((a >> 2) & (u64::MAX / 5)); + let c = (b + (b >> 4)) & (u64::MAX / 0x11); + let d = (c + (c >> 8)) & (u64::MAX / 0x101); + + // Branchless select + let t = (d >> 32) + (d >> 48); + s -= (t.wrapping_sub(n) & 256) >> 3; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (d >> (s - 16)) & 0xff; + s -= (t.wrapping_sub(n) & 256) >> 4; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (c >> (s - 8)) & 0xf; + s -= (t.wrapping_sub(n) & 256) >> 5; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (b >> (s - 4)) & 0x7; + s -= (t.wrapping_sub(n) & 256) >> 6; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (a >> (s - 2)) & 0x3; + s -= (t.wrapping_sub(n) & 256) >> 7; + n -= t & (t.wrapping_sub(n) >> 8); + + let t = (v >> (s - 1)) & 0x1; + s -= (t.wrapping_sub(n) & 256) >> 8; + + 65 - s - 1 +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::prelude::*; + use util::RandomCharIter; + + #[gpui::test(iterations = 100)] + fn test_random_chunks(mut rng: StdRng) { + let chunk_len = rng.gen_range(0..=MAX_BASE); + let text = RandomCharIter::new(&mut rng) + .take(chunk_len) + .collect::(); + let mut ix = chunk_len; + while !text.is_char_boundary(ix) { + ix -= 1; + } + let text = &text[..ix]; + + log::info!("Chunk: {:?}", text); + let chunk = Chunk::new(&text); + verify_chunk(chunk.as_slice(), text); + + for _ in 0..10 { + let mut start = rng.gen_range(0..=chunk.text.len()); + let mut end = rng.gen_range(start..=chunk.text.len()); + while !chunk.text.is_char_boundary(start) { + start -= 1; + } + while !chunk.text.is_char_boundary(end) { + end -= 1; + } + let range = start..end; + log::info!("Range: {:?}", range); + let text_slice = &text[range.clone()]; + let chunk_slice = chunk.slice(range); + verify_chunk(chunk_slice, text_slice); + } + } + + #[gpui::test(iterations = 1000)] + fn test_nth_set_bit_random(mut rng: StdRng) { + let set_count = rng.gen_range(0..=128); + let mut set_bits = (0..128).choose_multiple(&mut rng, set_count); + set_bits.sort(); + let mut n = 0; + for ix in set_bits.iter().copied() { + n |= 1 << ix; + } + + for (mut ix, position) in set_bits.into_iter().enumerate() { + ix += 1; + assert_eq!( + nth_set_bit(n, ix), + position, + "nth_set_bit({:0128b}, {})", + n, + ix + ); + } + } + + fn verify_chunk(chunk: ChunkSlice<'_>, text: &str) { + let mut offset = 0; + let mut offset_utf16 = OffsetUtf16(0); + let mut point = Point::zero(); + let mut point_utf16 = PointUtf16::zero(); + + log::info!("Verifying chunk {:?}", text); + assert_eq!(chunk.offset_to_point(0), Point::zero()); + + for c in text.chars() { + let expected_point = chunk.offset_to_point(offset); + assert_eq!(point, expected_point, "mismatch at offset {}", offset); + assert_eq!( + chunk.point_to_offset(point), + offset, + "mismatch at point {:?}", + point + ); + assert_eq!( + chunk.offset_to_offset_utf16(offset), + offset_utf16, + "mismatch at offset {}", + offset + ); + assert_eq!( + chunk.offset_utf16_to_offset(offset_utf16), + offset, + "mismatch at offset_utf16 {:?}", + offset_utf16 + ); + 
assert_eq!( + chunk.point_to_point_utf16(point), + point_utf16, + "mismatch at point {:?}", + point + ); + assert_eq!( + chunk.point_utf16_to_offset(point_utf16, false), + offset, + "mismatch at point_utf16 {:?}", + point_utf16 + ); + assert_eq!( + chunk.unclipped_point_utf16_to_point(Unclipped(point_utf16)), + point, + "mismatch for unclipped_point_utf16_to_point at {:?}", + point_utf16 + ); + + assert_eq!( + chunk.clip_point(point, Bias::Left), + point, + "incorrect left clip at {:?}", + point + ); + assert_eq!( + chunk.clip_point(point, Bias::Right), + point, + "incorrect right clip at {:?}", + point + ); + + for i in 1..c.len_utf8() { + let test_point = Point::new(point.row, point.column + i as u32); + assert_eq!( + chunk.clip_point(test_point, Bias::Left), + point, + "incorrect left clip within multi-byte char at {:?}", + test_point + ); + assert_eq!( + chunk.clip_point(test_point, Bias::Right), + Point::new(point.row, point.column + c.len_utf8() as u32), + "incorrect right clip within multi-byte char at {:?}", + test_point + ); + } + + for i in 1..c.len_utf16() { + let test_point = Unclipped(PointUtf16::new( + point_utf16.row, + point_utf16.column + i as u32, + )); + assert_eq!( + chunk.unclipped_point_utf16_to_point(test_point), + point, + "incorrect unclipped_point_utf16_to_point within multi-byte char at {:?}", + test_point + ); + assert_eq!( + chunk.clip_point_utf16(test_point, Bias::Left), + point_utf16, + "incorrect left clip_point_utf16 within multi-byte char at {:?}", + test_point + ); + assert_eq!( + chunk.clip_point_utf16(test_point, Bias::Right), + PointUtf16::new(point_utf16.row, point_utf16.column + c.len_utf16() as u32), + "incorrect right clip_point_utf16 within multi-byte char at {:?}", + test_point + ); + + let test_offset = OffsetUtf16(offset_utf16.0 + i); + assert_eq!( + chunk.clip_offset_utf16(test_offset, Bias::Left), + offset_utf16, + "incorrect left clip_offset_utf16 within multi-byte char at {:?}", + test_offset + ); + assert_eq!( + chunk.clip_offset_utf16(test_offset, Bias::Right), + OffsetUtf16(offset_utf16.0 + c.len_utf16()), + "incorrect right clip_offset_utf16 within multi-byte char at {:?}", + test_offset + ); + } + + if c == '\n' { + point.row += 1; + point.column = 0; + point_utf16.row += 1; + point_utf16.column = 0; + } else { + point.column += c.len_utf8() as u32; + point_utf16.column += c.len_utf16() as u32; + } + + offset += c.len_utf8(); + offset_utf16.0 += c.len_utf16(); + } + + let final_point = chunk.offset_to_point(offset); + assert_eq!(point, final_point, "mismatch at final offset {}", offset); + assert_eq!( + chunk.point_to_offset(point), + offset, + "mismatch at point {:?}", + point + ); + assert_eq!( + chunk.offset_to_offset_utf16(offset), + offset_utf16, + "mismatch at offset {}", + offset + ); + assert_eq!( + chunk.offset_utf16_to_offset(offset_utf16), + offset, + "mismatch at offset_utf16 {:?}", + offset_utf16 + ); + assert_eq!( + chunk.point_to_point_utf16(point), + point_utf16, + "mismatch at final point {:?}", + point + ); + assert_eq!( + chunk.point_utf16_to_offset(point_utf16, false), + offset, + "mismatch at final point_utf16 {:?}", + point_utf16 + ); + assert_eq!( + chunk.unclipped_point_utf16_to_point(Unclipped(point_utf16)), + point, + "mismatch for unclipped_point_utf16_to_point at final point {:?}", + point_utf16 + ); + assert_eq!( + chunk.clip_point(point, Bias::Left), + point, + "incorrect left clip at final point {:?}", + point + ); + assert_eq!( + chunk.clip_point(point, Bias::Right), + point, + "incorrect right clip at 
final point {:?}", + point + ); + assert_eq!( + chunk.clip_point_utf16(Unclipped(point_utf16), Bias::Left), + point_utf16, + "incorrect left clip_point_utf16 at final point {:?}", + point_utf16 + ); + assert_eq!( + chunk.clip_point_utf16(Unclipped(point_utf16), Bias::Right), + point_utf16, + "incorrect right clip_point_utf16 at final point {:?}", + point_utf16 + ); + assert_eq!( + chunk.clip_offset_utf16(offset_utf16, Bias::Left), + offset_utf16, + "incorrect left clip_offset_utf16 at final offset {:?}", + offset_utf16 + ); + assert_eq!( + chunk.clip_offset_utf16(offset_utf16, Bias::Right), + offset_utf16, + "incorrect right clip_offset_utf16 at final offset {:?}", + offset_utf16 + ); + + // Verify length methods + assert_eq!(chunk.len(), text.len()); + assert_eq!( + chunk.len_utf16().0, + text.chars().map(|c| c.len_utf16()).sum::() + ); + + // Verify line counting + let lines = chunk.lines(); + let mut newline_count = 0; + let mut last_line_len = 0; + for c in text.chars() { + if c == '\n' { + newline_count += 1; + last_line_len = 0; + } else { + last_line_len += c.len_utf8() as u32; + } + } + assert_eq!(lines, Point::new(newline_count, last_line_len)); + + // Verify first/last line chars + if !text.is_empty() { + let first_line = text.split('\n').next().unwrap(); + assert_eq!(chunk.first_line_chars(), first_line.chars().count() as u32); + + let last_line = text.split('\n').last().unwrap(); + assert_eq!(chunk.last_line_chars(), last_line.chars().count() as u32); + assert_eq!( + chunk.last_line_len_utf16(), + last_line.chars().map(|c| c.len_utf16() as u32).sum::() + ); + } + + // Verify longest row + let (longest_row, longest_chars) = chunk.longest_row(); + let mut max_chars = 0; + let mut current_row = 0; + let mut current_chars = 0; + let mut max_row = 0; + + for c in text.chars() { + if c == '\n' { + if current_chars > max_chars { + max_chars = current_chars; + max_row = current_row; + } + current_row += 1; + current_chars = 0; + } else { + current_chars += 1; + } + } + + if current_chars > max_chars { + max_chars = current_chars; + max_row = current_row; + } + + assert_eq!((max_row, max_chars as u32), (longest_row, longest_chars)); + } +} diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 68ff7d5c6956aa..89cb1e7b6319f3 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -1,9 +1,11 @@ +mod chunk; mod offset_utf16; mod point; mod point_utf16; mod unclipped; -use arrayvec::ArrayString; +use chunk::{Chunk, ChunkSlice}; +use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; use smallvec::SmallVec; use std::{ cmp, fmt, io, mem, @@ -11,20 +13,12 @@ use std::{ str, }; use sum_tree::{Bias, Dimension, SumTree}; -use unicode_segmentation::GraphemeCursor; -use util::debug_panic; pub use offset_utf16::OffsetUtf16; pub use point::Point; pub use point_utf16::PointUtf16; pub use unclipped::Unclipped; -#[cfg(test)] -const CHUNK_BASE: usize = 6; - -#[cfg(not(test))] -const CHUNK_BASE: usize = 64; - #[derive(Clone, Default)] pub struct Rope { chunks: SumTree, @@ -36,18 +30,25 @@ impl Rope { } pub fn append(&mut self, rope: Rope) { - let mut chunks = rope.chunks.cursor::<()>(&()); - chunks.next(&()); - if let Some(chunk) = chunks.item() { - if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE) - || chunk.0.len() < CHUNK_BASE + if let Some(chunk) = rope.chunks.first() { + if self + .chunks + .last() + .map_or(false, |c| c.text.len() < chunk::MIN_BASE) + || chunk.text.len() < chunk::MIN_BASE { - self.push(&chunk.0); + self.push_chunk(chunk.as_slice()); 
+ + let mut chunks = rope.chunks.cursor::<()>(&()); + chunks.next(&()); chunks.next(&()); + self.chunks.append(chunks.suffix(&()), &()); + self.check_invariants(); + return; } } - self.chunks.append(chunks.suffix(&()), &()); + self.chunks.append(rope.chunks.clone(), &()); self.check_invariants(); } @@ -77,11 +78,13 @@ impl Rope { pub fn push(&mut self, mut text: &str) { self.chunks.update_last( |last_chunk| { - let split_ix = if last_chunk.0.len() + text.len() <= 2 * CHUNK_BASE { + let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE { text.len() } else { - let mut split_ix = - cmp::min(CHUNK_BASE.saturating_sub(last_chunk.0.len()), text.len()); + let mut split_ix = cmp::min( + chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), + text.len(), + ); while !text.is_char_boundary(split_ix) { split_ix += 1; } @@ -89,7 +92,7 @@ impl Rope { }; let (suffix, remainder) = text.split_at(split_ix); - last_chunk.0.push_str(suffix); + last_chunk.push_str(suffix); text = remainder; }, &(), @@ -101,12 +104,12 @@ impl Rope { let mut new_chunks = SmallVec::<[_; 16]>::new(); while !text.is_empty() { - let mut split_ix = cmp::min(2 * CHUNK_BASE, text.len()); + let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); while !text.is_char_boundary(split_ix) { split_ix -= 1; } let (chunk, remainder) = text.split_at(split_ix); - new_chunks.push(Chunk(ArrayString::from(chunk).unwrap())); + new_chunks.push(chunk); text = remainder; } @@ -116,9 +119,11 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - self.chunks.par_extend(new_chunks.into_vec(), &()); + self.chunks + .par_extend(new_chunks.into_vec().into_par_iter().map(Chunk::new), &()); } else { - self.chunks.extend(new_chunks, &()); + self.chunks + .extend(new_chunks.into_iter().map(Chunk::new), &()); } self.check_invariants(); @@ -135,7 +140,7 @@ impl Rope { // a chunk ends with 3 bytes of a 4-byte character. These 3 bytes end up being stored in the following chunk, thus wasting // 3 bytes of storage in current chunk. // For example, a 1024-byte string can occupy between 32 (full ASCII, 1024/32) and 36 (full 4-byte UTF-8, 1024 / 29 rounded up) chunks. - const MIN_CHUNK_SIZE: usize = 2 * CHUNK_BASE - 3; + const MIN_CHUNK_SIZE: usize = chunk::MAX_BASE - 3; // We also round up the capacity up by one, for a good measure; we *really* don't want to realloc here, as we assume that the # of characters // we're working with there is large. 
@@ -143,12 +148,12 @@ impl Rope { let mut new_chunks = Vec::with_capacity(capacity); while !text.is_empty() { - let mut split_ix = cmp::min(2 * CHUNK_BASE, text.len()); + let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); while !text.is_char_boundary(split_ix) { split_ix -= 1; } let (chunk, remainder) = text.split_at(split_ix); - new_chunks.push(Chunk(ArrayString::from(chunk).unwrap())); + new_chunks.push(chunk); text = remainder; } @@ -158,13 +163,44 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - self.chunks.par_extend(new_chunks, &()); + self.chunks + .par_extend(new_chunks.into_par_iter().map(Chunk::new), &()); } else { - self.chunks.extend(new_chunks, &()); + self.chunks + .extend(new_chunks.into_iter().map(Chunk::new), &()); } self.check_invariants(); } + + fn push_chunk(&mut self, mut chunk: ChunkSlice) { + self.chunks.update_last( + |last_chunk| { + let split_ix = if last_chunk.text.len() + chunk.len() <= chunk::MAX_BASE { + chunk.len() + } else { + let mut split_ix = cmp::min( + chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), + chunk.len(), + ); + while !chunk.is_char_boundary(split_ix) { + split_ix += 1; + } + split_ix + }; + + let (suffix, remainder) = chunk.split_at(split_ix); + last_chunk.append(suffix); + chunk = remainder; + }, + &(), + ); + + if !chunk.is_empty() { + self.chunks.push(chunk.into(), &()); + } + } + pub fn push_front(&mut self, text: &str) { let suffix = mem::replace(self, Rope::from(text)); self.append(suffix); @@ -178,7 +214,7 @@ impl Rope { let mut chunks = self.chunks.cursor::<()>(&()).peekable(); while let Some(chunk) = chunks.next() { if chunks.peek().is_some() { - assert!(chunk.0.len() + 3 >= CHUNK_BASE); + assert!(chunk.text.len() + 3 >= chunk::MIN_BASE); } } } @@ -250,7 +286,7 @@ impl Rope { let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Default::default(), |chunk| { - chunk.offset_to_offset_utf16(overshoot) + chunk.as_slice().offset_to_offset_utf16(overshoot) }) } @@ -263,7 +299,7 @@ impl Rope { let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(Default::default(), |chunk| { - chunk.offset_utf16_to_offset(overshoot) + chunk.as_slice().offset_utf16_to_offset(overshoot) }) } @@ -275,9 +311,9 @@ impl Rope { cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 - + cursor - .item() - .map_or(Point::zero(), |chunk| chunk.offset_to_point(overshoot)) + + cursor.item().map_or(Point::zero(), |chunk| { + chunk.as_slice().offset_to_point(overshoot) + }) } pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { @@ -289,7 +325,7 @@ impl Rope { let overshoot = offset - cursor.start().0; cursor.start().1 + cursor.item().map_or(PointUtf16::zero(), |chunk| { - chunk.offset_to_point_utf16(overshoot) + chunk.as_slice().offset_to_point_utf16(overshoot) }) } @@ -302,7 +338,7 @@ impl Rope { let overshoot = point - cursor.start().0; cursor.start().1 + cursor.item().map_or(PointUtf16::zero(), |chunk| { - chunk.point_to_point_utf16(overshoot) + chunk.as_slice().point_to_point_utf16(overshoot) }) } @@ -316,7 +352,7 @@ impl Rope { cursor.start().1 + cursor .item() - .map_or(0, |chunk| chunk.point_to_offset(overshoot)) + .map_or(0, |chunk| chunk.as_slice().point_to_offset(overshoot)) } pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { @@ -335,9 +371,9 @@ impl Rope { cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; 
cursor.start().1 - + cursor - .item() - .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot, clip)) + + cursor.item().map_or(0, |chunk| { + chunk.as_slice().point_utf16_to_offset(overshoot, clip) + }) } pub fn unclipped_point_utf16_to_point(&self, point: Unclipped) -> Point { @@ -349,7 +385,7 @@ impl Rope { let overshoot = Unclipped(point.0 - cursor.start().0); cursor.start().1 + cursor.item().map_or(Point::zero(), |chunk| { - chunk.unclipped_point_utf16_to_point(overshoot) + chunk.as_slice().unclipped_point_utf16_to_point(overshoot) }) } @@ -358,7 +394,7 @@ impl Rope { cursor.seek(&offset, Bias::Left, &()); if let Some(chunk) = cursor.item() { let mut ix = offset - cursor.start(); - while !chunk.0.is_char_boundary(ix) { + while !chunk.text.is_char_boundary(ix) { match bias { Bias::Left => { ix -= 1; @@ -381,7 +417,7 @@ impl Rope { cursor.seek(&offset, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = offset - cursor.start(); - *cursor.start() + chunk.clip_offset_utf16(overshoot, bias) + *cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias) } else { self.summary().len_utf16 } @@ -392,7 +428,7 @@ impl Rope { cursor.seek(&point, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = point - cursor.start(); - *cursor.start() + chunk.clip_point(overshoot, bias) + *cursor.start() + chunk.as_slice().clip_point(overshoot, bias) } else { self.summary().lines } @@ -403,7 +439,7 @@ impl Rope { cursor.seek(&point.0, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = Unclipped(point.0 - cursor.start()); - *cursor.start() + chunk.clip_point_utf16(overshoot, bias) + *cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias) } else { self.summary().lines_utf16() } @@ -500,7 +536,7 @@ impl<'a> Cursor<'a> { if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); - slice.push(&start_chunk.0[start_ix..end_ix]); + slice.push_chunk(start_chunk.slice(start_ix..end_ix)); } if end_offset > self.chunks.end(&()) { @@ -510,7 +546,7 @@ impl<'a> Cursor<'a> { }); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); - slice.push(&end_chunk.0[..end_ix]); + slice.push_chunk(end_chunk.slice(0..end_ix)); } } @@ -525,9 +561,7 @@ impl<'a> Cursor<'a> { if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); - summary.add_assign(&D::from_text_summary(&TextSummary::from( - &start_chunk.0[start_ix..end_ix], - ))); + summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix))); } if end_offset > self.chunks.end(&()) { @@ -535,9 +569,7 @@ impl<'a> Cursor<'a> { summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &())); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); - summary.add_assign(&D::from_text_summary(&TextSummary::from( - &end_chunk.0[..end_ix], - ))); + summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix))); } } @@ -678,11 +710,11 @@ impl<'a> Chunks<'a> { if let Some(chunk) = self.chunks.item() { let mut end_ix = self.offset - *self.chunks.start(); - if chunk.0.as_bytes()[end_ix - 1] == b'\n' { + if chunk.text.as_bytes()[end_ix - 1] == b'\n' { end_ix -= 1; } - if let Some(newline_ix) = chunk.0[..end_ix].rfind('\n') { + if let Some(newline_ix) = chunk.text[..end_ix].rfind('\n') { 
self.offset = *self.chunks.start() + newline_ix + 1; if self.offset_is_valid() { return true; @@ -694,7 +726,7 @@ impl<'a> Chunks<'a> { .search_backward(|summary| summary.text.lines.row > 0, &()); self.offset = *self.chunks.start(); if let Some(chunk) = self.chunks.item() { - if let Some(newline_ix) = chunk.0.rfind('\n') { + if let Some(newline_ix) = chunk.text.rfind('\n') { self.offset += newline_ix + 1; if self.offset_is_valid() { if self.offset == self.chunks.end(&()) { @@ -731,7 +763,7 @@ impl<'a> Chunks<'a> { slice_start..slice_end }; - Some(&chunk.0[slice_range]) + Some(&chunk.text[slice_range]) } pub fn lines(self) -> Lines<'a> { @@ -798,7 +830,7 @@ impl<'a> Bytes<'a> { } let start = self.range.start.saturating_sub(chunk_start); let end = self.range.end - chunk_start; - Some(&chunk.0.as_bytes()[start..chunk.0.len().min(end)]) + Some(&chunk.text.as_bytes()[start..chunk.text.len().min(end)]) } } @@ -902,265 +934,13 @@ impl<'a> Lines<'a> { } } -#[derive(Clone, Debug, Default)] -struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>); - -impl Chunk { - fn offset_to_offset_utf16(&self, target: usize) -> OffsetUtf16 { - let mut offset = 0; - let mut offset_utf16 = OffsetUtf16(0); - for ch in self.0.chars() { - if offset >= target { - break; - } - - offset += ch.len_utf8(); - offset_utf16.0 += ch.len_utf16(); - } - offset_utf16 - } - - fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize { - let mut offset_utf16 = OffsetUtf16(0); - let mut offset = 0; - for ch in self.0.chars() { - if offset_utf16 >= target { - break; - } - - offset += ch.len_utf8(); - offset_utf16.0 += ch.len_utf16(); - } - offset - } - - fn offset_to_point(&self, target: usize) -> Point { - let mut offset = 0; - let mut point = Point::new(0, 0); - for ch in self.0.chars() { - if offset >= target { - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - } else { - point.column += ch.len_utf8() as u32; - } - offset += ch.len_utf8(); - } - point - } - - fn offset_to_point_utf16(&self, target: usize) -> PointUtf16 { - let mut offset = 0; - let mut point = PointUtf16::new(0, 0); - for ch in self.0.chars() { - if offset >= target { - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - } else { - point.column += ch.len_utf16() as u32; - } - offset += ch.len_utf8(); - } - point - } - - fn point_to_offset(&self, target: Point) -> usize { - let mut offset = 0; - let mut point = Point::new(0, 0); - - for ch in self.0.chars() { - if point >= target { - if point > target { - debug_panic!("point {target:?} is inside of character {ch:?}"); - } - break; - } - - if ch == '\n' { - point.row += 1; - point.column = 0; - - if point.row > target.row { - debug_panic!( - "point {target:?} is beyond the end of a line with length {}", - point.column - ); - break; - } - } else { - point.column += ch.len_utf8() as u32; - } - - offset += ch.len_utf8(); - } - - offset - } - - fn point_to_point_utf16(&self, target: Point) -> PointUtf16 { - let mut point = Point::zero(); - let mut point_utf16 = PointUtf16::new(0, 0); - for ch in self.0.chars() { - if point >= target { - break; - } - - if ch == '\n' { - point_utf16.row += 1; - point_utf16.column = 0; - point.row += 1; - point.column = 0; - } else { - point_utf16.column += ch.len_utf16() as u32; - point.column += ch.len_utf8() as u32; - } - } - point_utf16 - } - - fn point_utf16_to_offset(&self, target: PointUtf16, clip: bool) -> usize { - let mut offset = 0; - let mut point = PointUtf16::new(0, 0); - - for ch in self.0.chars() { - if point == target { - break; - } - - 
if ch == '\n' { - point.row += 1; - point.column = 0; - - if point.row > target.row { - if !clip { - debug_panic!( - "point {target:?} is beyond the end of a line with length {}", - point.column - ); - } - // Return the offset of the newline - return offset; - } - } else { - point.column += ch.len_utf16() as u32; - } - - if point > target { - if !clip { - debug_panic!("point {target:?} is inside of codepoint {ch:?}"); - } - // Return the offset of the codepoint which we have landed within, bias left - return offset; - } - - offset += ch.len_utf8(); - } - - offset - } - - fn unclipped_point_utf16_to_point(&self, target: Unclipped) -> Point { - let mut point = Point::zero(); - let mut point_utf16 = PointUtf16::zero(); - - for ch in self.0.chars() { - if point_utf16 == target.0 { - break; - } - - if point_utf16 > target.0 { - // If the point is past the end of a line or inside of a code point, - // return the last valid point before the target. - return point; - } - - if ch == '\n' { - point_utf16 += PointUtf16::new(1, 0); - point += Point::new(1, 0); - } else { - point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32); - point += Point::new(0, ch.len_utf8() as u32); - } - } - - point - } - - fn clip_point(&self, target: Point, bias: Bias) -> Point { - for (row, line) in self.0.split('\n').enumerate() { - if row == target.row as usize { - let bytes = line.as_bytes(); - let mut column = target.column.min(bytes.len() as u32) as usize; - if column == 0 - || column == bytes.len() - || (bytes[column - 1] < 128 && bytes[column] < 128) - { - return Point::new(row as u32, column as u32); - } - - let mut grapheme_cursor = GraphemeCursor::new(column, bytes.len(), true); - loop { - if line.is_char_boundary(column) - && grapheme_cursor.is_boundary(line, 0).unwrap_or(false) - { - break; - } - - match bias { - Bias::Left => column -= 1, - Bias::Right => column += 1, - } - grapheme_cursor.set_cursor(column); - } - return Point::new(row as u32, column as u32); - } - } - unreachable!() - } - - fn clip_point_utf16(&self, target: Unclipped, bias: Bias) -> PointUtf16 { - for (row, line) in self.0.split('\n').enumerate() { - if row == target.0.row as usize { - let mut code_units = line.encode_utf16(); - let mut column = code_units.by_ref().take(target.0.column as usize).count(); - if char::decode_utf16(code_units).next().transpose().is_err() { - match bias { - Bias::Left => column -= 1, - Bias::Right => column += 1, - } - } - return PointUtf16::new(row as u32, column as u32); - } - } - unreachable!() - } - - fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 { - let mut code_units = self.0.encode_utf16(); - let mut offset = code_units.by_ref().take(target.0).count(); - if char::decode_utf16(code_units).next().transpose().is_err() { - match bias { - Bias::Left => offset -= 1, - Bias::Right => offset += 1, - } - } - OffsetUtf16(offset) - } -} - impl sum_tree::Item for Chunk { type Summary = ChunkSummary; fn summary(&self, _cx: &()) -> Self::Summary { - ChunkSummary::from(self.0.as_str()) + ChunkSummary { + text: self.as_slice().text_summary(), + } } } @@ -1169,14 +949,6 @@ pub struct ChunkSummary { text: TextSummary, } -impl<'a> From<&'a str> for ChunkSummary { - fn from(text: &'a str) -> Self { - Self { - text: TextSummary::from(text), - } - } -} - impl sum_tree::Summary for ChunkSummary { type Context = (); @@ -1323,6 +1095,7 @@ impl std::ops::AddAssign for TextSummary { pub trait TextDimension: 'static + for<'a> Dimension<'a, ChunkSummary> { fn from_text_summary(summary: &TextSummary) 
-> Self; + fn from_chunk(chunk: ChunkSlice) -> Self; fn add_assign(&mut self, other: &Self); } @@ -1334,6 +1107,10 @@ impl TextDimension for (D1, D2) { ) } + fn from_chunk(chunk: ChunkSlice) -> Self { + (D1::from_chunk(chunk), D2::from_chunk(chunk)) + } + fn add_assign(&mut self, other: &Self) { self.0.add_assign(&other.0); self.1.add_assign(&other.1); @@ -1355,6 +1132,10 @@ impl TextDimension for TextSummary { summary.clone() } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.text_summary() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1375,6 +1156,10 @@ impl TextDimension for usize { summary.len } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.len() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1395,6 +1180,10 @@ impl TextDimension for OffsetUtf16 { summary.len_utf16 } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.len_utf16() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1415,6 +1204,10 @@ impl TextDimension for Point { summary.lines } + fn from_chunk(chunk: ChunkSlice) -> Self { + chunk.lines() + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1435,6 +1228,13 @@ impl TextDimension for PointUtf16 { summary.lines_utf16() } + fn from_chunk(chunk: ChunkSlice) -> Self { + PointUtf16 { + row: chunk.lines().row, + column: chunk.last_line_len_utf16(), + } + } + fn add_assign(&mut self, other: &Self) { *self += other; } @@ -1919,7 +1719,7 @@ mod tests { fn text(&self) -> String { let mut text = String::new(); for chunk in self.chunks.cursor::<()>(&()) { - text.push_str(&chunk.0); + text.push_str(&chunk.text); } text } diff --git a/crates/rope/src/unclipped.rs b/crates/rope/src/unclipped.rs index b3427e2cb98bb2..679901875c9b84 100644 --- a/crates/rope/src/unclipped.rs +++ b/crates/rope/src/unclipped.rs @@ -1,4 +1,4 @@ -use crate::{ChunkSummary, TextDimension, TextSummary}; +use crate::{chunk::ChunkSlice, ChunkSummary, TextDimension, TextSummary}; use std::ops::{Add, AddAssign, Sub, SubAssign}; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -27,6 +27,10 @@ impl TextDimension for Unclipped { Unclipped(T::from_text_summary(summary)) } + fn from_chunk(chunk: ChunkSlice) -> Self { + Unclipped(T::from_chunk(chunk)) + } + fn add_assign(&mut self, other: &Self) { TextDimension::add_assign(&mut self.0, &other.0); } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 56b13688bad2b6..9288416d5720b5 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -123,7 +123,6 @@ impl ProtoMessageHandlerSet { let extract_entity_id = *this.entity_id_extractors.get(&payload_type_id)?; let entity_type_id = *this.entity_types_by_message_type.get(&payload_type_id)?; let entity_id = (extract_entity_id)(message.as_ref()); - match this .entities_by_type_and_remote_id .get_mut(&(entity_type_id, entity_id))? 
@@ -145,6 +144,26 @@ pub enum EntityMessageSubscriber { Pending(Vec>), } +impl std::fmt::Debug for EntityMessageSubscriber { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + EntityMessageSubscriber::Entity { handle } => f + .debug_struct("EntityMessageSubscriber::Entity") + .field("handle", handle) + .finish(), + EntityMessageSubscriber::Pending(vec) => f + .debug_struct("EntityMessageSubscriber::Pending") + .field( + "envelopes", + &vec.iter() + .map(|envelope| envelope.payload_type_name()) + .collect::>(), + ) + .finish(), + } + } +} + impl From> for AnyProtoClient where T: ProtoClient + 'static, diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 1bc49551a71497..b018f296934270 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -327,7 +327,7 @@ impl Render for ProjectSearchView { div() .flex_1() .size_full() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child(self.results_editor.clone()) } else { let model = self.model.read(cx); @@ -365,7 +365,7 @@ impl Render for ProjectSearchView { .size_full() .justify_center() .bg(cx.theme().colors().editor_background) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( h_flex() .size_full() diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 0130adf99cba41..620055a9712d77 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -61,6 +61,7 @@ pub trait Settings: 'static + Send + Sync { anyhow::anyhow!("missing default") } + #[track_caller] fn register(cx: &mut AppContext) where Self: Sized, @@ -271,6 +272,7 @@ impl SettingsStore { pub fn register_setting(&mut self, cx: &mut AppContext) { let setting_type_id = TypeId::of::(); let entry = self.setting_values.entry(setting_type_id); + if matches!(entry, hash_map::Entry::Occupied(_)) { return; } diff --git a/crates/sqlez/src/statement.rs b/crates/sqlez/src/statement.rs index 5bebfa84a6a908..db299e428c09a4 100644 --- a/crates/sqlez/src/statement.rs +++ b/crates/sqlez/src/statement.rs @@ -64,13 +64,13 @@ impl<'a> Statement<'a> { &mut remaining_sql_ptr, ); - remaining_sql = CStr::from_ptr(remaining_sql_ptr); - statement.raw_statements.push(raw_statement); - connection.last_error().with_context(|| { format!("Prepare call failed for query:\n{}", query.as_ref()) })?; + remaining_sql = CStr::from_ptr(remaining_sql_ptr); + statement.raw_statements.push(raw_statement); + if !connection.can_write() && sqlite3_stmt_readonly(raw_statement) == 0 { let sql = CStr::from_ptr(sqlite3_sql(raw_statement)); diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index b370e6df1850b2..06ca9557673350 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] arrayvec = "0.7.1" -rayon = "1.8" +rayon.workspace = true log.workspace = true [dev-dependencies] diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 51705e32888127..ce13e18ba03506 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -445,7 +445,11 @@ impl PickerDelegate for TasksModalDelegate { ) } - fn confirm_completion(&self, _: String) -> Option { + fn confirm_completion( + &mut self, + _: String, + _: &mut ViewContext>, + ) -> Option { let task_index = self.matches.get(self.selected_index())?.candidate_id; let tasks = self.candidates.as_ref()?; let (_, task) = 
tasks.get(task_index)?; diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 47e66a46a74d23..26db3cf8d8d8b1 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -222,13 +222,13 @@ pub struct HangReport { pub installation_id: Option, } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct LocationData { pub file: String, pub line: u32, } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct Panic { /// The name of the thread that panicked pub thread: String, diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index eed8c8123b1be8..d192680968f156 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -975,7 +975,7 @@ impl Render for TerminalView { div() .size_full() .relative() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .key_context(self.dispatch_context(cx)) .on_action(cx.listener(TerminalView::send_text)) .on_action(cx.listener(TerminalView::send_keystroke)) diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index 9f95d199375003..73e8fe8c66db6c 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -6,16 +6,11 @@ use collections::HashMap; use derive_more::{Deref, DerefMut}; use fs::Fs; use futures::StreamExt; -use gpui::{AppContext, AssetSource, Global, HighlightStyle, SharedString}; +use gpui::{AppContext, AssetSource, Global, SharedString}; use parking_lot::RwLock; -use refineable::Refineable; use util::ResultExt; -use crate::{ - try_parse_color, AccentColors, Appearance, AppearanceContent, PlayerColors, StatusColors, - SyntaxTheme, SystemColors, Theme, ThemeColors, ThemeContent, ThemeFamily, ThemeFamilyContent, - ThemeStyles, -}; +use crate::{refine_theme_family, Appearance, Theme, ThemeFamily, ThemeFamilyContent}; /// The metadata for a theme. #[derive(Debug, Clone)] @@ -97,87 +92,10 @@ impl ThemeRegistry { #[allow(unused)] fn insert_user_theme_families(&self, families: impl IntoIterator) { for family in families.into_iter() { - self.insert_user_themes(family.themes); - } - } - - /// Inserts user themes into the registry. 
- pub fn insert_user_themes(&self, themes: impl IntoIterator) { - self.insert_themes(themes.into_iter().map(|user_theme| { - let mut theme_colors = match user_theme.appearance { - AppearanceContent::Light => ThemeColors::light(), - AppearanceContent::Dark => ThemeColors::dark(), - }; - theme_colors.refine(&user_theme.style.theme_colors_refinement()); - - let mut status_colors = match user_theme.appearance { - AppearanceContent::Light => StatusColors::light(), - AppearanceContent::Dark => StatusColors::dark(), - }; - status_colors.refine(&user_theme.style.status_colors_refinement()); - - let mut player_colors = match user_theme.appearance { - AppearanceContent::Light => PlayerColors::light(), - AppearanceContent::Dark => PlayerColors::dark(), - }; - player_colors.merge(&user_theme.style.players); - - let mut accent_colors = match user_theme.appearance { - AppearanceContent::Light => AccentColors::light(), - AppearanceContent::Dark => AccentColors::dark(), - }; - accent_colors.merge(&user_theme.style.accents); + let refined_family = refine_theme_family(family); - let syntax_highlights = user_theme - .style - .syntax - .iter() - .map(|(syntax_token, highlight)| { - ( - syntax_token.clone(), - HighlightStyle { - color: highlight - .color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - background_color: highlight - .background_color - .as_ref() - .and_then(|color| try_parse_color(color).ok()), - font_style: highlight.font_style.map(Into::into), - font_weight: highlight.font_weight.map(Into::into), - ..Default::default() - }, - ) - }) - .collect::>(); - let syntax_theme = - SyntaxTheme::merge(Arc::new(SyntaxTheme::default()), syntax_highlights); - - let window_background_appearance = user_theme - .style - .window_background_appearance - .map(Into::into) - .unwrap_or_default(); - - Theme { - id: uuid::Uuid::new_v4().to_string(), - name: user_theme.name.into(), - appearance: match user_theme.appearance { - AppearanceContent::Light => Appearance::Light, - AppearanceContent::Dark => Appearance::Dark, - }, - styles: ThemeStyles { - system: SystemColors::default(), - window_background_appearance, - accents: accent_colors, - colors: theme_colors, - status: status_colors, - player: player_colors, - syntax: syntax_theme, - }, - } - })); + self.insert_themes(refined_family.themes); + } } /// Removes the themes with the given names from the registry. diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index c62359242dd9d2..307ea6b287894a 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -29,10 +29,11 @@ pub use settings::*; pub use styles::*; use gpui::{ - px, AppContext, AssetSource, Hsla, Pixels, SharedString, WindowAppearance, - WindowBackgroundAppearance, + px, AppContext, AssetSource, HighlightStyle, Hsla, Pixels, Refineable, SharedString, + WindowAppearance, WindowBackgroundAppearance, }; use serde::Deserialize; +use uuid::Uuid; /// Defines window border radius for platforms that use client side decorations. pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); @@ -137,7 +138,112 @@ pub struct ThemeFamily { pub scales: ColorScales, } -impl ThemeFamily {} +impl ThemeFamily { + // This is on ThemeFamily because we will have variables here we will need + // in the future to resolve @references. + /// Refines ThemeContent into a theme, merging it's contents with the base theme. 
+ pub fn refine_theme(&self, theme: &ThemeContent) -> Theme { + let appearance = match theme.appearance { + AppearanceContent::Light => Appearance::Light, + AppearanceContent::Dark => Appearance::Dark, + }; + + let mut refined_theme_colors = match theme.appearance { + AppearanceContent::Light => ThemeColors::light(), + AppearanceContent::Dark => ThemeColors::dark(), + }; + refined_theme_colors.refine(&theme.style.theme_colors_refinement()); + + let mut refined_status_colors = match theme.appearance { + AppearanceContent::Light => StatusColors::light(), + AppearanceContent::Dark => StatusColors::dark(), + }; + refined_status_colors.refine(&theme.style.status_colors_refinement()); + + let mut refined_player_colors = match theme.appearance { + AppearanceContent::Light => PlayerColors::light(), + AppearanceContent::Dark => PlayerColors::dark(), + }; + refined_player_colors.merge(&theme.style.players); + + let mut refined_accent_colors = match theme.appearance { + AppearanceContent::Light => AccentColors::light(), + AppearanceContent::Dark => AccentColors::dark(), + }; + refined_accent_colors.merge(&theme.style.accents); + + let syntax_highlights = theme + .style + .syntax + .iter() + .map(|(syntax_token, highlight)| { + ( + syntax_token.clone(), + HighlightStyle { + color: highlight + .color + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + background_color: highlight + .background_color + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + font_style: highlight.font_style.map(Into::into), + font_weight: highlight.font_weight.map(Into::into), + ..Default::default() + }, + ) + }) + .collect::<Vec<_>>(); + let syntax_theme = SyntaxTheme::merge(Arc::new(SyntaxTheme::default()), syntax_highlights); + + let window_background_appearance = theme + .style + .window_background_appearance + .map(Into::into) + .unwrap_or_default(); + + Theme { + id: uuid::Uuid::new_v4().to_string(), + name: theme.name.clone().into(), + appearance, + styles: ThemeStyles { + system: SystemColors::default(), + window_background_appearance, + accents: refined_accent_colors, + colors: refined_theme_colors, + status: refined_status_colors, + player: refined_player_colors, + syntax: syntax_theme, + }, + } + } +} + +/// Refines a [ThemeFamilyContent] and its [ThemeContent]s into a [ThemeFamily]. +pub fn refine_theme_family(theme_family_content: ThemeFamilyContent) -> ThemeFamily { + let id = Uuid::new_v4().to_string(); + let name = theme_family_content.name.clone(); + let author = theme_family_content.author.clone(); + + let mut theme_family = ThemeFamily { + id: id.clone(), + name: name.clone().into(), + author: author.clone().into(), + themes: vec![], + scales: default_color_scales(), + }; + + let refined_themes = theme_family_content + .themes + .iter() + .map(|theme_content| theme_family.refine_theme(theme_content)) + .collect(); + + theme_family.themes = refined_themes; + + theme_family +} /// A theme is the primary mechanism for defining the appearance of the UI.
#[derive(Clone, PartialEq)] diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index edbc14792675f9..805c0e72029b20 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -282,6 +282,13 @@ impl TitleBar { return Vec::new(); }; + let is_connecting_to_project = self + .workspace + .update(cx, |workspace, cx| { + recent_projects::is_connecting_over_ssh(workspace, cx) + }) + .unwrap_or(false); + let room = room.read(cx); let project = self.project.read(cx); let is_local = project.is_local() || project.is_via_ssh(); @@ -298,7 +305,7 @@ impl TitleBar { let mut children = Vec::new(); - if is_local && can_share_projects { + if is_local && can_share_projects && !is_connecting_to_project { children.push( Button::new( "toggle_sharing", diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 74c5b2812a5fa3..f58eaa89a0f9b6 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -447,7 +447,7 @@ impl TitleBar { }) .on_click(move |_, cx| { let _ = workspace.update(cx, |this, cx| { - BranchList::open(this, &Default::default(), cx) + BranchList::open(this, &Default::default(), cx); }); }), ) diff --git a/crates/toolchain_selector/Cargo.toml b/crates/toolchain_selector/Cargo.toml new file mode 100644 index 00000000000000..ed80bd0dc999e4 --- /dev/null +++ b/crates/toolchain_selector/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "toolchain_selector" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[dependencies] +editor.workspace = true +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +picker.workspace = true +project.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[lints] +workspace = true + +[lib] +path = "src/toolchain_selector.rs" +doctest = false diff --git a/crates/toolchain_selector/LICENSE-GPL b/crates/toolchain_selector/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/toolchain_selector/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs new file mode 100644 index 00000000000000..74a6bd7107834f --- /dev/null +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -0,0 +1,173 @@ +use editor::Editor; +use gpui::{ + div, AsyncWindowContext, EventEmitter, IntoElement, ParentElement, Render, Subscription, Task, + View, ViewContext, WeakModel, WeakView, +}; +use language::{Buffer, BufferEvent, LanguageName, Toolchain}; +use project::WorktreeId; +use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; +use workspace::{item::ItemHandle, StatusItemView, Workspace}; + +use crate::ToolchainSelector; + +pub struct ActiveToolchain { + active_toolchain: Option, + workspace: WeakView, + active_buffer: Option<(WorktreeId, WeakModel, Subscription)>, + _observe_language_changes: Subscription, + _update_toolchain_task: Task>, +} + +struct LanguageChanged; + +impl EventEmitter for ActiveToolchain {} + +impl ActiveToolchain { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let view = cx.view().clone(); + Self { + active_toolchain: None, + active_buffer: None, + workspace: workspace.weak_handle(), + _observe_language_changes: cx.subscribe(&view, |this, _, _: &LanguageChanged, cx| { + this._update_toolchain_task = Self::spawn_tracker_task(cx); + }), + 
_update_toolchain_task: Self::spawn_tracker_task(cx), + } + } + fn spawn_tracker_task(cx: &mut ViewContext) -> Task> { + cx.spawn(|this, mut cx| async move { + let active_file = this + .update(&mut cx, |this, _| { + this.active_buffer + .as_ref() + .map(|(_, buffer, _)| buffer.clone()) + }) + .ok() + .flatten()?; + let workspace = this + .update(&mut cx, |this, _| this.workspace.clone()) + .ok()?; + + let language_name = active_file + .update(&mut cx, |this, _| Some(this.language()?.name())) + .ok() + .flatten()?; + + let worktree_id = active_file + .update(&mut cx, |this, cx| Some(this.file()?.worktree_id(cx))) + .ok() + .flatten()?; + let toolchain = + Self::active_toolchain(workspace, worktree_id, language_name, cx.clone()).await?; + let _ = this.update(&mut cx, |this, cx| { + this.active_toolchain = Some(toolchain); + + cx.notify(); + }); + Some(()) + }) + } + + fn update_lister(&mut self, editor: View, cx: &mut ViewContext) { + let editor = editor.read(cx); + if let Some((_, buffer, _)) = editor.active_excerpt(cx) { + if let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { + let subscription = cx.subscribe(&buffer, |_, _, event: &BufferEvent, cx| { + if let BufferEvent::LanguageChanged = event { + cx.emit(LanguageChanged) + } + }); + self.active_buffer = Some((worktree_id, buffer.downgrade(), subscription)); + cx.emit(LanguageChanged); + } + } + + cx.notify(); + } + + fn active_toolchain( + workspace: WeakView, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: AsyncWindowContext, + ) -> Task> { + cx.spawn(move |mut cx| async move { + let workspace_id = workspace + .update(&mut cx, |this, _| this.database_id()) + .ok() + .flatten()?; + let selected_toolchain = workspace + .update(&mut cx, |this, cx| { + this.project() + .read(cx) + .active_toolchain(worktree_id, language_name.clone(), cx) + }) + .ok()? + .await; + if let Some(toolchain) = selected_toolchain { + Some(toolchain) + } else { + let project = workspace + .update(&mut cx, |this, _| this.project().clone()) + .ok()?; + let toolchains = cx + .update(|cx| { + project + .read(cx) + .available_toolchains(worktree_id, language_name, cx) + }) + .ok()? + .await?; + if let Some(toolchain) = toolchains.toolchains.first() { + // Since we don't have a selected toolchain, pick one for user here. + workspace::WORKSPACE_DB + .set_toolchain(workspace_id, worktree_id, toolchain.clone()) + .await + .ok()?; + project + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain.clone(), cx) + }) + .ok()? 
+ .await; + } + + toolchains.toolchains.first().cloned() + } + }) + } +} + +impl Render for ActiveToolchain { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div().when_some(self.active_toolchain.as_ref(), |el, active_toolchain| { + el.child( + Button::new("change-toolchain", active_toolchain.name.clone()) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, cx| { + if let Some(workspace) = this.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + ToolchainSelector::toggle(workspace, cx) + }); + } + })) + .tooltip(|cx| Tooltip::text("Select Toolchain", cx)), + ) + }) + } +} + +impl StatusItemView for ActiveToolchain { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + cx: &mut ViewContext, + ) { + if let Some(editor) = active_pane_item.and_then(|item| item.act_as::(cx)) { + self.active_toolchain.take(); + self.update_lister(editor, cx); + } + cx.notify(); + } +} diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs new file mode 100644 index 00000000000000..8a3368f81675d4 --- /dev/null +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -0,0 +1,343 @@ +mod active_toolchain; + +pub use active_toolchain::ActiveToolchain; +use editor::Editor; +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{ + actions, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model, + ParentElement, Render, Styled, Task, View, ViewContext, VisualContext, WeakView, +}; +use language::{LanguageName, Toolchain, ToolchainList}; +use picker::{Picker, PickerDelegate}; +use project::{Project, WorktreeId}; +use std::{path::Path, sync::Arc}; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; +use util::ResultExt; +use workspace::{ModalView, Workspace}; + +actions!(toolchain, [Select]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(ToolchainSelector::register).detach(); +} + +pub struct ToolchainSelector { + picker: View>, +} + +impl ToolchainSelector { + fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(move |workspace, _: &Select, cx| { + Self::toggle(workspace, cx); + }); + } + + fn toggle(workspace: &mut Workspace, cx: &mut ViewContext) -> Option<()> { + let (_, buffer, _) = workspace + .active_item(cx)? + .act_as::(cx)? + .read(cx) + .active_excerpt(cx)?; + let project = workspace.project().clone(); + + let language_name = buffer.read(cx).language()?.name(); + let worktree_id = buffer.read(cx).file()?.worktree_id(cx); + let worktree_root_path = project + .read(cx) + .worktree_for_id(worktree_id, cx)? 
+ .read(cx) + .abs_path(); + let workspace_id = workspace.database_id()?; + let weak = workspace.weak_handle(); + cx.spawn(move |workspace, mut cx| async move { + let active_toolchain = workspace::WORKSPACE_DB + .toolchain(workspace_id, worktree_id, language_name.clone()) + .await + .ok() + .flatten(); + workspace + .update(&mut cx, |this, cx| { + this.toggle_modal(cx, move |cx| { + ToolchainSelector::new( + weak, + project, + active_toolchain, + worktree_id, + worktree_root_path, + language_name, + cx, + ) + }); + }) + .ok(); + }) + .detach(); + + Some(()) + } + + fn new( + workspace: WeakView, + project: Model, + active_toolchain: Option, + worktree_id: WorktreeId, + worktree_root: Arc, + language_name: LanguageName, + cx: &mut ViewContext, + ) -> Self { + let view = cx.view().downgrade(); + let picker = cx.new_view(|cx| { + let delegate = ToolchainSelectorDelegate::new( + active_toolchain, + view, + workspace, + worktree_id, + worktree_root, + project, + language_name, + cx, + ); + Picker::uniform_list(delegate, cx) + }); + Self { picker } + } +} + +impl Render for ToolchainSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +impl FocusableView for ToolchainSelector { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl EventEmitter for ToolchainSelector {} +impl ModalView for ToolchainSelector {} + +pub struct ToolchainSelectorDelegate { + toolchain_selector: WeakView, + candidates: ToolchainList, + matches: Vec, + selected_index: usize, + workspace: WeakView, + worktree_id: WorktreeId, + worktree_abs_path_root: Arc, + _fetch_candidates_task: Task>, +} + +impl ToolchainSelectorDelegate { + #[allow(clippy::too_many_arguments)] + fn new( + active_toolchain: Option, + language_selector: WeakView, + workspace: WeakView, + worktree_id: WorktreeId, + worktree_abs_path_root: Arc, + project: Model, + language_name: LanguageName, + cx: &mut ViewContext>, + ) -> Self { + let _fetch_candidates_task = cx.spawn({ + let project = project.clone(); + move |this, mut cx| async move { + let available_toolchains = project + .update(&mut cx, |this, cx| { + this.available_toolchains(worktree_id, language_name, cx) + }) + .ok()? 
+ .await?; + + let _ = this.update(&mut cx, move |this, cx| { + this.delegate.candidates = available_toolchains; + if let Some(active_toolchain) = active_toolchain { + if let Some(position) = this + .delegate + .candidates + .toolchains + .iter() + .position(|toolchain| *toolchain == active_toolchain) + { + this.delegate.set_selected_index(position, cx); + } + } + this.update_matches(this.query(cx), cx); + }); + + Some(()) + } + }); + + Self { + toolchain_selector: language_selector, + candidates: Default::default(), + matches: vec![], + selected_index: 0, + workspace, + worktree_id, + worktree_abs_path_root, + _fetch_candidates_task, + } + } + fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString { + Path::new(&path.as_ref()) + .strip_prefix(&worktree_root) + .ok() + .map(|suffix| Path::new(".").join(suffix)) + .and_then(|path| path.to_str().map(String::from).map(SharedString::from)) + .unwrap_or(path) + } +} + +impl PickerDelegate for ToolchainSelectorDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Select a toolchain...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if let Some(string_match) = self.matches.get(self.selected_index) { + let toolchain = self.candidates.toolchains[string_match.candidate_id].clone(); + if let Some(workspace_id) = self + .workspace + .update(cx, |this, _| this.database_id()) + .ok() + .flatten() + { + let workspace = self.workspace.clone(); + let worktree_id = self.worktree_id; + cx.spawn(|_, mut cx| async move { + workspace::WORKSPACE_DB + .set_toolchain(workspace_id, worktree_id, toolchain.clone()) + .await + .log_err(); + workspace + .update(&mut cx, |this, cx| { + this.project().update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }) + }) + .ok()? 
+ .await; + Some(()) + }) + .detach(); + } + } + self.dismissed(cx); + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.toolchain_selector + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let background = cx.background_executor().clone(); + let candidates = self.candidates.clone(); + let worktree_root_path = self.worktree_abs_path_root.clone(); + cx.spawn(|this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .toolchains + .into_iter() + .enumerate() + .map(|(index, candidate)| { + let path = Self::relativize_path(candidate.path, &worktree_root_path); + let string = format!("{}{}", candidate.name, path); + StringMatch { + candidate_id: index, + string, + positions: Vec::new(), + score: 0.0, + } + }) + .collect() + } else { + let candidates = candidates + .toolchains + .into_iter() + .enumerate() + .map(|(candidate_id, toolchain)| { + let path = Self::relativize_path(toolchain.path, &worktree_root_path); + let string = format!("{}{}", toolchain.name, path); + StringMatchCandidate::new(candidate_id, string) + }) + .collect::>(); + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background, + ) + .await + }; + + this.update(&mut cx, |this, cx| { + let delegate = &mut this.delegate; + delegate.matches = matches; + delegate.selected_index = delegate + .selected_index + .min(delegate.matches.len().saturating_sub(1)); + cx.notify(); + }) + .log_err(); + }) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _: &mut ViewContext>, + ) -> Option { + let mat = &self.matches[ix]; + let toolchain = &self.candidates.toolchains[mat.candidate_id]; + + let label = toolchain.name.clone(); + let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root); + let (name_highlights, mut path_highlights) = mat + .positions + .iter() + .cloned() + .partition::, _>(|index| *index < label.len()); + path_highlights.iter_mut().for_each(|index| { + *index -= label.len(); + }); + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child(HighlightedLabel::new(label, name_highlights)) + .child( + HighlightedLabel::new(path, path_highlights) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + } +} diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index 92884b0182e660..702dd6a09236e2 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -348,7 +348,7 @@ impl Render for ContextMenu { .min_w(px(200.)) .max_h(vh(0.75, cx)) .overflow_y_scroll() - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .on_mouse_down_out(cx.listener(|this, _, cx| this.cancel(&menu::Cancel, cx))) .key_context("menu") .on_action(cx.listener(ContextMenu::select_first)) diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index aa8936d723ba9e..cae2eb0ed23e0d 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -223,6 +223,7 @@ pub enum IconName { LineHeight, Link, ListTree, + ListX, MagnifyingGlass, MailOpen, Maximize, @@ -302,6 +303,12 @@ pub enum IconName { ZedXCopilot, } +impl From for Icon { + fn from(icon: IconName) -> Self 
{ + Icon::new(icon) + } +} + #[derive(IntoElement)] pub struct Icon { path: SharedString, diff --git a/crates/ui/src/components/indent_guides.rs b/crates/ui/src/components/indent_guides.rs index e45404429ce4f9..caab92053c58b8 100644 --- a/crates/ui/src/components/indent_guides.rs +++ b/crates/ui/src/components/indent_guides.rs @@ -140,13 +140,18 @@ mod uniform_list { visible_range: Range, bounds: Bounds, item_height: Pixels, + item_count: usize, cx: &mut WindowContext, ) -> AnyElement { let mut visible_range = visible_range.clone(); - visible_range.end += 1; + let includes_trailing_indent = visible_range.end < item_count; + // Check if we have entries after the visible range, + // if so extend the visible range so we can fetch a trailing indent, + // which is needed to compute indent guides correctly. + if includes_trailing_indent { + visible_range.end += 1; + } let visible_entries = &(self.compute_indents_fn)(visible_range.clone(), cx); - // Check if we have an additional indent that is outside of the visible range - let includes_trailing_indent = visible_entries.len() == visible_range.len(); let indent_guides = compute_indent_guides( &visible_entries, visible_range.start, @@ -198,8 +203,12 @@ mod uniform_list { on_hovered_indent_guide_click: Option>, } - struct IndentGuidesElementPrepaintState { - hitboxes: SmallVec<[Hitbox; 12]>, + enum IndentGuidesElementPrepaintState { + Static, + Interactive { + hitboxes: Rc>, + on_hovered_indent_guide_click: Rc, + }, } impl Element for IndentGuidesElement { @@ -225,11 +234,21 @@ mod uniform_list { _request_layout: &mut Self::RequestLayoutState, cx: &mut WindowContext, ) -> Self::PrepaintState { - let mut hitboxes = SmallVec::new(); - for guide in self.indent_guides.as_ref().iter() { - hitboxes.push(cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)); + if let Some(on_hovered_indent_guide_click) = self.on_hovered_indent_guide_click.clone() + { + let hitboxes = self + .indent_guides + .as_ref() + .iter() + .map(|guide| cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)) + .collect(); + Self::PrepaintState::Interactive { + hitboxes: Rc::new(hitboxes), + on_hovered_indent_guide_click, + } + } else { + Self::PrepaintState::Static } - Self::PrepaintState { hitboxes } } fn paint( @@ -240,81 +259,96 @@ mod uniform_list { prepaint: &mut Self::PrepaintState, cx: &mut WindowContext, ) { - let callback = self.on_hovered_indent_guide_click.clone(); - if let Some(callback) = callback { - cx.on_mouse_event({ - let hitboxes = prepaint.hitboxes.clone(); - let indent_guides = self.indent_guides.clone(); - move |event: &MouseDownEvent, phase, cx| { - if phase == DispatchPhase::Bubble && event.button == MouseButton::Left { - let mut active_hitbox_ix = None; - for (i, hitbox) in hitboxes.iter().enumerate() { - if hitbox.is_hovered(cx) { - active_hitbox_ix = Some(i); - break; + match prepaint { + IndentGuidesElementPrepaintState::Static => { + for indent_guide in self.indent_guides.as_ref() { + let fill_color = if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); + } + } + IndentGuidesElementPrepaintState::Interactive { + hitboxes, + on_hovered_indent_guide_click, + } => { + cx.on_mouse_event({ + let hitboxes = hitboxes.clone(); + let indent_guides = self.indent_guides.clone(); + let on_hovered_indent_guide_click = on_hovered_indent_guide_click.clone(); + move |event: &MouseDownEvent, phase, cx| { + if phase == DispatchPhase::Bubble && event.button == 
MouseButton::Left { + let mut active_hitbox_ix = None; + for (i, hitbox) in hitboxes.iter().enumerate() { + if hitbox.is_hovered(cx) { + active_hitbox_ix = Some(i); + break; + } } - } - let Some(active_hitbox_ix) = active_hitbox_ix else { - return; - }; + let Some(active_hitbox_ix) = active_hitbox_ix else { + return; + }; - let active_indent_guide = &indent_guides[active_hitbox_ix].layout; - callback(active_indent_guide, cx); + let active_indent_guide = &indent_guides[active_hitbox_ix].layout; + on_hovered_indent_guide_click(active_indent_guide, cx); - cx.stop_propagation(); - cx.prevent_default(); + cx.stop_propagation(); + cx.prevent_default(); + } } - } - }); - } - - let mut hovered_hitbox_id = None; - for (i, hitbox) in prepaint.hitboxes.iter().enumerate() { - cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); - let indent_guide = &self.indent_guides[i]; - let fill_color = if hitbox.is_hovered(cx) { - hovered_hitbox_id = Some(hitbox.id); - self.colors.hover - } else if indent_guide.is_active { - self.colors.active - } else { - self.colors.default - }; - - cx.paint_quad(fill(indent_guide.bounds, fill_color)); - } - - cx.on_mouse_event({ - let prev_hovered_hitbox_id = hovered_hitbox_id; - let hitboxes = prepaint.hitboxes.clone(); - move |_: &MouseMoveEvent, phase, cx| { + }); let mut hovered_hitbox_id = None; - for hitbox in &hitboxes { - if hitbox.is_hovered(cx) { + for (i, hitbox) in hitboxes.iter().enumerate() { + cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); + let indent_guide = &self.indent_guides[i]; + let fill_color = if hitbox.is_hovered(cx) { hovered_hitbox_id = Some(hitbox.id); - break; - } + self.colors.hover + } else if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); } - if phase == DispatchPhase::Capture { - // If the hovered hitbox has changed, we need to re-paint the indent guides. - match (prev_hovered_hitbox_id, hovered_hitbox_id) { - (Some(prev_id), Some(id)) => { - if prev_id != id { - cx.refresh(); + + cx.on_mouse_event({ + let prev_hovered_hitbox_id = hovered_hitbox_id; + let hitboxes = hitboxes.clone(); + move |_: &MouseMoveEvent, phase, cx| { + let mut hovered_hitbox_id = None; + for hitbox in hitboxes.as_ref() { + if hitbox.is_hovered(cx) { + hovered_hitbox_id = Some(hitbox.id); + break; } } - (None, Some(_)) => { - cx.refresh(); - } - (Some(_), None) => { - cx.refresh(); + if phase == DispatchPhase::Capture { + // If the hovered hitbox has changed, we need to re-paint the indent guides. 
+ match (prev_hovered_hitbox_id, hovered_hitbox_id) { + (Some(prev_id), Some(id)) => { + if prev_id != id { + cx.refresh(); + } + } + (None, Some(_)) => { + cx.refresh(); + } + (Some(_), None) => { + cx.refresh(); + } + (None, None) => {} + } } - (None, None) => {} } - } + }); } - }); + } } } diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index cd45a11d9fd4d4..c1381e6fdfe9a1 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -184,7 +184,7 @@ pub struct KeyIcon { impl RenderOnce for KeyIcon { fn render(self, _cx: &mut WindowContext) -> impl IntoElement { Icon::new(self.icon) - .size(IconSize::Small) + .size(IconSize::XSmall) .color(Color::Muted) } } diff --git a/crates/util/src/arc_cow.rs b/crates/util/src/arc_cow.rs index 02ad1fa1f0a171..06a2fa9cd03cc2 100644 --- a/crates/util/src/arc_cow.rs +++ b/crates/util/src/arc_cow.rs @@ -75,6 +75,12 @@ impl From for ArcCow<'_, str> { } } +impl From<&String> for ArcCow<'_, str> { + fn from(value: &String) -> Self { + Self::Owned(value.clone().into()) + } +} + impl<'a> From> for ArcCow<'a, str> { fn from(value: Cow<'a, str>) -> Self { match value { diff --git a/crates/vcs_menu/Cargo.toml b/crates/vcs_menu/Cargo.toml index 75dcad83dff317..11de371868953d 100644 --- a/crates/vcs_menu/Cargo.toml +++ b/crates/vcs_menu/Cargo.toml @@ -14,6 +14,7 @@ fuzzy.workspace = true git.workspace = true gpui.workspace = true picker.workspace = true +project.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/vcs_menu/src/lib.rs b/crates/vcs_menu/src/lib.rs index 720a427ae90efe..8f73153dd88f5a 100644 --- a/crates/vcs_menu/src/lib.rs +++ b/crates/vcs_menu/src/lib.rs @@ -2,24 +2,23 @@ use anyhow::{Context, Result}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::repository::Branch; use gpui::{ - actions, rems, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, - InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription, - Task, View, ViewContext, VisualContext, WindowContext, + actions, rems, AnyElement, AppContext, AsyncAppContext, DismissEvent, EventEmitter, + FocusHandle, FocusableView, InteractiveElement, IntoElement, ParentElement, Render, + SharedString, Styled, Subscription, Task, View, ViewContext, VisualContext, WindowContext, }; use picker::{Picker, PickerDelegate}; +use project::ProjectPath; use std::{ops::Not, sync::Arc}; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::ResultExt; -use workspace::notifications::NotificationId; -use workspace::{ModalView, Toast, Workspace}; +use workspace::notifications::DetachAndPromptErr; +use workspace::{ModalView, Workspace}; actions!(branches, [OpenRecent]); pub fn init(cx: &mut AppContext) { cx.observe_new_views(|workspace: &mut Workspace, _| { - workspace.register_action(|workspace, action, cx| { - BranchList::open(workspace, action, cx).log_err(); - }); + workspace.register_action(BranchList::open); }) .detach(); } @@ -31,6 +30,21 @@ pub struct BranchList { } impl BranchList { + pub fn open(_: &mut Workspace, _: &OpenRecent, cx: &mut ViewContext) { + let this = cx.view().clone(); + cx.spawn(|_, mut cx| async move { + // Modal branch picker has a longer trailoff than a popover one. 
+ let delegate = BranchListDelegate::new(this.clone(), 70, &cx).await?; + + this.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx)) + })?; + + Ok(()) + }) + .detach_and_prompt_err("Failed to read branches", cx, |_, _| None) + } + fn new(delegate: BranchListDelegate, rem_width: f32, cx: &mut ViewContext) -> Self { let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent)); @@ -40,17 +54,6 @@ impl BranchList { _subscription, } } - pub fn open( - workspace: &mut Workspace, - _: &OpenRecent, - cx: &mut ViewContext, - ) -> Result<()> { - // Modal branch picker has a longer trailoff than a popover one. - let delegate = BranchListDelegate::new(workspace, cx.view().clone(), 70, cx)?; - workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx)); - - Ok(()) - } } impl ModalView for BranchList {} impl EventEmitter for BranchList {} @@ -100,36 +103,32 @@ pub struct BranchListDelegate { } impl BranchListDelegate { - fn new( - workspace: &Workspace, - handle: View, + async fn new( + workspace: View, branch_name_trailoff_after: usize, - cx: &AppContext, + cx: &AsyncAppContext, ) -> Result { - let project = workspace.project().read(cx); - let repo = project - .get_first_worktree_root_repo(cx) - .context("failed to get root repository for first worktree")?; + let all_branches_request = cx.update(|cx| { + let project = workspace.read(cx).project().read(cx); + let first_worktree = project + .visible_worktrees(cx) + .next() + .context("No worktrees found")?; + let project_path = ProjectPath::root_path(first_worktree.read(cx).id()); + anyhow::Ok(project.branches(project_path, cx)) + })??; + + let all_branches = all_branches_request.await?; - let all_branches = repo.branches()?; Ok(Self { matches: vec![], - workspace: handle, + workspace, all_branches, selected_index: 0, last_query: Default::default(), branch_name_trailoff_after, }) } - - fn display_error_toast(&self, message: String, cx: &mut WindowContext<'_>) { - self.workspace.update(cx, |model, ctx| { - struct GitCheckoutFailure; - let id = NotificationId::unique::(); - - model.show_toast(Toast::new(id, message), ctx) - }); - } } impl PickerDelegate for BranchListDelegate { @@ -235,40 +234,32 @@ impl PickerDelegate for BranchListDelegate { cx.spawn({ let branch = branch.clone(); |picker, mut cx| async move { - picker - .update(&mut cx, |this, cx| { - let project = this.delegate.workspace.read(cx).project().read(cx); - let repo = project - .get_first_worktree_root_repo(cx) - .context("failed to get root repository for first worktree")?; - - let branch_to_checkout = match branch { - BranchEntry::Branch(branch) => branch.string, - BranchEntry::NewBranch { name: branch_name } => { - let status = repo.create_branch(&branch_name); - if status.is_err() { - this.delegate.display_error_toast(format!("Failed to create branch '{branch_name}', check for conflicts or unstashed files"), cx); - status?; - } - - branch_name - } - }; - - let status = repo.change_branch(&branch_to_checkout); - if status.is_err() { - this.delegate.display_error_toast(format!("Failed to checkout branch '{branch_to_checkout}', check for conflicts or unstashed files"), cx); - status?; - } + let branch_change_task = picker.update(&mut cx, |this, cx| { + let project = this.delegate.workspace.read(cx).project().read(cx); - cx.emit(DismissEvent); + let branch_to_checkout = match branch { + BranchEntry::Branch(branch) => branch.string, + 
BranchEntry::NewBranch { name: branch_name } => branch_name, + }; + let worktree = project + .visible_worktrees(cx) + .next() + .context("worktree disappeared")?; + let repository = ProjectPath::root_path(worktree.read(cx).id()); - Ok::<(), anyhow::Error>(()) - }) - .log_err(); + anyhow::Ok(project.update_or_create_branch(repository, branch_to_checkout, cx)) + })??; + + branch_change_task.await?; + + picker.update(&mut cx, |_, cx| { + cx.emit(DismissEvent); + + Ok::<(), anyhow::Error>(()) + }) } }) - .detach(); + .detach_and_prompt_err("Failed to change branch", cx, |_, _| None); } fn dismissed(&mut self, cx: &mut ViewContext>) { diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index b61cb405e1104e..f9dfcdd2c3a8f1 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -281,7 +281,7 @@ impl VimGlobals { &mut self, register: Option, editor: Option<&mut Editor>, - cx: &ViewContext, + cx: &mut ViewContext, ) -> Option { let Some(register) = register.filter(|reg| *reg != '"') else { let setting = VimSettings::get_global(cx).use_system_clipboard; diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 86a52aca255e53..6ec708d8b81048 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -620,9 +620,11 @@ impl Vim { let Some(editor) = self.editor() else { return; }; + let newest_selection_empty = editor.update(cx, |editor, cx| { + editor.selections.newest::(cx).is_empty() + }); let editor = editor.read(cx); let editor_mode = editor.mode(); - let newest_selection_empty = editor.selections.newest::(cx).is_empty(); if editor_mode == EditorMode::Full && !newest_selection_empty @@ -717,11 +719,12 @@ impl Vim { globals.recorded_count = None; let selections = self.editor().map(|editor| { - let editor = editor.read(cx); - ( - editor.selections.oldest::(cx), - editor.selections.newest::(cx), - ) + editor.update(cx, |editor, cx| { + ( + editor.selections.oldest::(cx), + editor.selections.newest::(cx), + ) + }) }); if let Some((oldest, newest)) = selections { diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index 1be2567c0af003..0be48bd82ef952 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -72,7 +72,7 @@ impl Render for WelcomePage { h_flex() .size_full() .bg(cx.theme().colors().editor_background) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .child( v_flex() .w_80() diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 28c462fbfc19a8..2317d02a5ac8b7 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -658,7 +658,7 @@ impl Render for Dock { div() .key_context(dispatch_context) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .flex() .bg(cx.theme().colors().panel_background) .border_color(cx.theme().colors().border) @@ -689,7 +689,7 @@ impl Render for Dock { } else { div() .key_context(dispatch_context) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) } } } @@ -826,8 +826,8 @@ pub mod test { } impl Render for TestPanel { - fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - div().id("test").track_focus(&self.focus_handle) + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div().id("test").track_focus(&self.focus_handle(cx)) } } diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 9dc0b957f86de9..2f1c900ecf9c42 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -1173,8 +1173,8 @@ pub 
mod test { } impl Render for TestItem { - fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { - gpui::div().track_focus(&self.focus_handle) + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + gpui::div().track_focus(&self.focus_handle(cx)) } } diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 92038acd0cf3db..198d5c41a5e1c5 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2581,7 +2581,7 @@ impl Render for Pane { v_flex() .key_context(key_context) - .track_focus(&self.focus_handle) + .track_focus(&self.focus_handle(cx)) .size_full() .flex_none() .overflow_hidden() @@ -2703,6 +2703,7 @@ impl Render for Pane { .flex_1() .relative() .group("") + .overflow_hidden() .on_drag_move::(cx.listener(Self::handle_drag_move)) .on_drag_move::(cx.listener(Self::handle_drag_move)) .when(is_local, |div| { @@ -2711,6 +2712,8 @@ impl Render for Pane { .map(|div| { if let Some(item) = self.active_item() { div.v_flex() + .size_full() + .overflow_hidden() .child(self.toolbar.clone()) .child(item.to_any()) } else { diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index ca6abe82f2d77d..6d3daf90d062ee 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -12,6 +12,8 @@ use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros:: use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId}; use project::dap_store::{BreakpointKind, SerializedBreakpoint}; +use language::{LanguageName, Toolchain}; +use project::WorktreeId; use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -284,7 +286,8 @@ define_connection! { // log_message: String, // log message for log breakpoints, otherwise it's Null // ) pub static ref DB: WorkspaceDb<()> = - &[sql!( + &[ + sql!( CREATE TABLE workspaces( workspace_id INTEGER PRIMARY KEY, workspace_location BLOB UNIQUE, @@ -447,7 +450,18 @@ define_connection! { sql!( ALTER TABLE ssh_projects RENAME COLUMN path TO paths; ), - sql!(CREATE TABLE breakpoints ( + sql!( + CREATE TABLE toolchains ( + workspace_id INTEGER, + worktree_id INTEGER, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + PRIMARY KEY (workspace_id, worktree_id, language_name) + ); + ), + sql!( + CREATE TABLE breakpoints ( workspace_id INTEGER NOT NULL, worktree_path BLOB NOT NULL, relative_path BLOB NOT NULL, @@ -457,7 +471,7 @@ define_connection! { FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) ON DELETE CASCADE ON UPDATE CASCADE - ) STRICT; + ); ), ]; } @@ -690,6 +704,7 @@ impl WorkspaceDb { match workspace.location { SerializedWorkspaceLocation::Local(local_paths, local_paths_order) => { conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE local_paths = ? AND workspace_id != ? ))?((&local_paths, workspace.id)) .context("clearing out old locations")?; @@ -738,6 +753,7 @@ impl WorkspaceDb { } SerializedWorkspaceLocation::Ssh(ssh_project) => { conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? ))?((ssh_project.id.0, workspace.id)) .context("clearing out old locations")?; @@ -930,6 +946,7 @@ impl WorkspaceDb { query! { pub async fn delete_workspace_by_id(id: WorkspaceId) -> Result<()> { + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE workspace_id IS ? 
} @@ -944,6 +961,7 @@ impl WorkspaceDb { DELETE FROM dev_server_projects WHERE id = ? ))?(id.0)?; conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE dev_server_project_id IS ? ))?(id.0) @@ -1246,6 +1264,83 @@ impl WorkspaceDb { WHERE workspace_id = ?1 } } + + pub async fn toolchain( + &self, + workspace_id: WorkspaceId, + worktree_id: WorktreeId, + language_name: LanguageName, + ) -> Result> { + self.write(move |this| { + let mut select = this + .select_bound(sql!( + SELECT name, path FROM toolchains WHERE workspace_id = ? AND language_name = ? AND worktree_id = ? + )) + .context("Preparing insertion")?; + + let toolchain: Vec<(String, String)> = + select((workspace_id, language_name.0.to_owned(), worktree_id.to_usize()))?; + + Ok(toolchain.into_iter().next().map(|(name, path)| Toolchain { + name: name.into(), + path: path.into(), + language_name, + })) + }) + .await + } + + pub(crate) async fn toolchains( + &self, + workspace_id: WorkspaceId, + ) -> Result> { + self.write(move |this| { + let mut select = this + .select_bound(sql!( + SELECT name, path, worktree_id, language_name FROM toolchains WHERE workspace_id = ? + )) + .context("Preparing insertion")?; + + let toolchain: Vec<(String, String, u64, String)> = + select(workspace_id)?; + + Ok(toolchain.into_iter().map(|(name, path, worktree_id, language_name)| (Toolchain { + name: name.into(), + path: path.into(), + language_name: LanguageName::new(&language_name), + }, WorktreeId::from_proto(worktree_id))).collect()) + }) + .await + } + pub async fn set_toolchain( + &self, + workspace_id: WorkspaceId, + worktree_id: WorktreeId, + toolchain: Toolchain, + ) -> Result<()> { + self.write(move |conn| { + let mut insert = conn + .exec_bound(sql!( + INSERT INTO toolchains(workspace_id, worktree_id, language_name, name, path) VALUES (?, ?, ?, ?, ?) + ON CONFLICT DO + UPDATE SET + name = ?4, + path = ?5 + + )) + .context("Preparing insertion")?; + + insert(( + workspace_id, + worktree_id.to_usize(), + toolchain.language_name.0.as_ref(), + toolchain.name.as_ref(), + toolchain.path.as_ref(), + ))?; + + Ok(()) + }).await + } } #[cfg(test)] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index fec739e819a2f6..01f548eaf095e9 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -16,7 +16,7 @@ use anyhow::{anyhow, Context as _, Result}; use call::{call_settings::CallSettings, ActiveCall}; use client::{ proto::{self, ErrorCode, PanelId, PeerId}, - ChannelId, Client, ErrorExt, ProjectId, Status, TypedEnvelope, UserStore, + ChannelId, Client, ErrorExt, Status, TypedEnvelope, UserStore, }; use collections::{hash_map, HashMap, HashSet}; use derive_more::{Deref, DerefMut}; @@ -1160,6 +1160,14 @@ impl Workspace { DB.next_id().await.unwrap_or_else(|_| Default::default()) }; + let toolchains = DB.toolchains(workspace_id).await?; + for (toolchain, worktree_id) in toolchains { + project_handle + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + })? + .await; + } let window = if let Some(window) = requesting_window { cx.update_window(window.into(), |_, cx| { cx.replace_root_view(|cx| { @@ -1217,7 +1225,7 @@ impl Workspace { notify_if_database_failed(window, &mut cx); let opened_items = window .update(&mut cx, |_workspace, cx| { - open_items(serialized_workspace, project_paths, app_state, cx) + open_items(serialized_workspace, project_paths, cx) })? 
.await .unwrap_or_default(); @@ -2057,8 +2065,10 @@ impl Workspace { cx: &mut ViewContext, ) -> Task>> { match path { - ResolvedPath::ProjectPath(project_path) => self.open_path(project_path, None, true, cx), - ResolvedPath::AbsPath(path) => self.open_abs_path(path, false, cx), + ResolvedPath::ProjectPath { project_path, .. } => { + self.open_path(project_path, None, true, cx) + } + ResolvedPath::AbsPath { path, .. } => self.open_abs_path(path, false, cx), } } @@ -4485,7 +4495,7 @@ impl Workspace { self.modal_layer.read(cx).has_active_modal() } - pub fn active_modal(&mut self, cx: &AppContext) -> Option> { + pub fn active_modal(&self, cx: &AppContext) -> Option> { self.modal_layer.read(cx).active_modal() } @@ -4585,7 +4595,6 @@ fn window_bounds_env_override() -> Option> { fn open_items( serialized_workspace: Option, mut project_paths_to_open: Vec<(PathBuf, Option)>, - app_state: Arc, cx: &mut ViewContext, ) -> impl 'static + Future>>>>> { let restored_items = serialized_workspace.map(|serialized_workspace| { @@ -4641,14 +4650,20 @@ fn open_items( .enumerate() .map(|(ix, (abs_path, project_path))| { let workspace = workspace.clone(); - cx.spawn(|mut cx| { - let fs = app_state.fs.clone(); - async move { - let file_project_path = project_path?; - if fs.is_dir(&abs_path).await { - None - } else { - Some(( + cx.spawn(|mut cx| async move { + let file_project_path = project_path?; + let abs_path_task = workspace.update(&mut cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.resolve_abs_path(abs_path.to_string_lossy().as_ref(), cx) + }) + }); + + // We only want to open file paths here. If one of the items + // here is a directory, it was already opened further above + // with a `find_or_create_worktree`. + if let Ok(task) = abs_path_task { + if task.await.map_or(true, |p| p.is_file()) { + return Some(( ix, workspace .update(&mut cx, |workspace, cx| { @@ -4656,9 +4671,10 @@ fn open_items( }) .log_err()? .await, - )) + )); } } + None }) }); @@ -5499,58 +5515,6 @@ pub fn create_and_open_local_file( }) } -pub fn join_hosted_project( - hosted_project_id: ProjectId, - app_state: Arc, - cx: &mut AppContext, -) -> Task> { - cx.spawn(|mut cx| async move { - let existing_window = cx.update(|cx| { - cx.windows().into_iter().find_map(|window| { - let workspace = window.downcast::()?; - workspace - .read(cx) - .is_ok_and(|workspace| { - workspace.project().read(cx).hosted_project_id() == Some(hosted_project_id) - }) - .then_some(workspace) - }) - })?; - - let workspace = if let Some(existing_window) = existing_window { - existing_window - } else { - let project = Project::hosted( - hosted_project_id, - app_state.user_store.clone(), - app_state.client.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx.clone(), - ) - .await?; - - let window_bounds_override = window_bounds_env_override(); - cx.update(|cx| { - let mut options = (app_state.build_window_options)(None, cx); - options.window_bounds = window_bounds_override.map(WindowBounds::Windowed); - cx.open_window(options, |cx| { - cx.new_view(|cx| { - Workspace::new(Default::default(), project, app_state.clone(), cx) - }) - }) - })?? 
- }; - - workspace.update(&mut cx, |_, cx| { - cx.activate(true); - cx.activate_window(); - })?; - - Ok(()) - }) -} - pub fn open_ssh_project( window: WindowHandle, connection_options: SshConnectionOptions, @@ -5604,6 +5568,14 @@ pub fn open_ssh_project( ) })?; + let toolchains = DB.toolchains(workspace_id).await?; + for (toolchain, worktree_id) in toolchains { + project + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + })? + .await; + } let mut project_paths_to_open = vec![]; let mut project_path_errors = vec![]; @@ -5646,7 +5618,7 @@ pub fn open_ssh_project( .update(&mut cx, |_, cx| { cx.activate_window(); - open_items(serialized_workspace, project_paths_to_open, app_state, cx) + open_items(serialized_workspace, project_paths_to_open, cx) })? .await?; @@ -5773,7 +5745,7 @@ pub fn join_in_room_project( .read(cx) .collaborators() .values() - .find(|collaborator| collaborator.replica_id == 0)?; + .find(|collaborator| collaborator.is_host)?; Some(collaborator.peer_id) }); diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 9437358e1a4def..da3676f15c768b 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -29,6 +29,7 @@ fs.workspace = true futures.workspace = true fuzzy.workspace = true git.workspace = true +git_hosting_providers.workspace = true gpui.workspace = true ignore.workspace = true language.workspace = true diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 722a7b3f0abfd4..8114f2dd7beba4 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -19,6 +19,7 @@ use futures::{ FutureExt as _, Stream, StreamExt, }; use fuzzy::CharBag; +use git::GitHostingProviderRegistry; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, status::GitStatus, @@ -299,6 +300,7 @@ struct BackgroundScannerState { removed_entries: HashMap, changed_paths: Vec>, prev_snapshot: Snapshot, + git_hosting_provider_registry: Option>, } #[derive(Debug, Clone)] @@ -1004,6 +1006,7 @@ impl LocalWorktree { let share_private_files = self.share_private_files; let next_entry_id = self.next_entry_id.clone(); let fs = self.fs.clone(); + let git_hosting_provider_registry = GitHostingProviderRegistry::try_global(cx); let settings = self.settings.clone(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_executor().spawn({ @@ -1039,6 +1042,7 @@ impl LocalWorktree { paths_to_scan: Default::default(), removed_entries: Default::default(), changed_paths: Default::default(), + git_hosting_provider_registry, }), phase: BackgroundScannerPhase::InitialScan, share_private_files, @@ -2385,6 +2389,12 @@ impl Snapshot { .map(|entry| entry.to_owned()) } + pub fn git_entry(&self, work_directory_path: Arc) -> Option { + self.repository_entries + .get(&RepositoryWorkDirectory(work_directory_path)) + .map(|entry| entry.to_owned()) + } + pub fn git_entries(&self) -> impl Iterator { self.repository_entries.values() } @@ -2942,6 +2952,13 @@ impl BackgroundScannerState { log::trace!("constructed libgit2 repo in {:?}", t0.elapsed()); let work_directory = RepositoryWorkDirectory(work_dir_path.clone()); + if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() { + git_hosting_providers::register_additional_providers( + git_hosting_provider_registry, + repository.clone(), + ); + } + self.snapshot.repository_entries.insert( work_directory.clone(), RepositoryEntry { diff --git a/crates/zed/Cargo.toml 
b/crates/zed/Cargo.toml index e1a2cf422ed159..3d878feb2197c6 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.160.0" +version = "0.161.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] @@ -79,6 +79,7 @@ profiling.workspace = true project.workspace = true project_panel.workspace = true project_symbols.workspace = true +proto.workspace = true quick_action_bar.workspace = true recent_projects.workspace = true release_channel.workspace = true @@ -106,6 +107,7 @@ terminal_view.workspace = true theme.workspace = true theme_selector.workspace = true time.workspace = true +toolchain_selector.workspace = true ui.workspace = true reqwest_client.workspace = true url.workspace = true diff --git a/crates/zed/resources/flatpak/zed.metainfo.xml.in b/crates/zed/resources/flatpak/zed.metainfo.xml.in index 15a50559611965..b8a88d92213df9 100644 --- a/crates/zed/resources/flatpak/zed.metainfo.xml.in +++ b/crates/zed/resources/flatpak/zed.metainfo.xml.in @@ -38,7 +38,7 @@ https://github.com/zed-industries/zed/issues https://zed.dev/faq https://zed.dev/docs/getting-started - https://zed.dev/docs/feedback-and-support + https://zed.dev/community-links https://github.com/zed-industries/zed https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 52c1aaab31f827..2139ab3d450af9 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -32,7 +32,7 @@ use node_runtime::{NodeBinaryOptions, NodeRuntime}; use parking_lot::Mutex; use project::project_settings::ProjectSettings; use recent_projects::{open_ssh_project, SshSettings}; -use release_channel::{AppCommitSha, AppVersion}; +use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use session::{AppSession, Session}; use settings::{ handle_settings_file_changes, watch_config_file, InvalidSettingsError, Settings, SettingsStore, @@ -164,32 +164,29 @@ fn main() { let (open_listener, mut open_rx) = OpenListener::new(); - #[cfg(target_os = "linux")] - { - if env::var("ZED_STATELESS").is_err() { - if crate::zed::listen_for_cli_connections(open_listener.clone()).is_err() { - println!("zed is already running"); - return; + let failed_single_instance_check = + if *db::ZED_STATELESS || *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { + false + } else { + #[cfg(target_os = "linux")] + { + crate::zed::listen_for_cli_connections(open_listener.clone()).is_err() } - } - } - #[cfg(target_os = "windows")] - { - use zed::windows_only_instance::*; - if !check_single_instance() { - println!("zed is already running"); - return; - } - } + #[cfg(target_os = "windows")] + { + !crate::zed::windows_only_instance::check_single_instance() + } - #[cfg(target_os = "macos")] - { - use zed::mac_only_instance::*; - if ensure_only_instance() != IsOnlyInstance::Yes { - println!("zed is already running"); - return; - } + #[cfg(target_os = "macos")] + { + use zed::mac_only_instance::*; + ensure_only_instance() != IsOnlyInstance::Yes + } + }; + if failed_single_instance_check { + println!("zed is already running"); + return; } let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); @@ -334,7 +331,7 @@ fn main() { telemetry.start( system_id.as_ref().map(|id| id.to_string()), installation_id.as_ref().map(|id| id.to_string()), - session_id, + session_id.clone(), cx, ); @@ -370,7 +367,9 @@ fn main() { 
auto_update::init(client.http_client(), cx); reliability::init( client.http_client(), + system_id.as_ref().map(|id| id.to_string()), installation_id.clone().map(|id| id.to_string()), + session_id.clone(), cx, ); @@ -423,6 +422,7 @@ fn main() { app_state.languages.set_theme(cx.theme().clone()); editor::init(cx); image_viewer::init(cx); + repl::notebook::init(cx); diagnostics::init(cx); audio::init(Assets, cx); @@ -443,6 +443,7 @@ fn main() { terminal_view::init(cx); journal::init(app_state.clone(), cx); language_selector::init(cx); + toolchain_selector::init(cx); theme_selector::init(cx); language_tools::init(cx); call::init(app_state.client.clone(), app_state.user_store.clone(), cx); diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 9d76a6c47f78a2..b02afb8c0df484 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -1,13 +1,14 @@ use anyhow::{Context, Result}; use backtrace::{self, Backtrace}; use chrono::Utc; -use client::telemetry; +use client::{telemetry, TelemetrySettings}; use db::kvp::KEY_VALUE_STORE; use gpui::{AppContext, SemanticVersion}; use http_client::{HttpRequestExt, Method}; use http_client::{self, HttpClient, HttpClientWithUrl}; use paths::{crashes_dir, crashes_retired_dir}; +use project::Project; use release_channel::ReleaseChannel; use release_channel::RELEASE_CHANNEL; use settings::Settings; @@ -21,6 +22,7 @@ use std::{io::Write, panic, sync::atomic::AtomicU32, thread}; use telemetry_events::LocationData; use telemetry_events::Panic; use telemetry_events::PanicRequest; +use url::Url; use util::ResultExt; use crate::stdout_is_a_pty; @@ -133,13 +135,73 @@ pub fn init_panic_hook( pub fn init( http_client: Arc, + system_id: Option, installation_id: Option, + session_id: String, cx: &mut AppContext, ) { #[cfg(target_os = "macos")] monitor_main_thread_hangs(http_client.clone(), installation_id.clone(), cx); - upload_panics_and_crashes(http_client, installation_id, cx) + let Some(panic_report_url) = http_client + .build_zed_api_url("/telemetry/panics", &[]) + .log_err() + else { + return; + }; + + upload_panics_and_crashes( + http_client.clone(), + panic_report_url.clone(), + installation_id.clone(), + cx, + ); + + cx.observe_new_models(move |project: &mut Project, cx| { + let http_client = http_client.clone(); + let panic_report_url = panic_report_url.clone(); + let session_id = session_id.clone(); + let installation_id = installation_id.clone(); + let system_id = system_id.clone(); + + if let Some(ssh_client) = project.ssh_client() { + ssh_client.update(cx, |client, cx| { + if TelemetrySettings::get_global(cx).diagnostics { + let request = client.proto_client().request(proto::GetPanicFiles {}); + cx.background_executor() + .spawn(async move { + let panic_files = request.await?; + for file in panic_files.file_contents { + let panic: Option = serde_json::from_str(&file) + .log_err() + .or_else(|| { + file.lines() + .next() + .and_then(|line| serde_json::from_str(line).ok()) + }) + .unwrap_or_else(|| { + log::error!("failed to deserialize panic file {:?}", file); + None + }); + + if let Some(mut panic) = panic { + panic.session_id = session_id.clone(); + panic.system_id = system_id.clone(); + panic.installation_id = installation_id.clone(); + + upload_panic(&http_client, &panic_report_url, panic, &mut None) + .await?; + } + } + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + }) + } + }) + .detach(); } #[cfg(target_os = "macos")] @@ -346,16 +408,18 @@ pub fn monitor_main_thread_hangs( fn upload_panics_and_crashes( 
http: Arc, + panic_report_url: Url, installation_id: Option, cx: &AppContext, ) { let telemetry_settings = *client::TelemetrySettings::get_global(cx); cx.background_executor() .spawn(async move { - let most_recent_panic = upload_previous_panics(http.clone(), telemetry_settings) - .await - .log_err() - .flatten(); + let most_recent_panic = + upload_previous_panics(http.clone(), &panic_report_url, telemetry_settings) + .await + .log_err() + .flatten(); upload_previous_crashes(http, most_recent_panic, installation_id, telemetry_settings) .await .log_err() @@ -366,9 +430,9 @@ fn upload_panics_and_crashes( /// Uploads panics via `zed.dev`. async fn upload_previous_panics( http: Arc, + panic_report_url: &Url, telemetry_settings: client::TelemetrySettings, -) -> Result> { - let panic_report_url = http.build_zed_api_url("/telemetry/panics", &[])?; +) -> anyhow::Result> { let mut children = smol::fs::read_dir(paths::logs_dir()).await?; let mut most_recent_panic = None; @@ -396,7 +460,7 @@ async fn upload_previous_panics( .context("error reading panic file")?; let panic: Option = serde_json::from_str(&panic_file_content) - .ok() + .log_err() .or_else(|| { panic_file_content .lines() @@ -409,26 +473,8 @@ async fn upload_previous_panics( }); if let Some(panic) = panic { - most_recent_panic = Some((panic.panicked_on, panic.payload.clone())); - - let json_bytes = serde_json::to_vec(&PanicRequest { panic }).unwrap(); - - let Some(checksum) = client::telemetry::calculate_json_checksum(&json_bytes) else { + if !upload_panic(&http, &panic_report_url, panic, &mut most_recent_panic).await? { continue; - }; - - let Ok(request) = http_client::Request::builder() - .method(Method::POST) - .uri(panic_report_url.as_ref()) - .header("x-zed-checksum", checksum) - .body(json_bytes.into()) - else { - continue; - }; - - let response = http.send(request).await.context("error sending panic")?; - if !response.status().is_success() { - log::error!("Error uploading panic to server: {}", response.status()); } } } @@ -438,9 +484,42 @@ async fn upload_previous_panics( .context("error removing panic") .log_err(); } - Ok::<_, anyhow::Error>(most_recent_panic) + Ok(most_recent_panic) } +async fn upload_panic( + http: &Arc, + panic_report_url: &Url, + panic: telemetry_events::Panic, + most_recent_panic: &mut Option<(i64, String)>, +) -> Result { + *most_recent_panic = Some((panic.panicked_on, panic.payload.clone())); + + let json_bytes = serde_json::to_vec(&PanicRequest { + panic: panic.clone(), + }) + .unwrap(); + + let Some(checksum) = client::telemetry::calculate_json_checksum(&json_bytes) else { + return Ok(false); + }; + + let Ok(request) = http_client::Request::builder() + .method(Method::POST) + .uri(panic_report_url.as_ref()) + .header("x-zed-checksum", checksum) + .body(json_bytes.into()) + else { + return Ok(false); + }; + + let response = http.send(request).await.context("error sending panic")?; + if !response.status().is_success() { + log::error!("Error uploading panic to server: {}", response.status()); + } + + Ok(true) +} const LAST_CRASH_UPLOADED: &str = "LAST_CRASH_UPLOADED"; /// upload crashes from apple's diagnostic reports to our server. 
diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index b6f5dadccd31cf..ceef72b008015c 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -69,7 +69,6 @@ actions!( Hide, HideOthers, Minimize, - OpenDefaultKeymap, OpenDefaultSettings, OpenProjectSettings, OpenProjectTasks, @@ -209,6 +208,8 @@ pub fn initialize_workspace( activity_indicator::ActivityIndicator::new(workspace, app_state.languages.clone(), cx); let active_buffer_language = cx.new_view(|_| language_selector::ActiveBufferLanguage::new(workspace)); + let active_toolchain_language = + cx.new_view(|cx| toolchain_selector::ActiveToolchain::new(workspace, cx)); let vim_mode_indicator = cx.new_view(vim::ModeIndicator::new); let cursor_position = cx.new_view(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); @@ -217,6 +218,7 @@ pub fn initialize_workspace( status_bar.add_left_item(activity_indicator, cx); status_bar.add_right_item(inline_completion_button, cx); status_bar.add_right_item(active_buffer_language, cx); + status_bar.add_right_item(active_toolchain_language, cx); status_bar.add_right_item(vim_mode_indicator, cx); status_bar.add_right_item(cursor_position, cx); }); @@ -477,7 +479,7 @@ pub fn initialize_workspace( .register_action(open_project_tasks_file) .register_action( move |workspace: &mut Workspace, - _: &OpenDefaultKeymap, + _: &zed_actions::OpenDefaultKeymap, cx: &mut ViewContext| { open_bundled_file( workspace, @@ -3510,6 +3512,7 @@ mod tests { app_state.client.telemetry().clone(), cx, ); + repl::notebook::init(cx); tasks_ui::init(cx); debugger_ui::init(cx); initialize_workspace(app_state.clone(), prompt_builder, cx); diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index 34c19932dd6313..5c01724ba74d79 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -18,7 +18,10 @@ pub fn app_menus() -> Vec { MenuItem::action("Open Settings", super::OpenSettings), MenuItem::action("Open Key Bindings", zed_actions::OpenKeymap), MenuItem::action("Open Default Settings", super::OpenDefaultSettings), - MenuItem::action("Open Default Key Bindings", super::OpenDefaultKeymap), + MenuItem::action( + "Open Default Key Bindings", + zed_actions::OpenDefaultKeymap, + ), MenuItem::action("Open Project Settings", super::OpenProjectSettings), MenuItem::action("Select Theme...", theme_selector::Toggle::default()), ], diff --git a/crates/zed/src/zed/mac_only_instance.rs b/crates/zed/src/zed/mac_only_instance.rs index 2c8f564201be02..716c2224e31924 100644 --- a/crates/zed/src/zed/mac_only_instance.rs +++ b/crates/zed/src/zed/mac_only_instance.rs @@ -87,10 +87,6 @@ pub enum IsOnlyInstance { } pub fn ensure_only_instance() -> IsOnlyInstance { - if *db::ZED_STATELESS || *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { - return IsOnlyInstance::Yes; - } - if check_got_handshake() { return IsOnlyInstance::No; } diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index f1cfc43a6a171c..b37bc78dcebbed 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -47,6 +47,9 @@ impl OpenRequest { this.parse_file_path(file) } else if let Some(file) = url.strip_prefix("zed://file") { this.parse_file_path(file) + } else if let Some(file) = url.strip_prefix("zed://ssh") { + let ssh_url = "ssh:/".to_string() + file; + this.parse_ssh_file_path(&ssh_url, cx)? } else if url.starts_with("ssh://") { this.parse_ssh_file_path(&url, cx)? 
} else if let Some(request_path) = parse_zed_link(&url, cx) { diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index e8d32e7ed00010..2645650bfae70e 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -17,14 +17,6 @@ fn retrieve_app_instance_event_identifier() -> &'static str { } pub fn check_single_instance() -> bool { - if *db::ZED_STATELESS || *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { - return true; - } - - check_single_instance_event() -} - -fn check_single_instance_event() -> bool { unsafe { CreateEventW( None, diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index cedacb6d8495c8..7ea5c923c26522 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -26,6 +26,7 @@ actions!( zed, [ OpenSettings, + OpenDefaultKeymap, OpenAccountSettings, OpenServerSettings, Quit, diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 967b91d59de035..8383e990d982df 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -129,10 +129,3 @@ - [Local Collaboration](./development/local-collaboration.md) - [Release Process](./development/releases.md) - [Debugging Crashes](./development/debugging-crashes.md) - -# Community - -- [Code of Conduct](./code-of-conduct.md) -- [Contributing to Zed](./contribute-to-zed.md) -- [Conversations](./conversations.md) -- [Feedback and Support](./feedback-and-support.md) diff --git a/docs/src/assistant/prompting.md b/docs/src/assistant/prompting.md index 0dca671b47b79a..18bda28bbf8fa9 100644 --- a/docs/src/assistant/prompting.md +++ b/docs/src/assistant/prompting.md @@ -137,7 +137,7 @@ Zed has the following internal prompt templates: - `content_prompt.hbs`: Used for generating content in the editor. - `terminal_assistant_prompt.hbs`: Used for the terminal assistant feature. -- `edit_workflow.hbs`: Used for generating the edit workflow prompt. +- `suggest_edits.hbs`: Used for generating the model instructions for the XML Suggest Edits should return. - `step_resolution.hbs`: Used for generating the step resolution prompt. At this point it is unknown if we will expand templates further to be user-creatable. @@ -215,7 +215,7 @@ The following templates can be overridden: given system information and latest terminal output if relevant. ``` -3. `edit_workflow.hbs`: Used for generating the edit workflow prompt. +3. `suggest_edits.hbs`: Used for generating the model instructions for the XML Suggest Edits should return. 4. `step_resolution.hbs`: Used for generating the step resolution prompt. diff --git a/docs/src/code-of-conduct.md b/docs/src/code-of-conduct.md deleted file mode 100644 index 6c168dcc7ddcc9..00000000000000 --- a/docs/src/code-of-conduct.md +++ /dev/null @@ -1,127 +0,0 @@ -# Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. 
- -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -- Demonstrating empathy and kindness toward other people -- Being respectful of differing opinions, viewpoints, and experiences -- Giving and gracefully accepting constructive feedback -- Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -- Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -- The use of sexualized language or imagery, and sexual attention or - advances of any kind -- Trolling, insulting or derogatory comments, and personal or political attacks -- Public or private harassment -- Publishing others' private information, such as a physical or email - address, without their explicit permission -- Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -hi@zed.dev. -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. 
- -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -[homepage]: https://www.contributor-covenant.org - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html). - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -For answers to common questions about this code of conduct, see [the Contributor Covenant FAQ](https://www.contributor-covenant.org/faq). -For translations, see [Contributor Covenant Translations](https://www.contributor-covenant.org/translations). diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 78c7c62c941db7..d8105b4537b684 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2042,10 +2042,14 @@ Run the `theme selector: toggle` action in the command palette to see a current "folder_icons": true, "git_status": true, "indent_size": 20, + "indent_guides": true, "auto_reveal_entries": true, "auto_fold_dirs": true, "scrollbar": { "show": null + }, + "indent_guides": { + "show": "always" } } } @@ -2163,21 +2167,54 @@ Run the `theme selector: toggle` action in the command palette to see a current - Setting: `indent_size` - Default: `20` -### Scrollbar +### Indent Guides: Show + +- Description: Whether to show indent guides in the project panel. Possible values: "always", "never". +- Setting: `indent_guides` + +```json +"indent_guides": { + "show": "always" +} +``` + +**Options** + +1. Show indent guides in the project panel + +```json +{ + "indent_guides": { + "show": "always" + } +} +``` + +2. Hide indent guides in the project panel + +```json +{ + "indent_guides": { + "show": "never" + } +} +``` + +### Scrollbar: Show -- Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. +- Description: Whether to show a scrollbar in the project panel. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. - Setting: `scrollbar` - Default: ```json "scrollbar": { - "show": null + "show": null } ``` **Options** -1. Show scrollbar in project panel +1. Show scrollbar in the project panel ```json { @@ -2187,7 +2224,7 @@ Run the `theme selector: toggle` action in the command palette to see a current } ``` -2. Hide scrollbar in project panel +2. 
Hide scrollbar in the project panel ```json { @@ -2232,6 +2269,12 @@ Run the `theme selector: toggle` action in the command palette to see a current "indent_size": 20, "auto_reveal_entries": true, "auto_fold_dirs": true, + "indent_guides": { + "show": "always" + }, + "scrollbar": { + "show": null + } } ``` diff --git a/docs/src/contribute-to-zed.md b/docs/src/contribute-to-zed.md deleted file mode 100644 index 14a189b0e5a734..00000000000000 --- a/docs/src/contribute-to-zed.md +++ /dev/null @@ -1,15 +0,0 @@ -# Contributing to Zed - -Thank you for your interest in contributing to the Zed! Before making contributions, we recommend reading our [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md) guidelines. This document provides a detailed guide to contributing to Zed. - -### Issues - -If you're eager to dive in and start contributing immediately, check out the issues in the [issue tracker](https://github.com/zed-industries/zed/issues). - -## Public Roadmap - -If you're wanting to contribute by building out a feature, we recommend taking a look at our [#roadmap](https://zed.dev/roadmap). This roadmap documents, at a high level, the larger features we're planning to build out in the future. - -## Zed GitHub Repository - -Check [our codebase out on GitHub](https://github.com/zed-industries/zed). diff --git a/docs/src/conversations.md b/docs/src/conversations.md deleted file mode 100644 index bab2fd6d5ddfa0..00000000000000 --- a/docs/src/conversations.md +++ /dev/null @@ -1,21 +0,0 @@ -# Conversations - -## Zed Channels - -Our Zed channel tree is public. You can find us hanging out and writing code across various channels. The root channel, [#zed](https://zed.dev/channel/zed-283), is a great place to ask questions and get to know other Zed users. - -## Discord Channel - -[Zed Community](https://discord.gg/zed-community) is our official Discord channel. We drop in pretty regularly to answer questions and chat with the community. - -## Twitter - -We use Twitter to highlight new Zed features and to share our blog posts. Follow us [@zeddotdev](https://x.com/zeddotdev). - -## YouTube - -We have a [YouTube channel](https://www.youtube.com/@zeddotdev) where we post longer-form videos about Zed. - -## Blog - -Our [blog](https://zed.dev/blog) gets frequent updates. We post about big releases, new features, and under-the-hood Zed tech. diff --git a/docs/src/development.md b/docs/src/development.md index ecd68a1181f82f..96993389c21a9d 100644 --- a/docs/src/development.md +++ b/docs/src/development.md @@ -15,5 +15,5 @@ If you'd like to develop collaboration features, additionally see: - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md) - [Releases](./development/releases.md) - [Debugging Crashes](./development/debugging-crashes.md) -- [Code of Conduct](./code-of-conduct.md) +- [Code of Conduct](https://zed.dev/code-of-conduct) - [Zed Contributor License](https://zed.dev/cla) diff --git a/docs/src/feedback-and-support.md b/docs/src/feedback-and-support.md deleted file mode 100644 index 2e4be92caaa589..00000000000000 --- a/docs/src/feedback-and-support.md +++ /dev/null @@ -1,39 +0,0 @@ -# Feedback and Support - -## Community Support - -Our [Discord community](https://discord.gg/zed-community) is vibrant and a great place to ask questions and learn from others. Be sure to check out the `#support` channel. - -Use the [community forum](https://github.com/zed-industries/zed/discussions) to ask questions and learn from one another. 
We will be present in the forum and answering questions as well. - -## Frequently Asked Questions - -Our [FAQ](https://zed.dev/faq) is a great place to start for common questions about Zed. - -## Issue Tracking - -We track our issues at [`zed-industries/zed`](https://github.com/zed-industries/zed/issues). - -### Feature Requests - -Try to focus on the things that are most critical to you rather than exhaustively listing all features another editor you have used has. - -Command palette: `zed: request feature` - -### Bug Reports - -Try to add as much detail as possible, if it is not obvious to reproduce. Let us know how severe the problem is for you; is the issue more of a minor inconvenience or something that would prevent you from using Zed? - -Command palette: `zed: file bug report` - -## Feedback Channels - -In-app feedback can be submitted from within Zed via the feedback modal. - -Command palette: `feedback: give feedback` - -If you prefer to write up your thoughts as an email, you can send them to [hi@zed.dev](mailto:hi@zed.dev). - -## Merch Store - -We have a [merch store](https://zedindustries.creator-spring.com/) where you can buy Zed stickers, shirts, and more. diff --git a/docs/src/languages/dart.md b/docs/src/languages/dart.md index 6571166b6d3061..32f312e5dd04eb 100644 --- a/docs/src/languages/dart.md +++ b/docs/src/languages/dart.md @@ -1,6 +1,6 @@ # Dart -Dart support is available through the [Dart extension](https://github.com/zed-industries/zed/tree/main/extensions/dart). +Dart support is available through the [Dart extension](https://github.com/zed-extensions/dart). - Tree Sitter: [UserNobody14/tree-sitter-dart](https://github.com/UserNobody14/tree-sitter-dart) - Language Server: [dart language-server](https://github.com/dart-lang/sdk) diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 330b5fa9d0151d..2af91f1fdc7861 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -163,6 +163,23 @@ Here's a snippet for Zed settings.json (the language server will restart automat } ``` +### Multi-project workspaces + +If you want rust-analyzer to analyze multiple Rust projects in the same folder that are not listed in `[members]` in the Cargo workspace, +you can list them in `linkedProjects` in the local project settings: + +```json +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "linkedProjects": ["./path/to/a/Cargo.toml", "./path/to/b/Cargo.toml"] + } + } + } +} +``` + ### Snippets There's a way to get custom completion items from rust-analyzer, that will transform the code according to the snippet body: diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 708d0e0b393eae..771be830bc4a12 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -8,6 +8,10 @@ Remote Development allows you to code at the speed of thought, even when your co Remote development requires two computers, your local machine that runs the Zed UI and the remote server which runs a Zed headless server. The two communicate over SSH, so you will need to be able to SSH from your local machine into the remote server to use this feature. +![Architectural overview of Zed Remote Development](https://zed.dev/img/remote-development/diagram.png) + +On your local machine, Zed runs its UI, talks to language models, uses Tree-sitter to parse and syntax-highlight code, and stores unsaved changes and recent projects. The source code, language servers, tasks, and the terminal all run on the remote server.
+ > **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no longer use that mode. ## Setup @@ -15,11 +19,11 @@ Remote development requires two computers, your local machine that runs the Zed 1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). You need at least Zed v0.159. 1. Open the remote projects dialogue with cmd-shift-p remote or cmd-control-o. 1. Click "Connect New Server" and enter the command you use to SSH into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass. -1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, it will download the latest version of the Zed server and upload it to the remote over SSH. +1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, Zed will download the server on the remote host and start it. 1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server. > **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos. -For simple cases where you don't need any SSH arguments, you can run `zed ssh://[<user>@]<host>[:<port>]/<path>` to open a remote folder/file directly. +For simple cases where you don't need any SSH arguments, you can run `zed ssh://[<user>@]<host>[:<port>]/<path>` to open a remote folder/file directly. If you'd like to hotlink into an SSH project, use a link of the format: `zed://ssh/[<user>@]<host>[:<port>]/<path>`. ## Supported platforms @@ -109,7 +113,9 @@ Any prompts that SSH needs will be shown in the UI, so you can verify host keys, Once the master connection is established, Zed will check to see if the remote server binary is present in `~/.zed_server` on the remote, and that its version matches the current version of Zed that you're using. -If it is not there or the version mismatches, Zed will try to download the latest version. By default, it will download from `https://zed.dev` directly, but if you set: `{"remote_server": {"download":false}}` in your local settings, it will download the binary to your local machine and then upload it to the remote server. +If it is not there or the version mismatches, Zed will try to download the latest version. By default, it will download from `https://zed.dev` directly, but if you set: `{"upload_binary_over_ssh":true}` in your settings for that server, it will download the binary to your local machine and then upload it to the remote server. + +If you'd like to maintain the server binary yourself, you can. You can either download our prebuilt versions from [GitHub](https://github.com/zed-industries/zed/releases), or [build your own](https://zed.dev/docs/development) with `cargo build -p remote_server --release`. If you do this, you must upload it to `~/.zed_server/zed-remote-server-{RELEASE_CHANNEL}-{OS}-{ARCH}` on the server, for example `~/.zed_server/zed-remote-server-preview-linux-x86_64`. The version must exactly match the version of Zed itself you are using.
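For example, opting a single server into uploading the binary over SSH might look like the sketch below in `settings.json`. This assumes the per-server options live under `ssh_connections` alongside the host definition, and the host name here is a placeholder; adapt it to the entry you already use for that server.

```json
{
  "ssh_connections": [
    {
      // Placeholder host; use the entry you already connect with.
      "host": "my-server.example.com",
      // Download or build the remote server binary locally and push it
      // over SSH instead of letting the remote host fetch it from zed.dev.
      "upload_binary_over_ssh": true
    }
  ]
}
```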
## Maintaining the SSH connection diff --git a/extensions/dart/Cargo.toml b/extensions/dart/Cargo.toml deleted file mode 100644 index 3d79e104c105c6..00000000000000 --- a/extensions/dart/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_dart" -version = "0.1.1" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/dart.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/dart/LICENSE-APACHE b/extensions/dart/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3affa..00000000000000 --- a/extensions/dart/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/dart/README.md b/extensions/dart/README.md deleted file mode 100644 index bf6976dd60577a..00000000000000 --- a/extensions/dart/README.md +++ /dev/null @@ -1,6 +0,0 @@ -## Roadmap - -1. Add `dart run` command. -2. Add `dart test` command. -3. Add `flutter test --name` command, to allow running a single test or group of tests. -4. Auto hot reload Flutter app when files change. diff --git a/extensions/dart/extension.toml b/extensions/dart/extension.toml deleted file mode 100644 index 5ea8c37c2f917f..00000000000000 --- a/extensions/dart/extension.toml +++ /dev/null @@ -1,16 +0,0 @@ -id = "dart" -name = "Dart" -description = "Dart support." -version = "0.1.1" -schema_version = 1 -authors = ["Abdullah Alsigar ", "Flo ", "ybbond "] -repository = "https://github.com/zed-industries/zed" - -[language_servers.dart] -name = "Dart LSP" -language = "Dart" -languages = ["Dart"] - -[grammars.dart] -repository = "https://github.com/UserNobody14/tree-sitter-dart" -commit = "6da46473ab8accb13da48113f4634e729a71d335" diff --git a/extensions/dart/languages/dart/brackets.scm b/extensions/dart/languages/dart/brackets.scm deleted file mode 100644 index 8d96f95f864cef..00000000000000 --- a/extensions/dart/languages/dart/brackets.scm +++ /dev/null @@ -1,6 +0,0 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("\"" @open "\"" @close) -("'" @open "'" @close) diff --git a/extensions/dart/languages/dart/config.toml b/extensions/dart/languages/dart/config.toml deleted file mode 100644 index d723d4d6d3713b..00000000000000 --- a/extensions/dart/languages/dart/config.toml +++ /dev/null @@ -1,15 +0,0 @@ -name = "Dart" -grammar = "dart" -path_suffixes = ["dart"] -line_comments = ["// "] -autoclose_before = ";:.,=}])>" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "<", end = ">", close = true, newline = false}, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["string"] }, - { start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] }, - { start = "`", end = "`", close = true, newline = false, not_in = ["string", "comment"] }, -] diff --git a/extensions/dart/languages/dart/highlights.scm b/extensions/dart/languages/dart/highlights.scm deleted file mode 100644 index e75a86cd79f105..00000000000000 --- a/extensions/dart/languages/dart/highlights.scm +++ /dev/null @@ -1,269 +0,0 @@ -(dotted_identifier_list) @string - -; Methods -; -------------------- -(super) @function - -(function_expression_body (identifier) @type) -; ((identifier)(selector 
(argument_part)) @function) - -(((identifier) @function (#match? @function "^_?[a-z]")) - . (selector . (argument_part))) @function - -; Annotations -; -------------------- -(annotation - name: (identifier) @attribute) - -; Operators and Tokens -; -------------------- -(template_substitution - "$" @punctuation.special - "{" @punctuation.special - "}" @punctuation.special) @none - -(template_substitution - "$" @punctuation.special - (identifier_dollar_escaped) @variable) @none - -(escape_sequence) @string.escape - -[ - "@" - "=>" - ".." - "??" - "==" - "?" - ":" - "&&" - "%" - "<" - ">" - "=" - ">=" - "<=" - "||" - (multiplicative_operator) - (increment_operator) - (is_operator) - (prefix_operator) - (equality_operator) - (additive_operator) - ] @operator - -[ - "(" - ")" - "[" - "]" - "{" - "}" - ] @punctuation.bracket - -; Delimiters -; -------------------- -[ - ";" - "." - "," - ] @punctuation.delimiter - -; Types -; -------------------- -(class_definition - name: (identifier) @type) -(constructor_signature - name: (identifier) @type) -(scoped_identifier - scope: (identifier) @type) -(function_signature - name: (identifier) @function.method) - -(getter_signature - (identifier) @function.method) - -(setter_signature - name: (identifier) @function.method) -(enum_declaration - name: (identifier) @type) -(enum_constant - name: (identifier) @type) -(void_type) @type - -((scoped_identifier - scope: (identifier) @type - name: (identifier) @type) - (#match? @type "^[a-zA-Z]")) - -(type_identifier) @type - -(type_alias - (type_identifier) @type.definition) - -; Variables -; -------------------- -; var keyword -(inferred_type) @keyword - -((identifier) @type - (#match? @type "^_?[A-Z].*[a-z]")) - -("Function" @type) - -; properties -(unconditional_assignable_selector - (identifier) @property) - -(conditional_assignable_selector - (identifier) @property) - -; assignments -(assignment_expression - left: (assignable_expression) @variable) - -(this) @variable.builtin - -; Parameters -; -------------------- -(formal_parameter - name: (identifier) @variable.parameter) - -(named_argument - (label - (identifier) @variable.parameter)) - -; Literals -; -------------------- -[ - (hex_integer_literal) - (decimal_integer_literal) - (decimal_floating_point_literal) - ; TODO: inaccessible nodes - ; (octal_integer_literal) - ; (hex_floating_point_literal) - ] @number - -(symbol_literal) @string.special.symbol - -(string_literal) @string -(true) @boolean -(false) @boolean -(null_literal) @constant.builtin - -(comment) @comment - -(documentation_comment) @comment.documentation - -; Keywords -; -------------------- -[ - "import" - "library" - "export" - "as" - "show" - "hide" - ] @keyword.import - -; Reserved words (cannot be used as identifiers) -[ - (case_builtin) - "late" - "required" - "extension" - "on" - "class" - "enum" - "extends" - "in" - "is" - "new" - "super" - "with" - ] @keyword - -"return" @keyword.return - -; Built in identifiers: -; alone these are marked as keywords -[ - "deferred" - "factory" - "get" - "implements" - "interface" - "library" - "operator" - "mixin" - "part" - "set" - "typedef" - ] @keyword - -[ - "async" - "async*" - "sync*" - "await" - "yield" -] @keyword.coroutine - -[ - (const_builtin) - (final_builtin) - "abstract" - "covariant" - "dynamic" - "external" - "static" - "final" - "base" - "sealed" - ] @type.qualifier - -; when used as an identifier: -((identifier) @variable.builtin - (#any-of? 
@variable.builtin - "abstract" - "as" - "covariant" - "deferred" - "dynamic" - "export" - "external" - "factory" - "Function" - "get" - "implements" - "import" - "interface" - "library" - "operator" - "mixin" - "part" - "set" - "static" - "typedef")) - -[ - "if" - "else" - "switch" - "default" -] @keyword.conditional - -[ - "try" - "throw" - "catch" - "finally" - (break_statement) -] @keyword.exception - -[ - "do" - "while" - "continue" - "for" -] @keyword.repeat diff --git a/extensions/dart/languages/dart/indents.scm b/extensions/dart/languages/dart/indents.scm deleted file mode 100644 index 112b414aa45f27..00000000000000 --- a/extensions/dart/languages/dart/indents.scm +++ /dev/null @@ -1,3 +0,0 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/extensions/dart/languages/dart/outline.scm b/extensions/dart/languages/dart/outline.scm deleted file mode 100644 index 4d6f8c1cb75b69..00000000000000 --- a/extensions/dart/languages/dart/outline.scm +++ /dev/null @@ -1,18 +0,0 @@ -(class_definition - "class" @context - name: (_) @name) @item - -(function_signature - name: (_) @name) @item - -(getter_signature - "get" @context - name: (_) @name) @item - -(setter_signature - "set" @context - name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item diff --git a/extensions/dart/languages/dart/runnables.scm b/extensions/dart/languages/dart/runnables.scm deleted file mode 100644 index 509cd38d05d78a..00000000000000 --- a/extensions/dart/languages/dart/runnables.scm +++ /dev/null @@ -1,45 +0,0 @@ -; Flutter main -( - ( - (import_or_export - (library_import - (import_specification - ("import" - (configurable_uri - (uri - (string_literal) @_import - (#match? @_import "package:flutter/(material|widgets|cupertino).dart") - (#not-match? @_import "package:flutter_test/flutter_test.dart") - (#not-match? @_import "package:test/test.dart") - )))))) - ( - (function_signature - name: (_) @run - ) - (#eq? @run "main") - ) - (#set! tag flutter-main) - ) -) - -; Flutter test main -( - ( - (import_or_export - (library_import - (import_specification - ("import" - (configurable_uri - (uri - (string_literal) @_import - (#match? @_import "package:flutter_test/flutter_test.dart") - )))))) - ( - (function_signature - name: (_) @run - ) - (#eq? @run "main") - ) - (#set! 
tag flutter-test-main) - ) -) diff --git a/extensions/dart/languages/dart/tasks.json b/extensions/dart/languages/dart/tasks.json deleted file mode 100644 index c177d04cec190f..00000000000000 --- a/extensions/dart/languages/dart/tasks.json +++ /dev/null @@ -1,26 +0,0 @@ -[ - { - "label": "flutter run", - "command": "flutter", - "args": ["run"], - "tags": ["flutter-main"] - }, - { - "label": "fvm flutter run", - "command": "fvm flutter", - "args": ["run"], - "tags": ["flutter-main"] - }, - { - "label": "flutter test $ZED_STEM", - "command": "flutter", - "args": ["test", "$ZED_FILE"], - "tags": ["flutter-test-main"] - }, - { - "label": "fvm flutter test $ZED_STEM", - "command": "fvm flutter", - "args": ["test", "$ZED_FILE"], - "tags": ["flutter-test-main"] - } -] diff --git a/extensions/dart/src/dart.rs b/extensions/dart/src/dart.rs deleted file mode 100644 index 38a2cf25a690f5..00000000000000 --- a/extensions/dart/src/dart.rs +++ /dev/null @@ -1,162 +0,0 @@ -use zed::lsp::CompletionKind; -use zed::settings::LspSettings; -use zed::{CodeLabel, CodeLabelSpan}; -use zed_extension_api::{self as zed, serde_json, Result}; - -struct DartBinary { - pub path: String, - pub args: Option>, -} - -struct DartExtension; - -impl DartExtension { - fn language_server_binary( - &mut self, - _language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary_settings = LspSettings::for_worktree("dart", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.binary); - let binary_args = binary_settings - .as_ref() - .and_then(|binary_settings| binary_settings.arguments.clone()); - - if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { - return Ok(DartBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = worktree.which("dart") { - return Ok(DartBinary { - path, - args: binary_args, - }); - } - - Err( - "dart must be installed from dart.dev/get-dart or pointed to by the LSP binary settings" - .to_string(), - ) - } -} - -impl zed::Extension for DartExtension { - fn new() -> Self { - Self - } - - fn language_server_command( - &mut self, - language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let dart_binary = self.language_server_binary(language_server_id, worktree)?; - - Ok(zed::Command { - command: dart_binary.path, - args: dart_binary.args.unwrap_or_else(|| { - vec!["language-server".to_string(), "--protocol=lsp".to_string()] - }), - env: Default::default(), - }) - } - - fn language_server_workspace_configuration( - &mut self, - _language_server_id: &zed::LanguageServerId, - worktree: &zed::Worktree, - ) -> Result> { - let settings = LspSettings::for_worktree("dart", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.settings.clone()) - .unwrap_or_default(); - - Ok(Some(serde_json::json!({ - "dart": settings - }))) - } - - fn label_for_completion( - &self, - _language_server_id: &zed::LanguageServerId, - completion: zed::lsp::Completion, - ) -> Option { - let arrow = " → "; - - match completion.kind? 
{ - CompletionKind::Class => Some(CodeLabel { - filter_range: (0..completion.label.len()).into(), - spans: vec![CodeLabelSpan::literal( - completion.label, - Some("type".into()), - )], - code: String::new(), - }), - CompletionKind::Function | CompletionKind::Constructor | CompletionKind::Method => { - let mut parts = completion.detail.as_ref()?.split(arrow); - let (name, _) = completion.label.split_once('(')?; - let parameter_list = parts.next()?; - let return_type = parts.next()?; - let fn_name = " a"; - let fat_arrow = " => "; - let call_expr = "();"; - - let code = - format!("{return_type}{fn_name}{parameter_list}{fat_arrow}{name}{call_expr}"); - - let parameter_list_start = return_type.len() + fn_name.len(); - - Some(CodeLabel { - spans: vec![ - CodeLabelSpan::code_range( - code.len() - call_expr.len() - name.len()..code.len() - call_expr.len(), - ), - CodeLabelSpan::code_range( - parameter_list_start..parameter_list_start + parameter_list.len(), - ), - CodeLabelSpan::literal(arrow, None), - CodeLabelSpan::code_range(0..return_type.len()), - ], - filter_range: (0..name.len()).into(), - code, - }) - } - CompletionKind::Property => { - let class_start = "class A {"; - let get = " get "; - let property_end = " => a; }"; - let ty = completion.detail?; - let name = completion.label; - - let code = format!("{class_start}{ty}{get}{name}{property_end}"); - let name_start = class_start.len() + ty.len() + get.len(); - - Some(CodeLabel { - spans: vec![ - CodeLabelSpan::code_range(name_start..name_start + name.len()), - CodeLabelSpan::literal(arrow, None), - CodeLabelSpan::code_range(class_start.len()..class_start.len() + ty.len()), - ], - filter_range: (0..name.len()).into(), - code, - }) - } - CompletionKind::Variable => { - let name = completion.label; - - Some(CodeLabel { - filter_range: (0..name.len()).into(), - spans: vec![CodeLabelSpan::literal(name, Some("variable".into()))], - code: String::new(), - }) - } - _ => None, - } - } -} - -zed::register_extension!(DartExtension); diff --git a/extensions/zig/languages/zig/outline.scm b/extensions/zig/languages/zig/outline.scm index d87cff2058dfc0..7ae683a876e2e7 100644 --- a/extensions/zig/languages/zig/outline.scm +++ b/extensions/zig/languages/zig/outline.scm @@ -19,6 +19,9 @@ ( TestDecl ( "test" @context - (STRINGLITERALSINGLE)? @name + [ + (STRINGLITERALSINGLE) + (IDENTIFIER) + ]? @name ) ) @item diff --git a/script/bundle-mac b/script/bundle-mac index 230722ecfa99ed..7a25881535981c 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -63,6 +63,12 @@ if [[ $# -gt 0 ]]; then fi fi +# Get release channel +pushd crates/zed +channel=$(