diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yml b/.github/ISSUE_TEMPLATE/1_bug_report.yml index 783a1ec05e0ca5..a2064a0157a4d1 100644 --- a/.github/ISSUE_TEMPLATE/1_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/1_bug_report.yml @@ -10,16 +10,39 @@ body: value: | - + SUMMARY_SENTENCE_HERE + + + + Steps to trigger the problem: 1. 2. 3. + 4. Actual Behavior: Expected Behavior: + + validations: required: true diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 419127b6aea9fe..912f19e4978b50 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,8 +109,16 @@ jobs: - name: cargo clippy run: ./script/clippy + - name: Install cargo-machete + uses: clechasseur/rs-cargo@v2 + with: + command: install + args: cargo-machete@0.7.0 + - name: Check unused dependencies - uses: bnjbvr/cargo-machete@main + uses: clechasseur/rs-cargo@v2 + with: + command: machete - name: Check licenses run: | @@ -298,8 +306,9 @@ jobs: env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} - APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} - APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 64c719e7de14cc..fa9ec83e7e1dc0 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -62,8 +62,9 @@ jobs: env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} - APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} - APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} diff --git a/Cargo.lock b/Cargo.lock index c2371d6efa4ecd..916237c84a41b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -394,7 +394,6 @@ dependencies = [ "language", "language_model", "language_model_selector", - "language_models", "languages", "log", "lsp", @@ -462,7 +461,6 @@ dependencies = [ "language", "language_model", "language_model_selector", - "language_models", "log", "lsp", "markdown", @@ -517,7 +515,6 @@ dependencies = [ "language", "language_model", "language_model_selector", - "language_models", "languages", "log", "multi_buffer", @@ -1268,6 +1265,30 @@ dependencies = [ "uuid", ] +[[package]] +name = "aws-sdk-bedrockruntime" +version = "1.74.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6938541d1948a543bca23303fec4cff9c36bf0e63b8fa3ae1b337bcb9d5b81af" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + 
"aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes 1.10.0", + "fastrand 2.3.0", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + [[package]] name = "aws-sdk-kinesis" version = "1.61.0" @@ -1597,6 +1618,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "aws_http_client" +version = "0.1.0" +dependencies = [ + "aws-smithy-runtime-api", + "aws-smithy-types", + "futures 0.3.31", + "http_client", + "tokio", +] + [[package]] name = "axum" version = "0.6.20" @@ -1726,6 +1758,22 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bedrock" +version = "0.1.0" +dependencies = [ + "anyhow", + "aws-sdk-bedrockruntime", + "aws-smithy-types", + "futures 0.3.31", + "schemars", + "serde", + "serde_json", + "strum", + "thiserror 1.0.69", + "tokio", +] + [[package]] name = "bigdecimal" version = "0.4.7" @@ -3114,7 +3162,9 @@ dependencies = [ "clock", "collections", "command_palette_hooks", + "ctor", "editor", + "env_logger 0.11.6", "fs", "futures 0.3.31", "gpui", @@ -3122,6 +3172,7 @@ dependencies = [ "indoc", "inline_completion", "language", + "log", "lsp", "menu", "node_runtime", @@ -5412,6 +5463,7 @@ dependencies = [ "pretty_assertions", "regex", "rope", + "schemars", "serde", "serde_json", "smol", @@ -7076,17 +7128,14 @@ dependencies = [ "anthropic", "anyhow", "base64 0.22.1", + "client", "collections", - "deepseek", "futures 0.3.31", "google_ai", "gpui", "http_client", "image", - "lmstudio", "log", - "mistral", - "ollama", "open_ai", "parking_lot", "proto", @@ -7095,6 +7144,7 @@ dependencies = [ "serde_json", "smol", "strum", + "telemetry_events", "thiserror 1.0.69", "ui", "util", @@ -7121,6 +7171,10 @@ version = "0.1.0" dependencies = [ "anthropic", "anyhow", + "aws-config", + "aws-credential-types", + "aws_http_client", + "bedrock", "client", "collections", "copilot", @@ -7132,6 +7186,7 @@ dependencies = [ "futures 0.3.31", "google_ai", "gpui", + "gpui_tokio", "http_client", "language_model", "lmstudio", @@ -7147,10 +7202,9 @@ dependencies = [ "settings", "smol", "strum", - "telemetry_events", "theme", - "thiserror 1.0.69", "tiktoken-rs", + "tokio", "ui", "util", ] @@ -17263,7 +17317,7 @@ dependencies = [ "indoc", "inline_completion", "language", - "language_models", + "language_model", "log", "menu", "migrator", diff --git a/Cargo.toml b/Cargo.toml index 3949f1a08cc086..43e2f0532a6265 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,6 +15,8 @@ members = [ "crates/audio", "crates/auto_update", "crates/auto_update_ui", + "crates/aws_http_client", + "crates/bedrock", "crates/breadcrumbs", "crates/buffer_diff", "crates/call", @@ -222,6 +224,8 @@ assistant_tools = { path = "crates/assistant_tools" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } auto_update_ui = { path = "crates/auto_update_ui" } +aws_http_client = { path = "crates/aws_http_client" } +bedrock = { path = "crates/bedrock" } breadcrumbs = { path = "crates/breadcrumbs" } call = { path = "crates/call" } channel = { path = "crates/channel" } @@ -390,6 +394,11 @@ async-trait = "0.1" async-tungstenite = "0.28" async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } +aws-config = { version = "1.5.16", features = ["behavior-version-latest"] } +aws-credential-types = { version = "1.2.1", features = 
["hardcoded-credentials"] } +aws-sdk-bedrockruntime = { version = "1.73.0", features = ["behavior-version-latest"] } +aws-smithy-runtime-api = { version = "1.7.3", features = ["http-1x", "client"] } +aws-smithy-types = { version = "1.2.13", features = ["http-body-1-x"] } base64 = "0.22" bitflags = "2.6.0" blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } diff --git a/assets/icons/ai_bedrock.svg b/assets/icons/ai_bedrock.svg new file mode 100644 index 00000000000000..2b672c364ea42e --- /dev/null +++ b/assets/icons/ai_bedrock.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 1eb646c311d538..1b4a3c9523747c 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -184,9 +184,9 @@ "ctrl-alt-/": "assistant::ToggleModelSelector", "ctrl-k h": "assistant::DeployHistory", "ctrl-k l": "assistant::DeployPromptLibrary", - "new": "assistant::NewContext", - "ctrl-t": "assistant::NewContext", - "ctrl-n": "assistant::NewContext" + "new": "assistant::NewChat", + "ctrl-t": "assistant::NewChat", + "ctrl-n": "assistant::NewChat" } }, { @@ -368,7 +368,12 @@ "ctrl-\\": "pane::SplitRight", "ctrl-k v": "markdown::OpenPreviewToTheSide", "ctrl-shift-v": "markdown::OpenPreview", - "ctrl-alt-shift-c": "editor::DisplayCursorNames" + "ctrl-alt-shift-c": "editor::DisplayCursorNames", + "ctrl-alt-y": "git::ToggleStaged", + "alt-y": "git::StageAndNext", + "alt-shift-y": "git::UnstageAndNext", + "alt-.": "editor::GoToHunk", + "alt-,": "editor::GoToPrevHunk" } }, { @@ -705,12 +710,6 @@ "space": "project_panel::Open" } }, - { - "context": "GitPanel && !CommitEditor", - "bindings": { - "escape": "git_panel::Close" - } - }, { "context": "GitPanel && ChangesList", "bindings": { @@ -722,19 +721,36 @@ "ctrl-shift-space": "git::UnstageAll", "tab": "git_panel::FocusEditor", "shift-tab": "git_panel::FocusEditor", - "escape": "git_panel::ToggleFocus" + "escape": "git_panel::ToggleFocus", + "ctrl-enter": "git::Commit", + "alt-enter": "menu::SecondaryConfirm" + } + }, + { + "context": "GitCommit > Editor", + "bindings": { + "enter": "editor::Newline", + "ctrl-enter": "git::Commit" } }, { "context": "GitPanel > Editor", "bindings": { "escape": "git_panel::FocusChanges", - "ctrl-enter": "git::Commit", "tab": "git_panel::FocusChanges", "shift-tab": "git_panel::FocusChanges", + "ctrl-enter": "git::Commit", "alt-up": "git_panel::FocusChanges" } }, + { + "context": "GitCommit > Editor", + "use_key_equivalents": true, + "bindings": { + "enter": "editor::Newline", + "ctrl-enter": "git::Commit" + } + }, { "context": "CollabPanel && not_editing", "bindings": { @@ -813,6 +829,7 @@ "pagedown": ["terminal::SendKeystroke", "pagedown"], "escape": ["terminal::SendKeystroke", "escape"], "enter": ["terminal::SendKeystroke", "enter"], + "ctrl-b": ["terminal::SendKeystroke", "ctrl-b"], "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"], "shift-pageup": "terminal::ScrollPageUp", "shift-pagedown": "terminal::ScrollPageDown", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 3c10d6fa003bc2..63cd8f348b5add 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -211,8 +211,8 @@ "cmd-alt-/": "assistant::ToggleModelSelector", "cmd-k h": "assistant::DeployHistory", "cmd-k l": "assistant::DeployPromptLibrary", - "cmd-t": "assistant::NewContext", - "cmd-n": "assistant::NewContext" + "cmd-t": "assistant::NewChat", + "cmd-n": 
"assistant::NewChat" } }, { @@ -751,22 +751,22 @@ } }, { - "context": "GitCommit > Editor", + "context": "GitPanel > Editor", "use_key_equivalents": true, "bindings": { "enter": "editor::Newline", - "cmd-enter": "git::Commit" + "cmd-enter": "git::Commit", + "tab": "git_panel::FocusChanges", + "shift-tab": "git_panel::FocusChanges", + "alt-up": "git_panel::FocusChanges" } }, { - "context": "GitPanel > Editor", + "context": "GitCommit > Editor", "use_key_equivalents": true, "bindings": { "enter": "editor::Newline", - "cmd-enter": "git::Commit", - "tab": "git_panel::FocusChanges", - "shift-tab": "git_panel::FocusChanges", - "alt-up": "git_panel::FocusChanges" + "cmd-enter": "git::Commit" } }, { diff --git a/assets/keymaps/linux/emacs.json b/assets/keymaps/linux/emacs.json index 2c1128d8d66922..cf1afd309c8497 100755 --- a/assets/keymaps/linux/emacs.json +++ b/assets/keymaps/linux/emacs.json @@ -48,6 +48,8 @@ "ctrl-_": "editor::Undo", // undo "ctrl-/": "editor::Undo", // undo "ctrl-x u": "editor::Undo", // undo + "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph + "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph "ctrl-v": "editor::MovePageDown", // scroll-up "alt-v": "editor::MovePageUp", // scroll-down "ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer diff --git a/assets/keymaps/macos/emacs.json b/assets/keymaps/macos/emacs.json index 2c1128d8d66922..cf1afd309c8497 100755 --- a/assets/keymaps/macos/emacs.json +++ b/assets/keymaps/macos/emacs.json @@ -48,6 +48,8 @@ "ctrl-_": "editor::Undo", // undo "ctrl-/": "editor::Undo", // undo "ctrl-x u": "editor::Undo", // undo + "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph + "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph "ctrl-v": "editor::MovePageDown", // scroll-up "alt-v": "editor::MovePageUp", // scroll-down "ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer diff --git a/assets/settings/default.json b/assets/settings/default.json index b6f3c71a8e4964..1c103c026857b3 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1093,6 +1093,7 @@ "tab_size": 2 }, "Diff": { + "show_edit_predictions": false, "remove_trailing_whitespace_on_save": false, "ensure_final_newline_on_save": false }, diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 958cf4797dc6f4..f1a6b5c519ba2f 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -379,7 +379,7 @@ "font_weight": null }, "variable": { - "color": "#83a598ff", + "color": "#ebdbb2ff", "font_style": null, "font_weight": null }, @@ -767,7 +767,7 @@ "font_weight": null }, "variable": { - "color": "#83a598ff", + "color": "#ebdbb2ff", "font_style": null, "font_weight": null }, @@ -1155,7 +1155,7 @@ "font_weight": null }, "variable": { - "color": "#83a598ff", + "color": "#ebdbb2ff", "font_style": null, "font_weight": null }, @@ -1543,7 +1543,7 @@ "font_weight": null }, "variable": { - "color": "#066578ff", + "color": "#282828ff", "font_style": null, "font_weight": null }, @@ -1931,7 +1931,7 @@ "font_weight": null }, "variable": { - "color": "#066578ff", + "color": "#282828ff", "font_style": null, "font_weight": null }, @@ -2319,7 +2319,7 @@ "font_weight": null }, "variable": { - "color": "#066578ff", + "color": "#282828ff", "font_style": null, "font_weight": null }, diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 9d7a29cd957783..3c7421c04ec39c 100644 --- a/assets/themes/one/one.json +++ 
b/assets/themes/one/one.json @@ -365,7 +365,7 @@ "font_weight": null }, "variable": { - "color": "#dce0e5ff", + "color": "#acb2beff", "font_style": null, "font_weight": null }, diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 8100cbe5c86ceb..c64d621143c29d 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -30,6 +30,8 @@ pub enum Model { #[default] #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")] Claude3_5Sonnet, + #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")] + Claude3_7Sonnet, #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")] Claude3_5Haiku, #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")] @@ -59,6 +61,8 @@ impl Model { pub fn from_id(id: &str) -> Result { if id.starts_with("claude-3-5-sonnet") { Ok(Self::Claude3_5Sonnet) + } else if id.starts_with("claude-3-7-sonnet") { + Ok(Self::Claude3_7Sonnet) } else if id.starts_with("claude-3-5-haiku") { Ok(Self::Claude3_5Haiku) } else if id.starts_with("claude-3-opus") { @@ -75,6 +79,7 @@ impl Model { pub fn id(&self) -> &str { match self { Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest", + Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest", Model::Claude3_5Haiku => "claude-3-5-haiku-latest", Model::Claude3Opus => "claude-3-opus-latest", Model::Claude3Sonnet => "claude-3-sonnet-20240229", @@ -85,6 +90,7 @@ impl Model { pub fn display_name(&self) -> &str { match self { + Self::Claude3_7Sonnet => "Claude 3.7 Sonnet", Self::Claude3_5Sonnet => "Claude 3.5 Sonnet", Self::Claude3_5Haiku => "Claude 3.5 Haiku", Self::Claude3Opus => "Claude 3 Opus", @@ -98,13 +104,14 @@ impl Model { pub fn cache_configuration(&self) -> Option { match self { - Self::Claude3_5Sonnet | Self::Claude3_5Haiku | Self::Claude3Haiku => { - Some(AnthropicModelCacheConfiguration { - min_total_token: 2_048, - should_speculate: true, - max_cache_anchors: 4, - }) - } + Self::Claude3_5Sonnet + | Self::Claude3_5Haiku + | Self::Claude3_7Sonnet + | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration { + min_total_token: 2_048, + should_speculate: true, + max_cache_anchors: 4, + }), Self::Custom { cache_configuration, .. @@ -117,6 +124,7 @@ impl Model { match self { Self::Claude3_5Sonnet | Self::Claude3_5Haiku + | Self::Claude3_7Sonnet | Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 200_000, @@ -127,7 +135,7 @@ impl Model { pub fn max_output_tokens(&self) -> u32 { match self { Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096, - Self::Claude3_5Sonnet | Self::Claude3_5Haiku => 8_192, + Self::Claude3_5Sonnet | Self::Claude3_7Sonnet | Self::Claude3_5Haiku => 8_192, Self::Custom { max_output_tokens, .. 
} => max_output_tokens.unwrap_or(4_096), @@ -137,6 +145,7 @@ impl Model { pub fn default_temperature(&self) -> f32 { match self { Self::Claude3_5Sonnet + | Self::Claude3_7Sonnet | Self::Claude3_5Haiku | Self::Claude3Opus | Self::Claude3Sonnet diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index 1bb7cbc1ae1163..7df6ff10ac4295 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -43,7 +43,6 @@ indoc.workspace = true language.workspace = true language_model.workspace = true language_model_selector.workspace = true -language_models.workspace = true log.workspace = true lsp.workspace = true menu.workspace = true diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 7817c958b395c8..3c2c1db9974541 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -33,7 +33,7 @@ actions!( [ InsertActivePrompt, DeployHistory, - NewContext, + NewChat, CycleNextInlineAssist, CyclePreviousInlineAssist ] diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index e0791e003937ac..d40640246d6c23 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -1,6 +1,6 @@ use crate::assistant_configuration::{ConfigurationView, ConfigurationViewEvent}; use crate::{ - terminal_inline_assistant::TerminalInlineAssistant, DeployHistory, InlineAssistant, NewContext, + terminal_inline_assistant::TerminalInlineAssistant, DeployHistory, InlineAssistant, NewChat, }; use anyhow::{anyhow, Result}; use assistant_context_editor::{ @@ -129,7 +129,7 @@ impl AssistantPanel { workspace.project().clone(), Default::default(), None, - NewContext.boxed_clone(), + NewChat.boxed_clone(), window, cx, ); @@ -228,12 +228,12 @@ impl AssistantPanel { IconButton::new("new-chat", IconName::Plus) .icon_size(IconSize::Small) .on_click(cx.listener(|_, _, window, cx| { - window.dispatch_action(NewContext.boxed_clone(), cx) + window.dispatch_action(NewChat.boxed_clone(), cx) })) .tooltip(move |window, cx| { Tooltip::for_action_in( "New Chat", - &NewContext, + &NewChat, &focus_handle, window, cx, @@ -256,7 +256,7 @@ impl AssistantPanel { let focus_handle = _pane.focus_handle(cx); Some(ContextMenu::build(window, cx, move |menu, _, _| { menu.context(focus_handle.clone()) - .action("New Chat", Box::new(NewContext)) + .action("New Chat", Box::new(NewChat)) .action("History", Box::new(DeployHistory)) .action("Prompt Library", Box::new(DeployPromptLibrary)) .action("Configure", Box::new(ShowConfiguration)) @@ -760,7 +760,7 @@ impl AssistantPanel { pub fn create_new_context( workspace: &mut Workspace, - _: &NewContext, + _: &NewChat, window: &mut Window, cx: &mut Context, ) { @@ -1206,7 +1206,7 @@ impl Render for AssistantPanel { v_flex() .key_context("AssistantPanel") .size_full() - .on_action(cx.listener(|this, _: &NewContext, window, cx| { + .on_action(cx.listener(|this, _: &NewChat, window, cx| { this.new_context(window, cx); })) .on_action(cx.listener(|this, _: &ShowConfiguration, window, cx| { diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index eb154ea0209a9c..02ae9a2333d502 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -32,11 +32,10 @@ use gpui::{ }; use language::{line_diff, Buffer, IndentKind, Point, Selection, TransactionId}; use language_model::{ - LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - 
LanguageModelTextStream, Role, + report_assistant_event, LanguageModel, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelTextStream, Role, }; use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu}; -use language_models::report_assistant_event; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; use project::{CodeAction, ProjectTransaction}; diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index e8b049371b2c1f..87b5f4bbda654e 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -16,10 +16,10 @@ use gpui::{ }; use language::Buffer; use language_model::{ - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + report_assistant_event, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, }; use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu}; -use language_models::report_assistant_event; use prompt_library::PromptBuilder; use settings::{update_settings_file, Settings}; use std::{ diff --git a/crates/assistant2/Cargo.toml b/crates/assistant2/Cargo.toml index 9a74a5e2fec07b..13116c2ab5f022 100644 --- a/crates/assistant2/Cargo.toml +++ b/crates/assistant2/Cargo.toml @@ -46,7 +46,6 @@ itertools.workspace = true language.workspace = true language_model.workspace = true language_model_selector.workspace = true -language_models.workspace = true log.workspace = true lsp.workspace = true markdown.workspace = true diff --git a/crates/assistant2/src/buffer_codegen.rs b/crates/assistant2/src/buffer_codegen.rs index 4e62f9549dc820..a07b288f71dd26 100644 --- a/crates/assistant2/src/buffer_codegen.rs +++ b/crates/assistant2/src/buffer_codegen.rs @@ -9,10 +9,9 @@ use futures::{channel::mpsc, future::LocalBoxFuture, join, SinkExt, Stream, Stre use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription, Task}; use language::{line_diff, Buffer, IndentKind, Point, TransactionId}; use language_model::{ - LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelTextStream, Role, + report_assistant_event, LanguageModel, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelTextStream, Role, }; -use language_models::report_assistant_event; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; use prompt_library::PromptBuilder; diff --git a/crates/assistant2/src/inline_assistant.rs b/crates/assistant2/src/inline_assistant.rs index c307b47cb14294..a7f7b1601de030 100644 --- a/crates/assistant2/src/inline_assistant.rs +++ b/crates/assistant2/src/inline_assistant.rs @@ -24,8 +24,7 @@ use gpui::{ UpdateGlobal, WeakEntity, Window, }; use language::{Buffer, Point, Selection, TransactionId}; -use language_model::LanguageModelRegistry; -use language_models::report_assistant_event; +use language_model::{report_assistant_event, LanguageModelRegistry}; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; use project::{CodeAction, ProjectTransaction}; diff --git a/crates/assistant2/src/terminal_codegen.rs b/crates/assistant2/src/terminal_codegen.rs index c9b6a541080449..5a1873fe556dfa 100644 --- a/crates/assistant2/src/terminal_codegen.rs +++ b/crates/assistant2/src/terminal_codegen.rs @@ -2,8 +2,7 @@ use crate::inline_prompt_editor::CodegenStatus; use client::telemetry::Telemetry; use futures::{channel::mpsc, SinkExt, 
StreamExt}; use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task}; -use language_model::{LanguageModelRegistry, LanguageModelRequest}; -use language_models::report_assistant_event; +use language_model::{report_assistant_event, LanguageModelRegistry, LanguageModelRequest}; use std::{sync::Arc, time::Instant}; use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; use terminal::Terminal; diff --git a/crates/assistant2/src/terminal_inline_assistant.rs b/crates/assistant2/src/terminal_inline_assistant.rs index 9abe2cbadb3338..788ccc9ecacc8e 100644 --- a/crates/assistant2/src/terminal_inline_assistant.rs +++ b/crates/assistant2/src/terminal_inline_assistant.rs @@ -13,9 +13,9 @@ use fs::Fs; use gpui::{App, Entity, Focusable, Global, Subscription, UpdateGlobal, WeakEntity}; use language::Buffer; use language_model::{ - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + report_assistant_event, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, }; -use language_models::report_assistant_event; use prompt_library::PromptBuilder; use std::sync::Arc; use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; diff --git a/crates/assistant2/src/thread.rs b/crates/assistant2/src/thread.rs index 9ccb0664807bf5..7eeb13f8726e42 100644 --- a/crates/assistant2/src/thread.rs +++ b/crates/assistant2/src/thread.rs @@ -10,9 +10,9 @@ use gpui::{App, Context, EventEmitter, SharedString, Task}; use language_model::{ LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolUse, - LanguageModelToolUseId, MessageContent, Role, StopReason, + LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError, + Role, StopReason, }; -use language_models::provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError}; use serde::{Deserialize, Serialize}; use util::{post_inc, TryFutureExt as _}; use uuid::Uuid; diff --git a/crates/assistant_context_editor/Cargo.toml b/crates/assistant_context_editor/Cargo.toml index aebadc4ca9d029..0feb0543bf85f0 100644 --- a/crates/assistant_context_editor/Cargo.toml +++ b/crates/assistant_context_editor/Cargo.toml @@ -30,7 +30,6 @@ indexed_docs.workspace = true language.workspace = true language_model.workspace = true language_model_selector.workspace = true -language_models.workspace = true log.workspace = true multi_buffer.workspace = true open_ai.workspace = true diff --git a/crates/assistant_context_editor/src/context.rs b/crates/assistant_context_editor/src/context.rs index 7b72c4c04de8c4..d6447572bc7e03 100644 --- a/crates/assistant_context_editor/src/context.rs +++ b/crates/assistant_context_editor/src/context.rs @@ -19,13 +19,10 @@ use gpui::{ }; use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset}; use language_model::{ - LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent, - LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelToolUseId, MessageContent, Role, StopReason, -}; -use language_models::{ - provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError}, - report_assistant_event, + report_assistant_event, LanguageModel, LanguageModelCacheConfiguration, + LanguageModelCompletionEvent, LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelToolUseId, 
MaxMonthlySpendReachedError, + MessageContent, PaymentRequiredError, Role, StopReason, }; use open_ai::Model as OpenAiModel; use paths::contexts_dir; diff --git a/crates/assistant_context_editor/src/context_editor.rs b/crates/assistant_context_editor/src/context_editor.rs index e3be2eec77f98d..12a522f56a36a1 100644 --- a/crates/assistant_context_editor/src/context_editor.rs +++ b/crates/assistant_context_editor/src/context_editor.rs @@ -1234,8 +1234,8 @@ impl ContextEditor { .px_1() .mr_0p5() .border_1() - .border_color(theme::color_alpha(colors.border_variant, 0.6)) - .bg(theme::color_alpha(colors.element_background, 0.6)) + .border_color(colors.border_variant.alpha(0.6)) + .bg(colors.element_background.alpha(0.6)) .child("esc"), ) .child("to cancel") @@ -1514,15 +1514,11 @@ impl ContextEditor { (!text.is_empty()).then_some((text, true)) } else { - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); + let selection = context_editor.selections.newest_adjusted(cx); + let buffer = context_editor.buffer().read(cx).snapshot(cx); + let selected_text = buffer.text_for_range(selection.range()).collect::(); - (!text.is_empty()).then_some((text, false)) + (!selected_text.is_empty()).then_some((selected_text, false)) } }) } @@ -1777,23 +1773,16 @@ impl ContextEditor { &mut self, cx: &mut Context, ) -> (String, CopyMetadata, Vec>) { - let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| { - let mut selection = editor.selections.newest::(cx); + let (selection, creases) = self.editor.update(cx, |editor, cx| { + let mut selection = editor.selections.newest_adjusted(cx); let snapshot = editor.buffer().read(cx).snapshot(cx); - let is_entire_line = selection.is_empty() || editor.selections.line_mode; - if is_entire_line { - selection.start = Point::new(selection.start.row, 0); - selection.end = - cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0)); - selection.goal = SelectionGoal::None; - } + selection.goal = SelectionGoal::None; let selection_start = snapshot.point_to_offset(selection.start); ( - snapshot.clone(), - selection.clone(), + selection.map(|point| snapshot.point_to_offset(point)), editor.display_map.update(cx, |display_map, cx| { display_map .snapshot(cx) @@ -1833,7 +1822,6 @@ impl ContextEditor { ) }); - let selection = selection.map(|point| snapshot.point_to_offset(point)); let context = self.context.read(cx); let mut text = String::new(); diff --git a/crates/assistant_settings/src/assistant_settings.rs b/crates/assistant_settings/src/assistant_settings.rs index 5e044282b07b49..d12f4a23f05fe3 100644 --- a/crates/assistant_settings/src/assistant_settings.rs +++ b/crates/assistant_settings/src/assistant_settings.rs @@ -359,6 +359,7 @@ fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema: schemars::schema::SchemaObject { enum_values: Some(vec![ "anthropic".into(), + "bedrock".into(), "google".into(), "lmstudio".into(), "ollama".into(), diff --git a/crates/assistant_tools/src/now_tool.rs b/crates/assistant_tools/src/now_tool.rs index b9d22b66b48c95..4e4e7228922e0e 100644 --- a/crates/assistant_tools/src/now_tool.rs +++ b/crates/assistant_tools/src/now_tool.rs @@ -17,7 +17,7 @@ pub enum Timezone { } #[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct FileToolInput { +pub struct NowToolInput { /// The timezone to use for the datetime. 
     timezone: Timezone,
 }
@@ -34,7 +34,7 @@ impl Tool for NowTool {
     }
 
     fn input_schema(&self) -> serde_json::Value {
-        let schema = schemars::schema_for!(FileToolInput);
+        let schema = schemars::schema_for!(NowToolInput);
         serde_json::to_value(&schema).unwrap()
     }
 
@@ -45,7 +45,7 @@ impl Tool for NowTool {
         _window: &mut Window,
         _cx: &mut App,
     ) -> Task<Result<String>> {
-        let input: FileToolInput = match serde_json::from_value(input) {
+        let input: NowToolInput = match serde_json::from_value(input) {
             Ok(input) => input,
             Err(err) => return Task::ready(Err(anyhow!(err))),
         };
diff --git a/crates/aws_http_client/Cargo.toml b/crates/aws_http_client/Cargo.toml
new file mode 100644
index 00000000000000..8715fe1b56de18
--- /dev/null
+++ b/crates/aws_http_client/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "aws_http_client"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/aws_http_client.rs"
+
+[features]
+default = []
+
+[dependencies]
+aws-smithy-runtime-api.workspace = true
+aws-smithy-types.workspace = true
+futures.workspace = true
+http_client.workspace = true
+tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
diff --git a/crates/aws_http_client/LICENSE-GPL b/crates/aws_http_client/LICENSE-GPL
new file mode 120000
index 00000000000000..89e542f750cd38
--- /dev/null
+++ b/crates/aws_http_client/LICENSE-GPL
@@ -0,0 +1 @@
+../../LICENSE-GPL
\ No newline at end of file
diff --git a/crates/aws_http_client/src/aws_http_client.rs b/crates/aws_http_client/src/aws_http_client.rs
new file mode 100644
index 00000000000000..f99280658131e2
--- /dev/null
+++ b/crates/aws_http_client/src/aws_http_client.rs
@@ -0,0 +1,118 @@
+use std::fmt;
+use std::sync::Arc;
+
+use aws_smithy_runtime_api::client::http::{
+    HttpClient as AwsClient, HttpConnector as AwsConnector,
+    HttpConnectorFuture as AwsConnectorFuture, HttpConnectorFuture, HttpConnectorSettings,
+    SharedHttpConnector,
+};
+use aws_smithy_runtime_api::client::orchestrator::{HttpRequest as AwsHttpRequest, HttpResponse};
+use aws_smithy_runtime_api::client::result::ConnectorError;
+use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents;
+use aws_smithy_runtime_api::http::StatusCode;
+use aws_smithy_types::body::SdkBody;
+use futures::AsyncReadExt;
+use http_client::{AsyncBody, Inner};
+use http_client::{HttpClient, Request};
+use tokio::runtime::Handle;
+
+struct AwsHttpConnector {
+    client: Arc<dyn HttpClient>,
+    handle: Handle,
+}
+
+impl std::fmt::Debug for AwsHttpConnector {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AwsHttpConnector").finish()
+    }
+}
+
+impl AwsConnector for AwsHttpConnector {
+    fn call(&self, request: AwsHttpRequest) -> AwsConnectorFuture {
+        let req = match request.try_into_http1x() {
+            Ok(req) => req,
+            Err(err) => {
+                return HttpConnectorFuture::ready(Err(ConnectorError::other(err.into(), None)))
+            }
+        };
+
+        let (parts, body) = req.into_parts();
+
+        let response = self
+            .client
+            .send(Request::from_parts(parts, convert_to_async_body(body)));
+
+        let handle = self.handle.clone();
+
+        HttpConnectorFuture::new(async move {
+            let response = match response.await {
+                Ok(response) => response,
+                Err(err) => return Err(ConnectorError::other(err.into(), None)),
+            };
+            let (parts, body) = response.into_parts();
+            let body = convert_to_sdk_body(body, handle).await;
+
+            Ok(HttpResponse::new(
+                StatusCode::try_from(parts.status.as_u16()).unwrap(),
+                body,
+            ))
+        })
+    }
+}
+
+#[derive(Clone)]
+pub struct AwsHttpClient {
+    client: Arc<dyn HttpClient>,
+    handler: Handle,
+}
+
+impl std::fmt::Debug for AwsHttpClient {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AwsHttpClient").finish()
+    }
+}
+
+impl AwsHttpClient {
+    pub fn new(client: Arc<dyn HttpClient>, handle: Handle) -> Self {
+        Self {
+            client,
+            handler: handle,
+        }
+    }
+}
+
+impl AwsClient for AwsHttpClient {
+    fn http_connector(
+        &self,
+        _settings: &HttpConnectorSettings,
+        _components: &RuntimeComponents,
+    ) -> SharedHttpConnector {
+        SharedHttpConnector::new(AwsHttpConnector {
+            client: self.client.clone(),
+            handle: self.handler.clone(),
+        })
+    }
+}
+
+pub async fn convert_to_sdk_body(body: AsyncBody, handle: Handle) -> SdkBody {
+    match body.0 {
+        Inner::Empty => SdkBody::empty(),
+        Inner::Bytes(bytes) => SdkBody::from(bytes.into_inner()),
+        Inner::AsyncReader(mut reader) => {
+            let buffer = handle.spawn(async move {
+                let mut buffer = Vec::new();
+                let _ = reader.read_to_end(&mut buffer).await;
+                buffer
+            });
+
+            SdkBody::from(buffer.await.unwrap_or_default())
+        }
+    }
+}
+
+pub fn convert_to_async_body(body: SdkBody) -> AsyncBody {
+    match body.bytes() {
+        Some(bytes) => AsyncBody::from((*bytes).to_vec()),
+        None => AsyncBody::empty(),
+    }
+}
diff --git a/crates/bedrock/Cargo.toml b/crates/bedrock/Cargo.toml
new file mode 100644
index 00000000000000..e99f7e2cf08327
--- /dev/null
+++ b/crates/bedrock/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "bedrock"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/bedrock.rs"
+
+[features]
+default = []
+schemars = ["dep:schemars"]
+
+[dependencies]
+anyhow.workspace = true
+aws-sdk-bedrockruntime = { workspace = true, features = ["behavior-version-latest"] }
+aws-smithy-types = {workspace = true}
+futures.workspace = true
+schemars = { workspace = true, optional = true }
+serde.workspace = true
+serde_json.workspace = true
+strum.workspace = true
+thiserror.workspace = true
+tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
diff --git a/crates/bedrock/LICENSE-GPL b/crates/bedrock/LICENSE-GPL
new file mode 120000
index 00000000000000..89e542f750cd38
--- /dev/null
+++ b/crates/bedrock/LICENSE-GPL
@@ -0,0 +1 @@
+../../LICENSE-GPL
\ No newline at end of file
diff --git a/crates/bedrock/src/bedrock.rs b/crates/bedrock/src/bedrock.rs
new file mode 100644
index 00000000000000..fa17bc03834941
--- /dev/null
+++ b/crates/bedrock/src/bedrock.rs
@@ -0,0 +1,166 @@
+mod models;
+
+use std::pin::Pin;
+
+use anyhow::{anyhow, Context, Error, Result};
+use aws_sdk_bedrockruntime as bedrock;
+pub use aws_sdk_bedrockruntime as bedrock_client;
+pub use aws_sdk_bedrockruntime::types::{
+    ContentBlock as BedrockInnerContent, SpecificToolChoice as BedrockSpecificTool,
+    ToolChoice as BedrockToolChoice, ToolInputSchema as BedrockToolInputSchema,
+    ToolSpecification as BedrockTool,
+};
+use aws_smithy_types::{Document, Number as AwsNumber};
+pub use bedrock::operation::converse_stream::ConverseStreamInput as BedrockStreamingRequest;
+pub use bedrock::types::{
+    ContentBlock as BedrockRequestContent, ConversationRole as BedrockRole,
+    ConverseOutput as BedrockResponse, ConverseStreamOutput as BedrockStreamingResponse,
+    Message as BedrockMessage, ResponseStream as BedrockResponseStream,
+};
+use futures::stream::{self, BoxStream, Stream};
+use serde::{Deserialize, Serialize};
+use serde_json::{Number, Value};
+use thiserror::Error;
+
+pub use crate::models::*;
+
+pub async fn complete(
+    client: &bedrock::Client,
+    request: Request,
+) -> Result<BedrockResponse, BedrockError> {
+    let response = bedrock::Client::converse(client)
+        .model_id(request.model.clone())
+        .set_messages(request.messages.into())
+        .send()
+        .await
+        .context("failed to send request to Bedrock");
+
+    match response {
+        Ok(output) => output
+            .output
+            .ok_or_else(|| BedrockError::Other(anyhow!("no output"))),
+        Err(err) => Err(BedrockError::Other(err)),
+    }
+}
+
+pub async fn stream_completion(
+    client: bedrock::Client,
+    request: Request,
+    handle: tokio::runtime::Handle,
+) -> Result<BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>, Error> {
+    handle
+        .spawn(async move {
+            let response = bedrock::Client::converse_stream(&client)
+                .model_id(request.model.clone())
+                .set_messages(request.messages.into())
+                .send()
+                .await;
+
+            match response {
+                Ok(output) => {
+                    let stream: Pin<
+                        Box<
+                            dyn Stream<Item = Result<BedrockStreamingResponse, BedrockError>>
+                                + Send,
+                        >,
+                    > = Box::pin(stream::unfold(output.stream, |mut stream| async move {
+                        match stream.recv().await {
+                            Ok(Some(output)) => Some((Ok(output), stream)),
+                            Ok(None) => None,
+                            Err(err) => {
+                                Some((
+                                    // TODO: Figure out how we can capture Throttling Exceptions
+                                    Err(BedrockError::ClientError(anyhow!(
+                                        "{:?}",
+                                        aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
+                                    ))),
+                                    stream,
+                                ))
+                            }
+                        }
+                    }));
+                    Ok(stream)
+                }
+                Err(err) => Err(anyhow!(
+                    "{:?}",
+                    aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
+                )),
+            }
+        })
+        .await
+        .map_err(|err| anyhow!("failed to spawn task: {err:?}"))?
+}
+
+pub fn aws_document_to_value(document: &Document) -> Value {
+    match document {
+        Document::Null => Value::Null,
+        Document::Bool(value) => Value::Bool(*value),
+        Document::Number(value) => match *value {
+            AwsNumber::PosInt(value) => Value::Number(Number::from(value)),
+            AwsNumber::NegInt(value) => Value::Number(Number::from(value)),
+            AwsNumber::Float(value) => Value::Number(Number::from_f64(value).unwrap()),
+        },
+        Document::String(value) => Value::String(value.clone()),
+        Document::Array(array) => Value::Array(array.iter().map(aws_document_to_value).collect()),
+        Document::Object(map) => Value::Object(
+            map.iter()
+                .map(|(key, value)| (key.clone(), aws_document_to_value(value)))
+                .collect(),
+        ),
+    }
+}
+
+pub fn value_to_aws_document(value: &Value) -> Document {
+    match value {
+        Value::Null => Document::Null,
+        Value::Bool(value) => Document::Bool(*value),
+        Value::Number(value) => {
+            if let Some(value) = value.as_u64() {
+                Document::Number(AwsNumber::PosInt(value))
+            } else if let Some(value) = value.as_i64() {
+                Document::Number(AwsNumber::NegInt(value))
+            } else if let Some(value) = value.as_f64() {
+                Document::Number(AwsNumber::Float(value))
+            } else {
+                Document::Null
+            }
+        }
+        Value::String(value) => Document::String(value.clone()),
+        Value::Array(array) => Document::Array(array.iter().map(value_to_aws_document).collect()),
+        Value::Object(map) => Document::Object(
+            map.iter()
+                .map(|(key, value)| (key.clone(), value_to_aws_document(value)))
+                .collect(),
+        ),
+    }
+}
+
+#[derive(Debug)]
+pub struct Request {
+    pub model: String,
+    pub max_tokens: u32,
+    pub messages: Vec<BedrockMessage>,
+    pub tools: Vec<BedrockTool>,
+    pub tool_choice: Option<BedrockToolChoice>,
+    pub system: Option<String>,
+    pub metadata: Option<Metadata>,
+    pub stop_sequences: Vec<String>,
+    pub temperature: Option<f32>,
+    pub top_k: Option<u32>,
+    pub top_p: Option<f32>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Metadata {
+    pub user_id: Option<String>,
+}
+
+#[derive(Error, Debug)]
+pub enum BedrockError {
+    #[error("client error: {0}")]
+    ClientError(anyhow::Error),
+    #[error("extension error: {0}")]
+    ExtensionError(anyhow::Error),
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs
new file mode 100644
index 00000000000000..a8d0614e5d21b2
--- /dev/null
+++ b/crates/bedrock/src/models.rs
@@ -0,0 +1,199 @@
+use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
+use strum::EnumIter;
+
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
+pub enum Model {
+    // Anthropic models (already included)
+    #[default]
+    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
+    Claude3_5Sonnet,
+    #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
+    Claude3Opus,
+    #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
+    Claude3Sonnet,
+    #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
+    Claude3_5Haiku,
+    // Amazon Nova Models
+    AmazonNovaLite,
+    AmazonNovaMicro,
+    AmazonNovaPro,
+    // AI21 models
+    AI21J2GrandeInstruct,
+    AI21J2JumboInstruct,
+    AI21J2Mid,
+    AI21J2MidV1,
+    AI21J2Ultra,
+    AI21J2UltraV1_8k,
+    AI21J2UltraV1,
+    AI21JambaInstructV1,
+    AI21Jamba15LargeV1,
+    AI21Jamba15MiniV1,
+    // Cohere models
+    CohereCommandTextV14_4k,
+    CohereCommandRV1,
+    CohereCommandRPlusV1,
+    CohereCommandLightTextV14_4k,
+    // Meta models
+    MetaLlama38BInstructV1,
+    MetaLlama370BInstructV1,
+    MetaLlama318BInstructV1_128k,
+    MetaLlama318BInstructV1,
+    MetaLlama3170BInstructV1_128k,
+    MetaLlama3170BInstructV1,
+    MetaLlama3211BInstructV1,
+    MetaLlama3290BInstructV1,
+    MetaLlama321BInstructV1,
+    MetaLlama323BInstructV1,
+    // Mistral models
+    MistralMistral7BInstructV0,
+    MistralMixtral8x7BInstructV0,
+    MistralMistralLarge2402V1,
+    MistralMistralSmall2402V1,
+    #[serde(rename = "custom")]
+    Custom {
+        name: String,
+        max_tokens: usize,
+        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+ display_name: Option, + max_output_tokens: Option, + default_temperature: Option, + }, +} + +impl Model { + pub fn from_id(id: &str) -> anyhow::Result { + if id.starts_with("claude-3-5-sonnet") { + Ok(Self::Claude3_5Sonnet) + } else if id.starts_with("claude-3-opus") { + Ok(Self::Claude3Opus) + } else if id.starts_with("claude-3-sonnet") { + Ok(Self::Claude3Sonnet) + } else if id.starts_with("claude-3-5-haiku") { + Ok(Self::Claude3_5Haiku) + } else { + Err(anyhow!("invalid model id")) + } + } + + pub fn id(&self) -> &str { + match self { + Model::Claude3_5Sonnet => "us.anthropic.claude-3-5-sonnet-20241022-v2:0", + Model::Claude3Opus => "us.anthropic.claude-3-opus-20240229-v1:0", + Model::Claude3Sonnet => "us.anthropic.claude-3-sonnet-20240229-v1:0", + Model::Claude3_5Haiku => "us.anthropic.claude-3-5-haiku-20241022-v1:0", + Model::AmazonNovaLite => "us.amazon.nova-lite-v1:0", + Model::AmazonNovaMicro => "us.amazon.nova-micro-v1:0", + Model::AmazonNovaPro => "us.amazon.nova-pro-v1:0", + Model::AI21J2GrandeInstruct => "ai21.j2-grande-instruct", + Model::AI21J2JumboInstruct => "ai21.j2-jumbo-instruct", + Model::AI21J2Mid => "ai21.j2-mid", + Model::AI21J2MidV1 => "ai21.j2-mid-v1", + Model::AI21J2Ultra => "ai21.j2-ultra", + Model::AI21J2UltraV1_8k => "ai21.j2-ultra-v1:0:8k", + Model::AI21J2UltraV1 => "ai21.j2-ultra-v1", + Model::AI21JambaInstructV1 => "ai21.jamba-instruct-v1:0", + Model::AI21Jamba15LargeV1 => "ai21.jamba-1-5-large-v1:0", + Model::AI21Jamba15MiniV1 => "ai21.jamba-1-5-mini-v1:0", + Model::CohereCommandTextV14_4k => "cohere.command-text-v14:7:4k", + Model::CohereCommandRV1 => "cohere.command-r-v1:0", + Model::CohereCommandRPlusV1 => "cohere.command-r-plus-v1:0", + Model::CohereCommandLightTextV14_4k => "cohere.command-light-text-v14:7:4k", + Model::MetaLlama38BInstructV1 => "meta.llama3-8b-instruct-v1:0", + Model::MetaLlama370BInstructV1 => "meta.llama3-70b-instruct-v1:0", + Model::MetaLlama318BInstructV1_128k => "meta.llama3-1-8b-instruct-v1:0:128k", + Model::MetaLlama318BInstructV1 => "meta.llama3-1-8b-instruct-v1:0", + Model::MetaLlama3170BInstructV1_128k => "meta.llama3-1-70b-instruct-v1:0:128k", + Model::MetaLlama3170BInstructV1 => "meta.llama3-1-70b-instruct-v1:0", + Model::MetaLlama3211BInstructV1 => "meta.llama3-2-11b-instruct-v1:0", + Model::MetaLlama3290BInstructV1 => "meta.llama3-2-90b-instruct-v1:0", + Model::MetaLlama321BInstructV1 => "meta.llama3-2-1b-instruct-v1:0", + Model::MetaLlama323BInstructV1 => "meta.llama3-2-3b-instruct-v1:0", + Model::MistralMistral7BInstructV0 => "mistral.mistral-7b-instruct-v0:2", + Model::MistralMixtral8x7BInstructV0 => "mistral.mixtral-8x7b-instruct-v0:1", + Model::MistralMistralLarge2402V1 => "mistral.mistral-large-2402-v1:0", + Model::MistralMistralSmall2402V1 => "mistral.mistral-small-2402-v1:0", + Self::Custom { name, .. 
} => name, + } + } + + pub fn display_name(&self) -> &str { + match self { + Self::Claude3_5Sonnet => "Claude 3.5 Sonnet", + Self::Claude3Opus => "Claude 3 Opus", + Self::Claude3Sonnet => "Claude 3 Sonnet", + Self::Claude3_5Haiku => "Claude 3.5 Haiku", + Self::AmazonNovaLite => "Amazon Nova Lite", + Self::AmazonNovaMicro => "Amazon Nova Micro", + Self::AmazonNovaPro => "Amazon Nova Pro", + Self::AI21J2GrandeInstruct => "AI21 Jurassic2 Grande Instruct", + Self::AI21J2JumboInstruct => "AI21 Jurassic2 Jumbo Instruct", + Self::AI21J2Mid => "AI21 Jurassic2 Mid", + Self::AI21J2MidV1 => "AI21 Jurassic2 Mid V1", + Self::AI21J2Ultra => "AI21 Jurassic2 Ultra", + Self::AI21J2UltraV1_8k => "AI21 Jurassic2 Ultra V1 8K", + Self::AI21J2UltraV1 => "AI21 Jurassic2 Ultra V1", + Self::AI21JambaInstructV1 => "AI21 Jamba Instruct", + Self::AI21Jamba15LargeV1 => "AI21 Jamba 1.5 Large", + Self::AI21Jamba15MiniV1 => "AI21 Jamba 1.5 Mini", + Self::CohereCommandTextV14_4k => "Cohere Command Text V14 4K", + Self::CohereCommandRV1 => "Cohere Command R V1", + Self::CohereCommandRPlusV1 => "Cohere Command R Plus V1", + Self::CohereCommandLightTextV14_4k => "Cohere Command Light Text V14 4K", + Self::MetaLlama38BInstructV1 => "Meta Llama 3 8B Instruct V1", + Self::MetaLlama370BInstructV1 => "Meta Llama 3 70B Instruct V1", + Self::MetaLlama318BInstructV1_128k => "Meta Llama 3 1.8B Instruct V1 128K", + Self::MetaLlama318BInstructV1 => "Meta Llama 3 1.8B Instruct V1", + Self::MetaLlama3170BInstructV1_128k => "Meta Llama 3 1 70B Instruct V1 128K", + Self::MetaLlama3170BInstructV1 => "Meta Llama 3 1 70B Instruct V1", + Self::MetaLlama3211BInstructV1 => "Meta Llama 3 2 11B Instruct V1", + Self::MetaLlama3290BInstructV1 => "Meta Llama 3 2 90B Instruct V1", + Self::MetaLlama321BInstructV1 => "Meta Llama 3 2 1B Instruct V1", + Self::MetaLlama323BInstructV1 => "Meta Llama 3 2 3B Instruct V1", + Self::MistralMistral7BInstructV0 => "Mistral 7B Instruct V0", + Self::MistralMixtral8x7BInstructV0 => "Mistral Mixtral 8x7B Instruct V0", + Self::MistralMistralLarge2402V1 => "Mistral Large 2402 V1", + Self::MistralMistralSmall2402V1 => "Mistral Small 2402 V1", + Self::Custom { + display_name, name, .. + } => display_name.as_deref().unwrap_or(name), + } + } + + pub fn max_token_count(&self) -> usize { + match self { + Self::Claude3_5Sonnet + | Self::Claude3Opus + | Self::Claude3Sonnet + | Self::Claude3_5Haiku => 200_000, + Self::Custom { max_tokens, .. } => *max_tokens, + _ => 200_000, + } + } + + pub fn max_output_tokens(&self) -> u32 { + match self { + Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096, + Self::Claude3_5Sonnet => 8_192, + Self::Custom { + max_output_tokens, .. + } => max_output_tokens.unwrap_or(4_096), + _ => 4_096, + } + } + + pub fn default_temperature(&self) -> f32 { + match self { + Self::Claude3_5Sonnet + | Self::Claude3Opus + | Self::Claude3Sonnet + | Self::Claude3_5Haiku => 1.0, + Self::Custom { + default_temperature, + .. 
+ } => default_temperature.unwrap_or(1.0), + _ => 1.0, + } + } +} diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 7223bb7086bbf3..cc1767b4cbb553 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -3,7 +3,8 @@ use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter}; use language::{Language, LanguageRegistry}; use rope::Rope; -use std::{cmp, future::Future, iter, ops::Range, sync::Arc}; +use std::cmp::Ordering; +use std::{future::Future, iter, ops::Range, sync::Arc}; use sum_tree::SumTree; use text::ToOffset as _; use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point}; @@ -68,7 +69,6 @@ pub struct DiffHunk { /// The range in the buffer's diff base text to which this hunk corresponds. pub diff_base_byte_range: Range, pub secondary_status: DiffHunkSecondaryStatus, - pub secondary_diff_base_byte_range: Option>, } /// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range. @@ -110,12 +110,17 @@ impl sum_tree::Summary for DiffHunkSummary { } impl<'a> sum_tree::SeekTarget<'a, DiffHunkSummary, DiffHunkSummary> for Anchor { - fn cmp( - &self, - cursor_location: &DiffHunkSummary, - buffer: &text::BufferSnapshot, - ) -> cmp::Ordering { - self.cmp(&cursor_location.buffer_range.end, buffer) + fn cmp(&self, cursor_location: &DiffHunkSummary, buffer: &text::BufferSnapshot) -> Ordering { + if self + .cmp(&cursor_location.buffer_range.start, buffer) + .is_lt() + { + Ordering::Less + } else if self.cmp(&cursor_location.buffer_range.end, buffer).is_gt() { + Ordering::Greater + } else { + Ordering::Equal + } } } @@ -171,97 +176,96 @@ impl BufferDiffSnapshot { } } - fn buffer_range_to_unchanged_diff_base_range( - &self, - buffer_range: Range, - buffer: &text::BufferSnapshot, - ) -> Option> { - let mut hunks = self.inner.hunks.iter(); - let mut start = 0; - let mut pos = buffer.anchor_before(0); - while let Some(hunk) = hunks.next() { - assert!(buffer_range.start.cmp(&pos, buffer).is_ge()); - assert!(hunk.buffer_range.start.cmp(&pos, buffer).is_ge()); - if hunk - .buffer_range - .start - .cmp(&buffer_range.end, buffer) - .is_ge() - { - // target buffer range is contained in the unchanged stretch leading up to this next hunk, - // so do a final adjustment based on that - break; - } - - // if the target buffer range intersects this hunk at all, no dice - if buffer_range - .start - .cmp(&hunk.buffer_range.end, buffer) - .is_lt() - { - return None; - } - - start += hunk.buffer_range.start.to_offset(buffer) - pos.to_offset(buffer); - start += hunk.diff_base_byte_range.end - hunk.diff_base_byte_range.start; - pos = hunk.buffer_range.end; - } - start += buffer_range.start.to_offset(buffer) - pos.to_offset(buffer); - let end = start + buffer_range.end.to_offset(buffer) - buffer_range.start.to_offset(buffer); - Some(start..end) - } - - pub fn secondary_edits_for_stage_or_unstage( + pub fn new_secondary_text_for_stage_or_unstage( &self, stage: bool, - hunks: impl Iterator, Option>, Range)>, + hunks: impl Iterator, Range)>, buffer: &text::BufferSnapshot, - ) -> Vec<(Range, String)> { - let Some(secondary_diff) = self.secondary_diff() else { - log::debug!("no secondary diff"); - return Vec::new(); + cx: &mut App, + ) -> Option { + let secondary_diff = self.secondary_diff()?; + let index_base = if let Some(index_base) = secondary_diff.base_text() { + index_base.text.as_rope().clone() + } else 
if stage { + Rope::from("") + } else { + return None; }; - let index_base = secondary_diff.base_text().map_or_else( - || Rope::from(""), - |snapshot| snapshot.text.as_rope().clone(), - ); let head_base = self.base_text().map_or_else( || Rope::from(""), |snapshot| snapshot.text.as_rope().clone(), ); - log::debug!("original: {:?}", index_base.to_string()); + + let mut secondary_cursor = secondary_diff.inner.hunks.cursor::(buffer); + secondary_cursor.next(buffer); let mut edits = Vec::new(); - for (diff_base_byte_range, secondary_diff_base_byte_range, buffer_range) in hunks { - let (index_byte_range, replacement_text) = if stage { + let mut prev_secondary_hunk_buffer_offset = 0; + let mut prev_secondary_hunk_base_text_offset = 0; + for (buffer_range, diff_base_byte_range) in hunks { + let skipped_hunks = secondary_cursor.slice(&buffer_range.start, Bias::Left, buffer); + + if let Some(secondary_hunk) = skipped_hunks.last() { + prev_secondary_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end; + prev_secondary_hunk_buffer_offset = + secondary_hunk.buffer_range.end.to_offset(buffer); + } + + let mut buffer_offset_range = buffer_range.to_offset(buffer); + let start_overshoot = buffer_offset_range.start - prev_secondary_hunk_buffer_offset; + let mut secondary_base_text_start = + prev_secondary_hunk_base_text_offset + start_overshoot; + + while let Some(secondary_hunk) = secondary_cursor.item().filter(|item| { + item.buffer_range + .start + .cmp(&buffer_range.end, buffer) + .is_le() + }) { + let secondary_hunk_offset_range = secondary_hunk.buffer_range.to_offset(buffer); + prev_secondary_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end; + prev_secondary_hunk_buffer_offset = secondary_hunk_offset_range.end; + + secondary_base_text_start = + secondary_base_text_start.min(secondary_hunk.diff_base_byte_range.start); + buffer_offset_range.start = buffer_offset_range + .start + .min(secondary_hunk_offset_range.start); + + secondary_cursor.next(buffer); + } + + let end_overshoot = buffer_offset_range + .end + .saturating_sub(prev_secondary_hunk_buffer_offset); + let secondary_base_text_end = prev_secondary_hunk_base_text_offset + end_overshoot; + + let secondary_base_text_range = secondary_base_text_start..secondary_base_text_end; + buffer_offset_range.end = buffer_offset_range + .end + .max(prev_secondary_hunk_buffer_offset); + + let replacement_text = if stage { log::debug!("staging"); - let mut replacement_text = String::new(); - let Some(index_byte_range) = secondary_diff_base_byte_range.clone() else { - log::debug!("not a stageable hunk"); - continue; - }; - log::debug!("using {:?}", index_byte_range); - for chunk in buffer.text_for_range(buffer_range.clone()) { - replacement_text.push_str(chunk); - } - (index_byte_range, replacement_text) + buffer + .text_for_range(buffer_offset_range) + .collect::() } else { log::debug!("unstaging"); - let mut replacement_text = String::new(); - let Some(index_byte_range) = secondary_diff - .buffer_range_to_unchanged_diff_base_range(buffer_range.clone(), &buffer) - else { - log::debug!("not an unstageable hunk"); - continue; - }; - for chunk in head_base.chunks_in_range(diff_base_byte_range.clone()) { - replacement_text.push_str(chunk); - } - (index_byte_range, replacement_text) + head_base + .chunks_in_range(diff_base_byte_range.clone()) + .collect::() }; - edits.push((index_byte_range, replacement_text)); + edits.push((secondary_base_text_range, replacement_text)); } - log::debug!("edits: {edits:?}"); - edits + + let buffer = 
cx.new(|cx| { + language::Buffer::local_normalized(index_base, text::LineEnding::default(), cx) + }); + let new_text = buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + buffer.as_rope().clone() + }); + Some(new_text) } } @@ -322,13 +326,12 @@ impl BufferDiffInner { } let mut secondary_status = DiffHunkSecondaryStatus::None; - let mut secondary_diff_base_byte_range = None; if let Some(secondary_cursor) = secondary_cursor.as_mut() { if start_anchor .cmp(&secondary_cursor.start().buffer_range.start, buffer) .is_gt() { - secondary_cursor.seek_forward(&end_anchor, Bias::Left, buffer); + secondary_cursor.seek_forward(&start_anchor, Bias::Left, buffer); } if let Some(secondary_hunk) = secondary_cursor.item() { @@ -339,12 +342,12 @@ impl BufferDiffInner { } if secondary_range == (start_point..end_point) { secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk; - secondary_diff_base_byte_range = - Some(secondary_hunk.diff_base_byte_range.clone()); } else if secondary_range.start <= end_point { secondary_status = DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk; } } + } else { + log::debug!("no secondary cursor!!"); } return Some(DiffHunk { @@ -352,7 +355,6 @@ impl BufferDiffInner { diff_base_byte_range: start_base..end_base, buffer_range: start_anchor..end_anchor, secondary_status, - secondary_diff_base_byte_range, }); }) } @@ -387,7 +389,6 @@ impl BufferDiffInner { buffer_range: hunk.buffer_range.clone(), // The secondary status is not used by callers of this method. secondary_status: DiffHunkSecondaryStatus::None, - secondary_diff_base_byte_range: None, }) }) } @@ -408,12 +409,12 @@ impl BufferDiffInner { .start .cmp(&old_hunk.buffer_range.start, new_snapshot) { - cmp::Ordering::Less => { + Ordering::Less => { start.get_or_insert(new_hunk.buffer_range.start); end.replace(new_hunk.buffer_range.end); new_cursor.next(new_snapshot); } - cmp::Ordering::Equal => { + Ordering::Equal => { if new_hunk != old_hunk { start.get_or_insert(new_hunk.buffer_range.start); if old_hunk @@ -431,7 +432,7 @@ impl BufferDiffInner { new_cursor.next(new_snapshot); old_cursor.next(new_snapshot); } - cmp::Ordering::Greater => { + Ordering::Greater => { start.get_or_insert(old_hunk.buffer_range.start); end.replace(old_hunk.buffer_range.end); old_cursor.next(new_snapshot); @@ -1059,6 +1060,7 @@ mod tests { use rand::{rngs::StdRng, Rng as _}; use text::{Buffer, BufferId, Rope}; use unindent::Unindent as _; + use util::test::marked_text_ranges; #[ctor::ctor] fn init_logger() { @@ -1257,6 +1259,208 @@ mod tests { ); } + #[gpui::test] + async fn test_stage_hunk(cx: &mut TestAppContext) { + struct Example { + name: &'static str, + head_text: String, + index_text: String, + buffer_marked_text: String, + final_index_text: String, + } + + let table = [ + Example { + name: "uncommitted hunk straddles end of unstaged hunk", + head_text: " + one + two + three + four + five + " + .unindent(), + index_text: " + one + TWO_HUNDRED + three + FOUR_HUNDRED + five + " + .unindent(), + buffer_marked_text: " + ZERO + one + two + «THREE_HUNDRED + FOUR_HUNDRED» + five + SIX + " + .unindent(), + final_index_text: " + one + two + THREE_HUNDRED + FOUR_HUNDRED + five + " + .unindent(), + }, + Example { + name: "uncommitted hunk straddles start of unstaged hunk", + head_text: " + one + two + three + four + five + " + .unindent(), + index_text: " + one + TWO_HUNDRED + three + FOUR_HUNDRED + five + " + .unindent(), + buffer_marked_text: " + ZERO + one + «TWO_HUNDRED + THREE_HUNDRED» + four + five + SIX + " + .unindent(), + 
final_index_text: " + one + TWO_HUNDRED + THREE_HUNDRED + four + five + " + .unindent(), + }, + Example { + name: "uncommitted hunk strictly contains unstaged hunks", + head_text: " + one + two + three + four + five + six + seven + " + .unindent(), + index_text: " + one + TWO + THREE + FOUR + FIVE + SIX + seven + " + .unindent(), + buffer_marked_text: " + one + TWO + «THREE_HUNDRED + FOUR + FIVE_HUNDRED» + SIX + seven + " + .unindent(), + final_index_text: " + one + TWO + THREE_HUNDRED + FOUR + FIVE_HUNDRED + SIX + seven + " + .unindent(), + }, + Example { + name: "uncommitted deletion hunk", + head_text: " + one + two + three + four + five + " + .unindent(), + index_text: " + one + two + three + four + five + " + .unindent(), + buffer_marked_text: " + one + ˇfive + " + .unindent(), + final_index_text: " + one + five + " + .unindent(), + }, + ]; + + for example in table { + let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false); + let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); + let uncommitted_diff = + BufferDiff::build_sync(buffer.clone(), example.head_text.clone(), cx); + let unstaged_diff = + BufferDiff::build_sync(buffer.clone(), example.index_text.clone(), cx); + let uncommitted_diff = BufferDiffSnapshot { + inner: uncommitted_diff, + secondary_diff: Some(Box::new(BufferDiffSnapshot { + inner: unstaged_diff, + is_single_insertion: false, + secondary_diff: None, + })), + is_single_insertion: false, + }; + + let range = buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end); + + let new_index_text = cx + .update(|cx| { + uncommitted_diff.new_secondary_text_for_stage_or_unstage( + true, + uncommitted_diff + .hunks_intersecting_range(range, &buffer) + .map(|hunk| { + (hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone()) + }), + &buffer, + cx, + ) + }) + .unwrap() + .to_string(); + pretty_assertions::assert_eq!( + new_index_text, + example.final_index_text, + "example: {}", + example.name + ); + } + } + #[gpui::test] async fn test_buffer_diff_compare(cx: &mut TestAppContext) { let base_text = " @@ -1382,7 +1586,7 @@ mod tests { } #[gpui::test(iterations = 100)] - async fn test_secondary_edits_for_stage_unstage(cx: &mut TestAppContext, mut rng: StdRng) { + async fn test_staging_and_unstaging_hunks(cx: &mut TestAppContext, mut rng: StdRng) { fn gen_line(rng: &mut StdRng) -> String { if rng.gen_bool(0.2) { "\n".to_owned() @@ -1447,7 +1651,7 @@ mod tests { fn uncommitted_diff( working_copy: &language::BufferSnapshot, - index_text: &Entity, + index_text: &Rope, head_text: String, cx: &mut TestAppContext, ) -> BufferDiff { @@ -1456,7 +1660,7 @@ mod tests { buffer_id: working_copy.remote_id(), inner: BufferDiff::build_sync( working_copy.text.clone(), - index_text.read_with(cx, |index_text, _| index_text.text()), + index_text.to_string(), cx, ), secondary_diff: None, @@ -1487,17 +1691,11 @@ mod tests { ) }); let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot()); - let index_text = cx.new(|cx| { - language::Buffer::local_normalized( - if rng.gen() { - Rope::from(head_text.as_str()) - } else { - working_copy.as_rope().clone() - }, - text::LineEnding::default(), - cx, - ) - }); + let mut index_text = if rng.gen() { + Rope::from(head_text.as_str()) + } else { + working_copy.as_rope().clone() + }; let mut diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx); let mut hunks = cx.update(|cx| { @@ -1511,37 +1709,29 @@ mod tests { for _ in 0..operations { let i = 
rng.gen_range(0..hunks.len()); let hunk = &mut hunks[i]; - let hunk_fields = ( - hunk.diff_base_byte_range.clone(), - hunk.secondary_diff_base_byte_range.clone(), - hunk.buffer_range.clone(), - ); - let stage = match ( - hunk.secondary_status, - hunk.secondary_diff_base_byte_range.clone(), - ) { - (DiffHunkSecondaryStatus::HasSecondaryHunk, Some(_)) => { + let stage = match hunk.secondary_status { + DiffHunkSecondaryStatus::HasSecondaryHunk => { hunk.secondary_status = DiffHunkSecondaryStatus::None; - hunk.secondary_diff_base_byte_range = None; true } - (DiffHunkSecondaryStatus::None, None) => { + DiffHunkSecondaryStatus::None => { hunk.secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk; - // We don't look at this, just notice whether it's Some or not. - hunk.secondary_diff_base_byte_range = Some(17..17); false } _ => unreachable!(), }; let snapshot = cx.update(|cx| diff.snapshot(cx)); - let edits = snapshot.secondary_edits_for_stage_or_unstage( - stage, - [hunk_fields].into_iter(), - &working_copy, - ); - index_text.update(cx, |index_text, cx| { - index_text.edit(edits, None, cx); + index_text = cx.update(|cx| { + snapshot + .new_secondary_text_for_stage_or_unstage( + stage, + [(hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone())] + .into_iter(), + &working_copy, + cx, + ) + .unwrap() }); diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx); @@ -1550,6 +1740,7 @@ mod tests { .collect::>() }); assert_eq!(hunks.len(), found_hunks.len()); + for (expected_hunk, found_hunk) in hunks.iter().zip(&found_hunks) { assert_eq!( expected_hunk.buffer_range.to_point(&working_copy), @@ -1560,10 +1751,6 @@ mod tests { found_hunk.diff_base_byte_range ); assert_eq!(expected_hunk.secondary_status, found_hunk.secondary_status); - assert_eq!( - expected_hunk.secondary_diff_base_byte_range.is_some(), - found_hunk.secondary_diff_base_byte_range.is_some() - ) } hunks = found_hunks; } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index b1ab7586613aa2..b79931818e2cb4 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -256,6 +256,7 @@ async fn perform_completion( // so that users can use the new version, without having to update Zed. 
request.model = match model.as_str() { "claude-3-5-sonnet" => anthropic::Model::Claude3_5Sonnet.id().to_string(), + "claude-3-7-sonnet" => anthropic::Model::Claude3_7Sonnet.id().to_string(), "claude-3-opus" => anthropic::Model::Claude3Opus.id().to_string(), "claude-3-haiku" => anthropic::Model::Claude3Haiku.id().to_string(), "claude-3-sonnet" => anthropic::Model::Claude3Sonnet.id().to_string(), diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 19ce05f1eb8e59..885e6b62cd6d86 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -392,9 +392,13 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index d3c21084002d89..867e8fd3bb68e1 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -38,6 +38,7 @@ gpui.workspace = true http_client.workspace = true inline_completion.workspace = true language.workspace = true +log.workspace = true lsp.workspace = true menu.workspace = true node_runtime.workspace = true @@ -62,7 +63,9 @@ async-std = { version = "1.12.0", features = ["unstable"] } client = { workspace = true, features = ["test-support"] } clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } +ctor.workspace = true editor = { workspace = true, features = ["test-support"] } +env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 5edc0d5954329a..ff54ce1cef79f8 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -16,6 +16,7 @@ use gpui::{ }; use http_client::github::get_release_by_tag_name; use http_client::HttpClient; +use language::language_settings::CopilotSettings; use language::{ language_settings::{all_language_settings, language_settings, EditPredictionProvider}, point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16, @@ -367,13 +368,13 @@ impl Copilot { let server_id = self.server_id; let http = self.http.clone(); let node_runtime = self.node_runtime.clone(); - if all_language_settings(None, cx).edit_predictions.provider - == EditPredictionProvider::Copilot - { + let language_settings = all_language_settings(None, cx); + if language_settings.edit_predictions.provider == EditPredictionProvider::Copilot { if matches!(self.server, CopilotServer::Disabled) { + let env = self.build_env(&language_settings.edit_predictions.copilot); let start_task = cx .spawn(move |this, cx| { - Self::start_language_server(server_id, http, node_runtime, this, cx) + Self::start_language_server(server_id, http, node_runtime, env, 
this, cx) }) .shared(); self.server = CopilotServer::Starting { task: start_task }; @@ -385,6 +386,30 @@ impl Copilot { } } + fn build_env(&self, copilot_settings: &CopilotSettings) -> Option<HashMap<String, String>> { + let proxy_url = copilot_settings.proxy.clone()?; + let no_verify = copilot_settings.proxy_no_verify; + let http_or_https_proxy = if proxy_url.starts_with("http:") { + "HTTP_PROXY" + } else if proxy_url.starts_with("https:") { + "HTTPS_PROXY" + } else { + log::error!( + "Unsupported protocol scheme for language server proxy (must be http or https)" + ); + return None; + }; + + let mut env = HashMap::default(); + env.insert(http_or_https_proxy.to_string(), proxy_url); + + if let Some(true) = no_verify { + env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string()); + }; + + Some(env) + } + #[cfg(any(test, feature = "test-support"))] pub fn fake(cx: &mut gpui::TestAppContext) -> (Entity, lsp::FakeLanguageServer) { use lsp::FakeLanguageServer; @@ -422,6 +447,7 @@ impl Copilot { new_server_id: LanguageServerId, http: Arc, node_runtime: NodeRuntime, + env: Option<HashMap<String, String>>, this: WeakEntity, mut cx: AsyncApp, ) { @@ -432,8 +458,7 @@ impl Copilot { let binary = LanguageServerBinary { path: node_path, arguments, - // TODO: We could set HTTP_PROXY etc here and fix the copilot issue. - env: None, + env, }; let root_path = if cfg!(target_os = "windows") { @@ -611,6 +636,8 @@ impl Copilot { } pub fn reinstall(&mut self, cx: &mut Context) -> Task<()> { + let language_settings = all_language_settings(None, cx); + let env = self.build_env(&language_settings.edit_predictions.copilot); let start_task = cx .spawn({ let http = self.http.clone(); @@ -618,7 +645,7 @@ impl Copilot { let server_id = self.server_id; move |this, cx| async move { clear_copilot_dir().await; - Self::start_language_server(server_id, http, node_runtime, this, cx).await + Self::start_language_server(server_id, http, node_runtime, env, this, cx).await } }) .shared(); @@ -1279,3 +1306,11 @@ mod tests { } } } + +#[cfg(test)] +#[ctor::ctor] +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } +} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 70acf082bf81ab..b456052338db87 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -63,7 +63,7 @@ pub use editor_settings::{ CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar, }; pub use editor_settings_controls::*; -use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap}; +use element::{layout_line, AcceptEditPredictionBinding, LineWithInvisibles, PositionMap}; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, }; @@ -82,9 +82,9 @@ use git::blame::GitBlame; use gpui::{ div, impl_actions, point, prelude::*, pulsating_between, px, relative, size, Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, AvailableSpace, Background, Bounds, - ClickEvent, ClipboardEntry, ClipboardItem, Context, DispatchPhase, Entity, EntityInputHandler, - EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight, Global, - HighlightStyle, Hsla, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad, + ClickEvent, ClipboardEntry, ClipboardItem, Context, DispatchPhase, Edges, Entity, + EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight, + Global, HighlightStyle, Hsla, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad, ParentElement, Pixels, 
Render, SharedString, Size, Styled, StyledText, Subscription, Task, TextStyle, TextStyleRefinement, UTF16Selection, UnderlineStyle, UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window, @@ -119,6 +119,7 @@ use project::{ pub use proposed_changes_editor::{ ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar, }; +use smallvec::smallvec; use std::iter::Peekable; use task::{ResolvedTask, TaskTemplate, TaskVariables}; @@ -534,7 +535,7 @@ impl ScrollbarMarkerState { #[derive(Clone, Debug)] struct RunnableTasks { templates: Vec<(TaskSourceKind, TaskTemplate)>, - offset: MultiBufferOffset, + offset: multi_buffer::Anchor, // We need the column at which the task context evaluation should take place (when we're spawning it via gutter). column: u32, // Values of all named captures, including those starting with '_' @@ -562,8 +563,6 @@ struct ResolvedTasks { position: Anchor, } -#[derive(Copy, Clone, Debug)] -struct MultiBufferOffset(usize); #[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] struct BufferOffset(usize); @@ -688,8 +687,8 @@ pub struct Editor { show_inline_completions_override: Option, menu_inline_completions_policy: MenuInlineCompletionsPolicy, edit_prediction_preview: EditPredictionPreview, - edit_prediction_cursor_on_leading_whitespace: bool, - edit_prediction_requires_modifier_in_leading_space: bool, + edit_prediction_indent_conflict: bool, + edit_prediction_requires_modifier_in_indent_conflict: bool, inlay_hint_cache: InlayHintCache, next_inlay_id: usize, _subscriptions: Vec, @@ -707,7 +706,6 @@ pub struct Editor { show_git_blame_inline: bool, show_git_blame_inline_delay_task: Option>, git_blame_inline_tooltip: Option>, - distinguish_unstaged_diff_hunks: bool, git_blame_inline_enabled: bool, serialize_dirty_buffers: bool, show_selection_menu: Option, @@ -1432,12 +1430,11 @@ impl Editor { show_inline_completions_override: None, menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider, edit_prediction_settings: EditPredictionSettings::Disabled, - edit_prediction_cursor_on_leading_whitespace: false, - edit_prediction_requires_modifier_in_leading_space: true, + edit_prediction_indent_conflict: false, + edit_prediction_requires_modifier_in_indent_conflict: true, custom_context_menu: None, show_git_blame_gutter: false, show_git_blame_inline: false, - distinguish_unstaged_diff_hunks: false, show_selection_menu: None, show_git_blame_inline_delay_task: None, git_blame_inline_tooltip: None, @@ -1613,7 +1610,7 @@ impl Editor { || self.edit_prediction_requires_modifier() // Require modifier key when the cursor is on leading whitespace, to allow `tab` // bindings to insert tab characters. 
- || (self.edit_prediction_requires_modifier_in_leading_space && self.edit_prediction_cursor_on_leading_whitespace) + || (self.edit_prediction_requires_modifier_in_indent_conflict && self.edit_prediction_indent_conflict) } pub fn accept_edit_prediction_keybind( @@ -1861,6 +1858,7 @@ impl Editor { }), provider: Arc::new(provider), }); + self.update_edit_prediction_settings(cx); self.refresh_inline_completion(false, false, window, cx); } @@ -1980,7 +1978,7 @@ impl Editor { self.auto_replace_emoji_shortcode = auto_replace; } - pub fn toggle_inline_completions( + pub fn toggle_edit_predictions( &mut self, _: &ToggleEditPrediction, window: &mut Window, @@ -2001,6 +1999,7 @@ impl Editor { cx: &mut Context, ) { self.show_inline_completions_override = show_edit_predictions; + self.update_edit_prediction_settings(cx); if let Some(false) = show_edit_predictions { self.discard_inline_completion(false, cx); @@ -2185,7 +2184,7 @@ impl Editor { self.refresh_selected_text_highlights(window, cx); refresh_matching_bracket_highlights(self, window, cx); self.update_visible_inline_completion(window, cx); - self.edit_prediction_requires_modifier_in_leading_space = true; + self.edit_prediction_requires_modifier_in_indent_conflict = true; linked_editing_ranges::refresh_linked_ranges(self, window, cx); if self.git_blame_inline_enabled { self.start_inline_blame_timer(window, cx); @@ -4859,7 +4858,7 @@ impl Editor { let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - if !self.inline_completions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) { + if !self.edit_predictions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) { self.discard_inline_completion(false, cx); return None; } @@ -4908,6 +4907,22 @@ impl Editor { } } + pub fn update_edit_prediction_settings(&mut self, cx: &mut Context) { + if self.edit_prediction_provider.is_none() { + self.edit_prediction_settings = EditPredictionSettings::Disabled; + } else { + let selection = self.selections.newest_anchor(); + let cursor = selection.head(); + + if let Some((buffer, cursor_buffer_position)) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx) + { + self.edit_prediction_settings = + self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); + } + } + } + fn edit_prediction_settings_at_position( &self, buffer: &Entity, @@ -4962,18 +4977,18 @@ impl Editor { ) } - pub fn inline_completions_enabled(&self, cx: &App) -> bool { + pub fn edit_predictions_enabled_at_cursor(&self, cx: &App) -> bool { let cursor = self.selections.newest_anchor().head(); if let Some((buffer, cursor_position)) = self.buffer.read(cx).text_anchor_for_position(cursor, cx) { - self.inline_completions_enabled_in_buffer(&buffer, cursor_position, cx) + self.edit_predictions_enabled_in_buffer(&buffer, cursor_position, cx) } else { false } } - fn inline_completions_enabled_in_buffer( + fn edit_predictions_enabled_in_buffer( &self, buffer: &Entity, buffer_position: language::Anchor, @@ -5171,7 +5186,7 @@ impl Editor { } } - self.edit_prediction_requires_modifier_in_leading_space = false; + self.edit_prediction_requires_modifier_in_indent_conflict = false; } pub fn accept_partial_inline_completion( @@ -5469,8 +5484,19 @@ impl Editor { self.edit_prediction_settings = self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); - self.edit_prediction_cursor_on_leading_whitespace = - multibuffer.is_line_whitespace_upto(cursor); + self.edit_prediction_indent_conflict = 
multibuffer.is_line_whitespace_upto(cursor); + + if self.edit_prediction_indent_conflict { + let cursor_point = cursor.to_point(&multibuffer); + + let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx); + + if let Some((_, indent)) = indents.iter().next() { + if indent.len == cursor_point.column { + self.edit_prediction_indent_conflict = false; + } + } + } let inline_completion = provider.suggest(&buffer, cursor_buffer_position, cx)?; let edits = inline_completion @@ -6052,6 +6078,524 @@ impl Editor { .map(|menu| menu.origin()) } + const EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = Pixels(24.); + const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = Pixels(2.); + + #[allow(clippy::too_many_arguments)] + fn render_edit_prediction_popover( + &mut self, + text_bounds: &Bounds, + content_origin: gpui::Point, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + scroll_top: f32, + scroll_bottom: f32, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + scroll_pixel_position: gpui::Point, + newest_selection_head: Option, + editor_width: Pixels, + style: &EditorStyle, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let active_inline_completion = self.active_inline_completion.as_ref()?; + + if self.edit_prediction_visible_in_cursor_popover(true) { + return None; + } + + match &active_inline_completion.completion { + InlineCompletion::Move { target, .. } => { + let target_display_point = target.to_display_point(editor_snapshot); + + if self.edit_prediction_requires_modifier() { + if !self.edit_prediction_preview_is_active() { + return None; + } + + self.render_edit_prediction_modifier_jump_popover( + text_bounds, + content_origin, + visible_row_range, + line_layouts, + line_height, + scroll_pixel_position, + newest_selection_head, + target_display_point, + window, + cx, + ) + } else { + self.render_edit_prediction_eager_jump_popover( + text_bounds, + content_origin, + editor_snapshot, + visible_row_range, + scroll_top, + scroll_bottom, + line_height, + scroll_pixel_position, + target_display_point, + editor_width, + window, + cx, + ) + } + } + InlineCompletion::Edit { + display_mode: EditDisplayMode::Inline, + .. + } => None, + InlineCompletion::Edit { + display_mode: EditDisplayMode::TabAccept, + edits, + .. 
+ } => { + let range = &edits.first()?.0; + let target_display_point = range.end.to_display_point(editor_snapshot); + + self.render_edit_prediction_end_of_line_popover( + "Accept", + editor_snapshot, + visible_row_range, + target_display_point, + line_height, + scroll_pixel_position, + content_origin, + editor_width, + window, + cx, + ) + } + InlineCompletion::Edit { + edits, + edit_preview, + display_mode: EditDisplayMode::DiffPopover, + snapshot, + } => self.render_edit_prediction_diff_popover( + text_bounds, + content_origin, + editor_snapshot, + visible_row_range, + line_layouts, + line_height, + scroll_pixel_position, + newest_selection_head, + editor_width, + style, + edits, + edit_preview, + snapshot, + window, + cx, + ), + } + } + + #[allow(clippy::too_many_arguments)] + fn render_edit_prediction_modifier_jump_popover( + &mut self, + text_bounds: &Bounds, + content_origin: gpui::Point, + visible_row_range: Range, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + scroll_pixel_position: gpui::Point, + newest_selection_head: Option, + target_display_point: DisplayPoint, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let scrolled_content_origin = + content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0)); + + const SCROLL_PADDING_Y: Pixels = px(12.); + + if target_display_point.row() < visible_row_range.start { + return self.render_edit_prediction_scroll_popover( + |_| SCROLL_PADDING_Y, + IconName::ArrowUp, + visible_row_range, + line_layouts, + newest_selection_head, + scrolled_content_origin, + window, + cx, + ); + } else if target_display_point.row() >= visible_row_range.end { + return self.render_edit_prediction_scroll_popover( + |size| text_bounds.size.height - size.height - SCROLL_PADDING_Y, + IconName::ArrowDown, + visible_row_range, + line_layouts, + newest_selection_head, + scrolled_content_origin, + window, + cx, + ); + } + + const POLE_WIDTH: Pixels = px(2.); + + let mut element = v_flex() + .items_end() + .child( + self.render_edit_prediction_line_popover("Jump", None, window, cx)? + .rounded_br(px(0.)) + .rounded_tr(px(0.)) + .border_r_2(), + ) + .child( + div() + .w(POLE_WIDTH) + .bg(Editor::edit_prediction_callout_popover_border_color(cx)) + .h(line_height), + ) + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + let line_layout = + line_layouts.get(target_display_point.row().minus(visible_row_range.start) as usize)?; + let target_column = target_display_point.column() as usize; + + let target_x = line_layout.x_for_index(target_column); + let target_y = + (target_display_point.row().as_f32() * line_height) - scroll_pixel_position.y; + + let mut origin = scrolled_content_origin + point(target_x, target_y) + - point(size.width - POLE_WIDTH, size.height - line_height); + + origin.x = origin.x.max(content_origin.x); + + element.prepaint_at(origin, window, cx); + + Some((element, origin)) + } + + #[allow(clippy::too_many_arguments)] + fn render_edit_prediction_scroll_popover( + &mut self, + to_y: impl Fn(Size) -> Pixels, + scroll_icon: IconName, + visible_row_range: Range, + line_layouts: &[LineWithInvisibles], + newest_selection_head: Option, + scrolled_content_origin: gpui::Point, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let mut element = self + .render_edit_prediction_line_popover("Scroll", Some(scroll_icon), window, cx)? 
+ .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + let cursor = newest_selection_head?; + let cursor_row_layout = + line_layouts.get(cursor.row().minus(visible_row_range.start) as usize)?; + let cursor_column = cursor.column() as usize; + + let cursor_character_x = cursor_row_layout.x_for_index(cursor_column); + + let origin = scrolled_content_origin + point(cursor_character_x, to_y(size)); + + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } + + #[allow(clippy::too_many_arguments)] + fn render_edit_prediction_eager_jump_popover( + &mut self, + text_bounds: &Bounds, + content_origin: gpui::Point, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + scroll_top: f32, + scroll_bottom: f32, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + target_display_point: DisplayPoint, + editor_width: Pixels, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + if target_display_point.row().as_f32() < scroll_top { + let mut element = self + .render_edit_prediction_line_popover( + "Jump to Edit", + Some(IconName::ArrowUp), + window, + cx, + )? + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + let offset = point( + (text_bounds.size.width - size.width) / 2., + Self::EDIT_PREDICTION_POPOVER_PADDING_Y, + ); + + let origin = text_bounds.origin + offset; + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } else if (target_display_point.row().as_f32() + 1.) > scroll_bottom { + let mut element = self + .render_edit_prediction_line_popover( + "Jump to Edit", + Some(IconName::ArrowDown), + window, + cx, + )? + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + let offset = point( + (text_bounds.size.width - size.width) / 2., + text_bounds.size.height - size.height - Self::EDIT_PREDICTION_POPOVER_PADDING_Y, + ); + + let origin = text_bounds.origin + offset; + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } else { + self.render_edit_prediction_end_of_line_popover( + "Jump to Edit", + editor_snapshot, + visible_row_range, + target_display_point, + line_height, + scroll_pixel_position, + content_origin, + editor_width, + window, + cx, + ) + } + } + + #[allow(clippy::too_many_arguments)] + fn render_edit_prediction_end_of_line_popover( + self: &mut Editor, + label: &'static str, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + target_display_point: DisplayPoint, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + content_origin: gpui::Point, + editor_width: Pixels, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let target_line_end = DisplayPoint::new( + target_display_point.row(), + editor_snapshot.line_len(target_display_point.row()), + ); + + let mut element = self + .render_edit_prediction_line_popover(label, None, window, cx)? 
+ .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + let line_origin = self.display_to_pixel_point(target_line_end, editor_snapshot, window)?; + + let start_point = content_origin - point(scroll_pixel_position.x, Pixels::ZERO); + let mut origin = start_point + + line_origin + + point(Self::EDIT_PREDICTION_POPOVER_PADDING_X, Pixels::ZERO); + origin.x = origin.x.max(content_origin.x); + + let max_x = content_origin.x + editor_width - size.width; + + if origin.x > max_x { + let offset = line_height + Self::EDIT_PREDICTION_POPOVER_PADDING_Y; + + let icon = if visible_row_range.contains(&(target_display_point.row() + 2)) { + origin.y += offset; + IconName::ArrowUp + } else { + origin.y -= offset; + IconName::ArrowDown + }; + + element = self + .render_edit_prediction_line_popover(label, Some(icon), window, cx)? + .into_any(); + + let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + origin.x = content_origin.x + editor_width - size.width - px(2.); + } + + element.prepaint_at(origin, window, cx); + Some((element, origin)) + } + + #[allow(clippy::too_many_arguments)] + fn render_edit_prediction_diff_popover( + self: &Editor, + text_bounds: &Bounds, + content_origin: gpui::Point, + editor_snapshot: &EditorSnapshot, + visible_row_range: Range, + line_layouts: &[LineWithInvisibles], + line_height: Pixels, + scroll_pixel_position: gpui::Point, + newest_selection_head: Option, + editor_width: Pixels, + style: &EditorStyle, + edits: &Vec<(Range, String)>, + edit_preview: &Option, + snapshot: &language::BufferSnapshot, + window: &mut Window, + cx: &mut App, + ) -> Option<(AnyElement, gpui::Point)> { + let edit_start = edits + .first() + .unwrap() + .0 + .start + .to_display_point(editor_snapshot); + let edit_end = edits + .last() + .unwrap() + .0 + .end + .to_display_point(editor_snapshot); + + let is_visible = visible_row_range.contains(&edit_start.row()) + || visible_row_range.contains(&edit_end.row()); + if !is_visible { + return None; + } + + let highlighted_edits = + crate::inline_completion_edit_text(&snapshot, edits, edit_preview.as_ref()?, false, cx); + + let styled_text = highlighted_edits.to_styled_text(&style.text); + let line_count = highlighted_edits.text.lines().count(); + + const BORDER_WIDTH: Pixels = px(1.); + + let mut element = h_flex() + .items_start() + .child( + h_flex() + .bg(cx.theme().colors().editor_background) + .border(BORDER_WIDTH) + .shadow_sm() + .border_color(cx.theme().colors().border) + .rounded_l_lg() + .when(line_count > 1, |el| el.rounded_br_lg()) + .pr_1() + .child(styled_text), + ) + .child( + h_flex() + .h(line_height + BORDER_WIDTH * px(2.)) + .px_1p5() + .gap_1() + // Workaround: For some reason, there's a gap if we don't do this + .ml(-BORDER_WIDTH) + .shadow(smallvec![gpui::BoxShadow { + color: gpui::black().opacity(0.05), + offset: point(px(1.), px(1.)), + blur_radius: px(2.), + spread_radius: px(0.), + }]) + .bg(Editor::edit_prediction_line_popover_bg_color(cx)) + .border(BORDER_WIDTH) + .border_color(cx.theme().colors().border) + .rounded_r_lg() + .children(self.render_edit_prediction_accept_keybind(window, cx)), + ) + .into_any(); + + let longest_row = + editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1); + let longest_line_width = if visible_row_range.contains(&longest_row) { + line_layouts[(longest_row.0 - visible_row_range.start.0) as usize].width + } else { + layout_line( + longest_row, + editor_snapshot, + style, + editor_width, + |_| false, + window, + cx, + 
) + .width + }; + + let viewport_bounds = + Bounds::new(Default::default(), window.viewport_size()).extend(Edges { + right: -EditorElement::SCROLLBAR_WIDTH, + ..Default::default() + }); + + let x_after_longest = + text_bounds.origin.x + longest_line_width + Self::EDIT_PREDICTION_POPOVER_PADDING_X + - scroll_pixel_position.x; + + let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx); + + // Fully visible if it can be displayed within the window (allow overlapping other + // panes). However, this is only allowed if the popover starts within text_bounds. + let can_position_to_the_right = x_after_longest < text_bounds.right() + && x_after_longest + element_bounds.width < viewport_bounds.right(); + + let mut origin = if can_position_to_the_right { + point( + x_after_longest, + text_bounds.origin.y + edit_start.row().as_f32() * line_height + - scroll_pixel_position.y, + ) + } else { + let cursor_row = newest_selection_head.map(|head| head.row()); + let above_edit = edit_start + .row() + .0 + .checked_sub(line_count as u32) + .map(DisplayRow); + let below_edit = Some(edit_end.row() + 1); + let above_cursor = + cursor_row.and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow)); + let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1); + + // Place the edit popover adjacent to the edit if there is a location + // available that is onscreen and does not obscure the cursor. Otherwise, + // place it adjacent to the cursor. + let row_target = [above_edit, below_edit, above_cursor, below_cursor] + .into_iter() + .flatten() + .find(|&start_row| { + let end_row = start_row + line_count as u32; + visible_row_range.contains(&start_row) + && visible_row_range.contains(&end_row) + && cursor_row.map_or(true, |cursor_row| { + !((start_row..end_row).contains(&cursor_row)) + }) + })?; + + content_origin + + point( + -scroll_pixel_position.x, + row_target.as_f32() * line_height - scroll_pixel_position.y, + ) + }; + + origin.x -= BORDER_WIDTH; + + window.defer_draw(element, origin, 1); + + // Do not return an element, since it will already be drawn due to defer_draw. + None + } + fn edit_prediction_cursor_popover_height(&self) -> Pixels { px(30.) 
} @@ -10821,7 +11365,9 @@ impl Editor { (runnable.buffer_id, row), RunnableTasks { templates: tasks, - offset: MultiBufferOffset(runnable.run_range.start), + offset: snapshot + .buffer_snapshot + .anchor_before(runnable.run_range.start), context_range, column: point.column, extra_variables: runnable.extra_captures, @@ -11599,7 +12145,9 @@ impl Editor { let range = editor.range_for_match(&range); let range = collapse_multiline_range(range); - if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() { + if !split + && Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() + { editor.go_to_singleton_buffer_range(range.clone(), window, cx); } else { window.defer(cx, move |window, cx| { @@ -13263,10 +13811,6 @@ impl Editor { }); } - pub fn set_distinguish_unstaged_diff_hunks(&mut self) { - self.distinguish_unstaged_diff_hunks = true; - } - pub fn expand_all_diff_hunks( &mut self, _: &ExpandAllDiffHunks, @@ -13312,7 +13856,7 @@ impl Editor { snapshot: &MultiBufferSnapshot, ) -> bool { let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot); - hunks.any(|hunk| hunk.secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk) + hunks.any(|hunk| hunk.secondary_status != DiffHunkSecondaryStatus::None) } pub fn toggle_staged_selected_diff_hunks( @@ -13457,12 +14001,8 @@ impl Editor { log::debug!("no diff for buffer id"); return; }; - let Some(secondary_diff) = diff.secondary_diff() else { - log::debug!("no secondary diff for buffer id"); - return; - }; - let edits = diff.secondary_edits_for_stage_or_unstage( + let Some(new_index_text) = diff.new_secondary_text_for_stage_or_unstage( stage, hunks.filter_map(|hunk| { if stage && hunk.secondary_status == DiffHunkSecondaryStatus::None { @@ -13472,29 +14012,14 @@ impl Editor { { return None; } - Some(( - hunk.diff_base_byte_range.clone(), - hunk.secondary_diff_base_byte_range.clone(), - hunk.buffer_range.clone(), - )) + Some((hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone())) }), &buffer_snapshot, - ); - - let Some(index_base) = secondary_diff - .base_text() - .map(|snapshot| snapshot.text.as_rope().clone()) - else { - log::debug!("no index base"); + cx, + ) else { + log::debug!("missing secondary diff or index text"); return; }; - let index_buffer = cx.new(|cx| { - Buffer::local_normalized(index_base.clone(), text::LineEnding::default(), cx) - }); - let new_index_text = index_buffer.update(cx, |index_buffer, cx| { - index_buffer.edit(edits, None, cx); - index_buffer.snapshot().as_rope().to_string() - }); let new_index_text = if new_index_text.is_empty() && !stage && (diff.is_single_insertion @@ -13512,7 +14037,12 @@ impl Editor { .update(cx, |buffer_store, cx| buffer_store.save_buffer(buffer, cx)) .detach_and_log_err(cx); - let _ = repo.read(cx).set_index_text(&path, new_index_text); + cx.background_spawn( + repo.read(cx) + .set_index_text(&path, new_index_text.map(|rope| rope.to_string())) + .log_err(), + ) + .detach(); } pub fn expand_selected_diff_hunks(&mut self, cx: &mut Context) { @@ -15198,6 +15728,7 @@ impl Editor { fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + self.update_edit_prediction_settings(cx); self.refresh_inline_completion(true, false, window, cx); self.refresh_inlay_hints( InlayHintRefreshReason::SettingsChange(inlay_hint_settings( @@ -15386,27 +15917,39 @@ impl Editor { let selections = self.selections.all::(cx); let multi_buffer = self.buffer.read(cx); for selection in selections { - for 
(buffer, mut range, _) in multi_buffer + for (snapshot, range, _, anchor) in multi_buffer .snapshot(cx) - .range_to_buffer_ranges(selection.range()) + .range_to_buffer_ranges_with_deleted_hunks(selection.range()) { - // When editing branch buffers, jump to the corresponding location - // in their base buffer. - let mut buffer_handle = multi_buffer.buffer(buffer.remote_id()).unwrap(); - let buffer = buffer_handle.read(cx); - if let Some(base_buffer) = buffer.base_buffer() { - range = buffer.range_to_version(range, &base_buffer.read(cx).version()); - buffer_handle = base_buffer; - } - - if selection.reversed { - mem::swap(&mut range.start, &mut range.end); + if let Some(anchor) = anchor { + // selection is in a deleted hunk + let Some(buffer_id) = anchor.buffer_id else { + continue; + }; + let Some(buffer_handle) = multi_buffer.buffer(buffer_id) else { + continue; + }; + let offset = text::ToOffset::to_offset( + &anchor.text_anchor, + &buffer_handle.read(cx).snapshot(), + ); + let range = offset..offset; + new_selections_by_buffer + .entry(buffer_handle) + .or_insert((Vec::new(), None)) + .0 + .push(range) + } else { + let Some(buffer_handle) = multi_buffer.buffer(snapshot.remote_id()) + else { + continue; + }; + new_selections_by_buffer + .entry(buffer_handle) + .or_insert((Vec::new(), None)) + .0 + .push(range) } - new_selections_by_buffer - .entry(buffer_handle) - .or_insert((Vec::new(), None)) - .0 - .push(range) } } } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index da41b1db2a0236..95e272c76edf3a 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -7,7 +7,7 @@ use crate::{ }, JoinLines, }; -use buffer_diff::{BufferDiff, DiffHunkStatus}; +use buffer_diff::{BufferDiff, DiffHunkStatus, DiffHunkStatusKind}; use futures::StreamExt; use gpui::{ div, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext, @@ -3392,7 +3392,7 @@ async fn test_join_lines_with_git_diff_base(executor: BackgroundExecutor, cx: &m .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); // Join lines @@ -3432,7 +3432,7 @@ async fn test_custom_newlines_cause_no_false_positive_diffs( init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; cx.set_state("Line 0\r\nLine 1\rˇ\nLine 2\r\nLine 3"); - cx.set_diff_base("Line 0\r\nLine 1\r\nLine 2\r\nLine 3"); + cx.set_head_text("Line 0\r\nLine 1\r\nLine 2\r\nLine 3"); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -5814,7 +5814,7 @@ async fn test_fold_function_bodies(cx: &mut TestAppContext) { let mut cx = EditorLspTestContext::new_rust(Default::default(), cx).await; cx.set_state(&text); - cx.set_diff_base(&base_text); + cx.set_head_text(&base_text); cx.update_editor(|editor, window, cx| { editor.expand_all_diff_hunks(&Default::default(), window, cx); }); @@ -11042,7 +11042,7 @@ async fn test_go_to_hunk(executor: BackgroundExecutor, cx: &mut TestAppContext) .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -12535,7 +12535,7 @@ async fn test_deleting_over_diff_hunk(cx: &mut TestAppContext) { three "#}; - cx.set_diff_base(base_text); + cx.set_head_text(base_text); cx.set_state("\nˇ\n"); cx.executor().run_until_parked(); cx.update_editor(|editor, _window, cx| { @@ -13172,7 +13172,7 @@ async fn test_toggle_selected_diff_hunks(executor: BackgroundExecutor, cx: &mut .unindent(), ); - 
cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -13306,7 +13306,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks( .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -13334,7 +13334,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks( .unindent(), ); - cx.set_diff_base("new diff base!"); + cx.set_head_text("new diff base!"); executor.run_until_parked(); cx.assert_state_with_diff( r#" @@ -13634,7 +13634,7 @@ async fn test_edits_around_expanded_insertion_hunks( .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -13782,7 +13782,7 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - cx.set_diff_base(indoc! { " + cx.set_head_text(indoc! { " one two three @@ -13905,7 +13905,7 @@ async fn test_edits_around_expanded_deletion_hunks( .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -14028,7 +14028,7 @@ async fn test_backspace_after_deletion_hunk(executor: BackgroundExecutor, cx: &m .unindent(), ); - cx.set_diff_base(&base_text); + cx.set_head_text(&base_text); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -14110,7 +14110,7 @@ async fn test_edit_after_expanded_modification_hunk( .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); executor.run_until_parked(); cx.update_editor(|editor, window, cx| { editor.expand_all_diff_hunks(&ExpandAllDiffHunks, window, cx); @@ -14845,7 +14845,7 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp "# .unindent(), ); - cx.set_diff_base(&diff_base); + cx.set_head_text(&diff_base); cx.update_editor(|editor, window, cx| { editor.expand_all_diff_hunks(&ExpandAllDiffHunks, window, cx); }); @@ -14982,6 +14982,80 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp ); } +#[gpui::test] +async fn test_partially_staged_hunk(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_head_text(indoc! { " + one + two + three + four + five + " + }); + cx.set_index_text(indoc! { " + one + two + three + four + five + " + }); + cx.set_state(indoc! {" + one + TWO + ˇTHREE + FOUR + five + "}); + cx.run_until_parked(); + cx.update_editor(|editor, window, cx| { + editor.toggle_staged_selected_diff_hunks(&Default::default(), window, cx); + }); + cx.run_until_parked(); + cx.assert_index_text(Some(indoc! {" + one + TWO + THREE + FOUR + five + "})); + cx.set_state(indoc! { " + one + TWO + ˇTHREE-HUNDRED + FOUR + five + "}); + cx.run_until_parked(); + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let hunks = editor + .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot) + .collect::<Vec<_>>(); + assert_eq!(hunks.len(), 1); + assert_eq!( + hunks[0].status(), + DiffHunkStatus { + kind: DiffHunkStatusKind::Modified, + secondary: DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk + } + ); + + editor.toggle_staged_selected_diff_hunks(&Default::default(), window, cx); + }); + cx.run_until_parked(); + cx.assert_index_text(Some(indoc! 
{" + one + TWO + THREE-HUNDRED + FOUR + five + "})); +} + #[gpui::test] fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -15341,11 +15415,12 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) { }); editor.update_in(cx, |editor, window, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); editor.tasks.insert( (buffer.read(cx).remote_id(), 3), RunnableTasks { templates: vec![], - offset: MultiBufferOffset(43), + offset: snapshot.anchor_before(43), column: 0, extra_variables: HashMap::default(), context_range: BufferOffset(43)..BufferOffset(85), @@ -15355,7 +15430,7 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) { (buffer.read(cx).remote_id(), 8), RunnableTasks { templates: vec![], - offset: MultiBufferOffset(86), + offset: snapshot.anchor_before(86), column: 0, extra_variables: HashMap::default(), context_range: BufferOffset(86)..BufferOffset(191), @@ -16686,7 +16761,7 @@ fn assert_hunk_revert( cx: &mut EditorLspTestContext, ) { cx.set_state(not_reverted_text_with_selections); - cx.set_diff_base(base_text); + cx.set_head_text(base_text); cx.executor().run_until_parked(); let actual_hunk_statuses_before = cx.update_editor(|editor, window, cx| { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index b04114c44c8ff3..1a46252d70680f 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -50,7 +50,7 @@ use language::{ use lsp::DiagnosticSeverity; use multi_buffer::{ Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow, - RowInfo, ToOffset, + RowInfo, }; use project::{ debugger::breakpoint_store::{Breakpoint, BreakpointKind}, @@ -410,7 +410,7 @@ impl EditorElement { register_action(editor, window, Editor::toggle_relative_line_numbers); register_action(editor, window, Editor::toggle_indent_guides); register_action(editor, window, Editor::toggle_inlay_hints); - register_action(editor, window, Editor::toggle_inline_completions); + register_action(editor, window, Editor::toggle_edit_predictions); register_action(editor, window, Editor::toggle_inline_diagnostics); register_action(editor, window, hover_popover::hover); register_action(editor, window, Editor::reveal_in_finder); @@ -2141,21 +2141,22 @@ impl EditorElement { None }; - let offset_range_start = snapshot - .display_point_to_anchor(DisplayPoint::new(range.start, 0), Bias::Left) - .to_offset(&snapshot.buffer_snapshot); - let offset_range_end = snapshot - .display_point_to_anchor(DisplayPoint::new(range.end, 0), Bias::Right) - .to_offset(&snapshot.buffer_snapshot); + let offset_range_start = + snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left); + + let offset_range_end = + snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right); editor .tasks .iter() .filter_map(|(_, tasks)| { - if tasks.offset.0 < offset_range_start || tasks.offset.0 >= offset_range_end { + let multibuffer_point = tasks.offset.to_point(&snapshot.buffer_snapshot); + if multibuffer_point < offset_range_start + || multibuffer_point > offset_range_end + { return None; } - let multibuffer_point = tasks.offset.0.to_point(&snapshot.buffer_snapshot); let multibuffer_row = MultiBufferRow(multibuffer_point.row); let buffer_folded = snapshot .buffer_snapshot @@ -3805,391 +3806,6 @@ impl EditorElement { } } - #[allow(clippy::too_many_arguments)] - fn layout_edit_prediction_popover( - &self, - text_bounds: &Bounds, - content_origin: gpui::Point, - editor_snapshot: 
&EditorSnapshot, - visible_row_range: Range, - scroll_top: f32, - scroll_bottom: f32, - line_layouts: &[LineWithInvisibles], - line_height: Pixels, - scroll_pixel_position: gpui::Point, - newest_selection_head: Option, - editor_width: Pixels, - style: &EditorStyle, - window: &mut Window, - cx: &mut App, - ) -> Option<(AnyElement, gpui::Point)> { - const PADDING_X: Pixels = Pixels(24.); - const PADDING_Y: Pixels = Pixels(2.); - - let editor = self.editor.read(cx); - let active_inline_completion = editor.active_inline_completion.as_ref()?; - - if editor.edit_prediction_visible_in_cursor_popover(true) { - return None; - } - - // Adjust text origin for horizontal scrolling (in some cases here) - let start_point = content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0)); - - // Clamp left offset after extreme scrollings - let clamp_start = |point: gpui::Point| gpui::Point { - x: point.x.max(content_origin.x), - y: point.y, - }; - - match &active_inline_completion.completion { - InlineCompletion::Move { target, .. } => { - let target_display_point = target.to_display_point(editor_snapshot); - - if editor.edit_prediction_requires_modifier() { - if !editor.edit_prediction_preview_is_active() { - return None; - } - - if target_display_point.row() < visible_row_range.start { - let mut element = editor - .render_edit_prediction_line_popover( - "Scroll", - Some(IconName::ArrowUp), - window, - cx, - )? - .into_any(); - - element.layout_as_root(AvailableSpace::min_size(), window, cx); - - let cursor = newest_selection_head?; - let cursor_row_layout = line_layouts - .get(cursor.row().minus(visible_row_range.start) as usize)?; - let cursor_column = cursor.column() as usize; - - let cursor_character_x = cursor_row_layout.x_for_index(cursor_column); - - const PADDING_Y: Pixels = px(12.); - - let origin = start_point + point(cursor_character_x, PADDING_Y); - - element.prepaint_at(origin, window, cx); - return Some((element, origin)); - } else if target_display_point.row() >= visible_row_range.end { - let mut element = editor - .render_edit_prediction_line_popover( - "Scroll", - Some(IconName::ArrowDown), - window, - cx, - )? - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - let cursor = newest_selection_head?; - let cursor_row_layout = line_layouts - .get(cursor.row().minus(visible_row_range.start) as usize)?; - let cursor_column = cursor.column() as usize; - - let cursor_character_x = cursor_row_layout.x_for_index(cursor_column); - const PADDING_Y: Pixels = px(12.); - - let origin = start_point - + point( - cursor_character_x, - text_bounds.size.height - size.height - PADDING_Y, - ); - - element.prepaint_at(origin, window, cx); - return Some((element, origin)); - } else { - const POLE_WIDTH: Pixels = px(2.); - - let mut element = v_flex() - .items_end() - .child( - editor - .render_edit_prediction_line_popover("Jump", None, window, cx)? 
- .rounded_br(px(0.)) - .rounded_tr(px(0.)) - .border_r_2(), - ) - .child( - div() - .w(POLE_WIDTH) - .bg(Editor::edit_prediction_callout_popover_border_color(cx)) - .h(line_height), - ) - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - let line_layout = - line_layouts - .get(target_display_point.row().minus(visible_row_range.start) - as usize)?; - let target_column = target_display_point.column() as usize; - - let target_x = line_layout.x_for_index(target_column); - let target_y = (target_display_point.row().as_f32() * line_height) - - scroll_pixel_position.y; - - let origin = clamp_start( - start_point + point(target_x, target_y) - - point(size.width - POLE_WIDTH, size.height - line_height), - ); - - element.prepaint_at(origin, window, cx); - - return Some((element, origin)); - } - } - - if target_display_point.row().as_f32() < scroll_top { - let mut element = editor - .render_edit_prediction_line_popover( - "Jump to Edit", - Some(IconName::ArrowUp), - window, - cx, - )? - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - let offset = point((text_bounds.size.width - size.width) / 2., PADDING_Y); - - let origin = text_bounds.origin + offset; - element.prepaint_at(origin, window, cx); - Some((element, origin)) - } else if (target_display_point.row().as_f32() + 1.) > scroll_bottom { - let mut element = editor - .render_edit_prediction_line_popover( - "Jump to Edit", - Some(IconName::ArrowDown), - window, - cx, - )? - .into_any(); - - let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - let offset = point( - (text_bounds.size.width - size.width) / 2., - text_bounds.size.height - size.height - PADDING_Y, - ); - - let origin = text_bounds.origin + offset; - element.prepaint_at(origin, window, cx); - Some((element, origin)) - } else { - let mut element = editor - .render_edit_prediction_line_popover("Jump to Edit", None, window, cx)? - .into_any(); - let target_line_end = DisplayPoint::new( - target_display_point.row(), - editor_snapshot.line_len(target_display_point.row()), - ); - let origin = self.editor.update(cx, |editor, _cx| { - editor.display_to_pixel_point(target_line_end, editor_snapshot, window) - })?; - - let origin = clamp_start(start_point + origin + point(PADDING_X, px(0.))); - element.prepaint_as_root(origin, AvailableSpace::min_size(), window, cx); - Some((element, origin)) - } - } - InlineCompletion::Edit { - edits, - edit_preview, - display_mode, - snapshot, - } => { - if self.editor.read(cx).has_visible_completions_menu() { - return None; - } - - let edit_start = edits - .first() - .unwrap() - .0 - .start - .to_display_point(editor_snapshot); - let edit_end = edits - .last() - .unwrap() - .0 - .end - .to_display_point(editor_snapshot); - - let is_visible = visible_row_range.contains(&edit_start.row()) - || visible_row_range.contains(&edit_end.row()); - if !is_visible { - return None; - } - - match display_mode { - EditDisplayMode::TabAccept => { - let range = &edits.first()?.0; - let target_display_point = range.end.to_display_point(editor_snapshot); - - let target_line_end = DisplayPoint::new( - target_display_point.row(), - editor_snapshot.line_len(target_display_point.row()), - ); - let (mut element, origin) = self.editor.update(cx, |editor, cx| { - Some(( - editor - .render_edit_prediction_line_popover( - "Accept", None, window, cx, - )? 
- .into_any(), - editor.display_to_pixel_point( - target_line_end, - editor_snapshot, - window, - )?, - )) - })?; - - let origin = clamp_start(start_point + origin + point(PADDING_X, px(0.))); - element.prepaint_as_root(origin, AvailableSpace::min_size(), window, cx); - return Some((element, origin)); - } - EditDisplayMode::Inline => return None, - EditDisplayMode::DiffPopover => {} - } - - let highlighted_edits = crate::inline_completion_edit_text( - &snapshot, - edits, - edit_preview.as_ref()?, - false, - cx, - ); - - let styled_text = highlighted_edits.to_styled_text(&style.text); - let line_count = highlighted_edits.text.lines().count(); - - const BORDER_WIDTH: Pixels = px(1.); - - let mut element = h_flex() - .items_start() - .child( - h_flex() - .bg(cx.theme().colors().editor_background) - .border(BORDER_WIDTH) - .shadow_sm() - .border_color(cx.theme().colors().border) - .rounded_l_lg() - .when(line_count > 1, |el| el.rounded_br_lg()) - .pr_1() - .child(styled_text), - ) - .child( - h_flex() - .h(line_height + BORDER_WIDTH * px(2.)) - .px_1p5() - .gap_1() - // Workaround: For some reason, there's a gap if we don't do this - .ml(-BORDER_WIDTH) - .shadow(smallvec![gpui::BoxShadow { - color: gpui::black().opacity(0.05), - offset: point(px(1.), px(1.)), - blur_radius: px(2.), - spread_radius: px(0.), - }]) - .bg(Editor::edit_prediction_line_popover_bg_color(cx)) - .border(BORDER_WIDTH) - .border_color(cx.theme().colors().border) - .rounded_r_lg() - .children(editor.render_edit_prediction_accept_keybind(window, cx)), - ) - .into_any(); - - let longest_row = - editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1); - let longest_line_width = if visible_row_range.contains(&longest_row) { - line_layouts[(longest_row.0 - visible_row_range.start.0) as usize].width - } else { - layout_line( - longest_row, - editor_snapshot, - style, - editor_width, - |_| false, - window, - cx, - ) - .width - }; - - let viewport_bounds = Bounds::new(Default::default(), window.viewport_size()) - .extend(Edges { - right: -Self::SCROLLBAR_WIDTH, - ..Default::default() - }); - - let x_after_longest = - text_bounds.origin.x + longest_line_width + PADDING_X - scroll_pixel_position.x; - - let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx); - - // Fully visible if it can be displayed within the window (allow overlapping other - // panes). However, this is only allowed if the popover starts within text_bounds. - let can_position_to_the_right = x_after_longest < text_bounds.right() - && x_after_longest + element_bounds.width < viewport_bounds.right(); - - let mut origin = if can_position_to_the_right { - point( - x_after_longest, - text_bounds.origin.y + edit_start.row().as_f32() * line_height - - scroll_pixel_position.y, - ) - } else { - let cursor_row = newest_selection_head.map(|head| head.row()); - let above_edit = edit_start - .row() - .0 - .checked_sub(line_count as u32) - .map(DisplayRow); - let below_edit = Some(edit_end.row() + 1); - let above_cursor = cursor_row - .and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow)); - let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1); - - // Place the edit popover adjacent to the edit if there is a location - // available that is onscreen and does not obscure the cursor. Otherwise, - // place it adjacent to the cursor. 
- let row_target = [above_edit, below_edit, above_cursor, below_cursor] - .into_iter() - .flatten() - .find(|&start_row| { - let end_row = start_row + line_count as u32; - visible_row_range.contains(&start_row) - && visible_row_range.contains(&end_row) - && cursor_row.map_or(true, |cursor_row| { - !((start_row..end_row).contains(&cursor_row)) - }) - })?; - - content_origin - + point( - -scroll_pixel_position.x, - row_target.as_f32() * line_height - scroll_pixel_position.y, - ) - }; - - origin.x -= BORDER_WIDTH; - - window.defer_draw(element, origin, 1); - - // Do not return an element, since it will already be drawn due to defer_draw. - None - } - } - } - fn layout_mouse_context_menu( &self, editor_snapshot: &EditorSnapshot, @@ -6335,7 +5951,7 @@ pub(crate) struct LineWithInvisibles { fragments: SmallVec<[LineFragment; 1]>, invisibles: Vec, len: usize, - width: Pixels, + pub(crate) width: Pixels, font_size: Pixels, } @@ -7519,22 +7135,25 @@ impl Element for EditorElement { }); let (inline_completion_popover, inline_completion_popover_origin) = self - .layout_edit_prediction_popover( - &text_hitbox.bounds, - content_origin, - &snapshot, - start_row..end_row, - scroll_position.y, - scroll_position.y + height_in_lines, - &line_layouts, - line_height, - scroll_pixel_position, - newest_selection_head, - editor_width, - &style, - window, - cx, - ) + .editor + .update(cx, |editor, cx| { + editor.render_edit_prediction_popover( + &text_hitbox.bounds, + content_origin, + &snapshot, + start_row..end_row, + scroll_position.y, + scroll_position.y + height_in_lines, + &line_layouts, + line_height, + scroll_pixel_position, + newest_selection_head, + editor_width, + &style, + window, + cx, + ) + }) .unzip(); let mut inline_diagnostics = self.layout_inline_diagnostics( @@ -8356,7 +7975,7 @@ struct BlockLayout { style: BlockStyle, } -fn layout_line( +pub fn layout_line( row: DisplayRow, snapshot: &EditorSnapshot, style: &EditorStyle, diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index fb63d21151d26c..6a08f6e283ca5e 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -285,7 +285,7 @@ impl EditorTestContext { snapshot.anchor_before(ranges[0].start)..snapshot.anchor_after(ranges[0].end) } - pub fn set_diff_base(&mut self, diff_base: &str) { + pub fn set_head_text(&mut self, diff_base: &str) { self.cx.run_until_parked(); let fs = self.update_editor(|editor, _, cx| { editor.project.as_ref().unwrap().read(cx).fs().as_fake() @@ -298,6 +298,20 @@ impl EditorTestContext { self.cx.run_until_parked(); } + pub fn set_index_text(&mut self, diff_base: &str) { + self.cx.run_until_parked(); + let fs = self.update_editor(|editor, _, cx| { + editor.project.as_ref().unwrap().read(cx).fs().as_fake() + }); + let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); + fs.set_index_for_repo( + &Self::root_path().join(".git"), + &[(path.into(), diff_base.to_string())], + ); + self.cx.run_until_parked(); + } + + #[track_caller] pub fn assert_index_text(&mut self, expected: Option<&str>) { let fs = self.update_editor(|editor, _, cx| { editor.project.as_ref().unwrap().read(cx).fs().as_fake() diff --git a/crates/extensions_ui/src/components/extension_card.rs b/crates/extensions_ui/src/components/extension_card.rs index 901e8db075a5f6..d9ae37801186c6 100644 --- a/crates/extensions_ui/src/components/extension_card.rs +++ b/crates/extensions_ui/src/components/extension_card.rs @@ -53,10 +53,7 
@@ impl RenderOnce for ExtensionCard { .size_full() .items_center() .justify_center() - .bg(theme::color_alpha( - cx.theme().colors().elevated_surface_background, - 0.8, - )) + .bg(cx.theme().colors().elevated_surface_background.alpha(0.8)) .child(Label::new("Overridden by dev extension.")), ) }), diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 628c216ccd9209..35ffad95118566 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -1337,7 +1337,10 @@ impl FakeFs { pub fn paths(&self, include_dot_git: bool) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); - queue.push_back((PathBuf::from("/"), self.state.lock().root.clone())); + queue.push_back(( + PathBuf::from(util::path!("/")), + self.state.lock().root.clone(), + )); while let Some((path, entry)) = queue.pop_front() { if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() { for (name, entry) in entries { @@ -1358,7 +1361,10 @@ impl FakeFs { pub fn directories(&self, include_dot_git: bool) -> Vec { let mut result = Vec::new(); let mut queue = collections::VecDeque::new(); - queue.push_back((PathBuf::from("/"), self.state.lock().root.clone())); + queue.push_back(( + PathBuf::from(util::path!("/")), + self.state.lock().root.clone(), + )); while let Some((path, entry)) = queue.pop_front() { if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() { for (name, entry) in entries { @@ -2020,7 +2026,11 @@ pub async fn copy_recursive<'a>( let Ok(item_relative_path) = item.strip_prefix(source) else { continue; }; - let target_item = target.join(item_relative_path); + let target_item = if item_relative_path == Path::new("") { + target.to_path_buf() + } else { + target.join(item_relative_path) + }; if is_dir { if !options.overwrite && fs.metadata(&target_item).await.is_ok_and(|m| m.is_some()) { if options.ignore_if_exists { @@ -2174,6 +2184,142 @@ mod tests { ); } + #[gpui::test] + async fn test_copy_recursive_with_single_file(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor.clone()); + fs.insert_tree( + path!("/outer"), + json!({ + "a": "A", + "b": "B", + "inner": {} + }), + ) + .await; + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/outer/a")), + PathBuf::from(path!("/outer/b")), + ] + ); + + let source = Path::new(path!("/outer/a")); + let target = Path::new(path!("/outer/a copy")); + copy_recursive(fs.as_ref(), source, target, Default::default()) + .await + .unwrap(); + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/outer/a")), + PathBuf::from(path!("/outer/a copy")), + PathBuf::from(path!("/outer/b")), + ] + ); + + let source = Path::new(path!("/outer/a")); + let target = Path::new(path!("/outer/inner/a copy")); + copy_recursive(fs.as_ref(), source, target, Default::default()) + .await + .unwrap(); + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/outer/a")), + PathBuf::from(path!("/outer/a copy")), + PathBuf::from(path!("/outer/b")), + PathBuf::from(path!("/outer/inner/a copy")), + ] + ); + } + + #[gpui::test] + async fn test_copy_recursive_with_single_dir(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor.clone()); + fs.insert_tree( + path!("/outer"), + json!({ + "a": "A", + "empty": {}, + "non-empty": { + "b": "B", + } + }), + ) + .await; + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/outer/a")), + PathBuf::from(path!("/outer/non-empty/b")), + ] + ); + assert_eq!( + fs.directories(false), + vec![ + PathBuf::from(path!("/")), + PathBuf::from(path!("/outer")), + PathBuf::from(path!("/outer/empty")), + 
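A small standalone illustration of the guarded join added to copy_recursive above: when the source is a single file, the first walked item is the source itself, strip_prefix returns an empty relative path, and an unguarded join would only append a separator to the target, which appears to be why the patch returns the target as-is in that case. copy_target_for is a hypothetical helper written only for this sketch; plain std paths, Unix separators assumed, no FakeFs.

use std::path::{Path, PathBuf};

// Mirror of the guarded join introduced in copy_recursive: an empty relative
// path (item == source, i.e. a single-file copy) maps to `target` itself.
fn copy_target_for(item: &Path, source: &Path, target: &Path) -> Option<PathBuf> {
    let relative = item.strip_prefix(source).ok()?;
    Some(if relative == Path::new("") {
        target.to_path_buf()
    } else {
        target.join(relative)
    })
}

fn main() {
    let source = Path::new("/outer/a");
    let target = Path::new("/outer/a copy");

    // Single-file copy: the walked item is the source itself.
    assert_eq!(
        copy_target_for(source, source, target),
        Some(PathBuf::from("/outer/a copy"))
    );
    // The unguarded join would only append a separator (Unix shown here).
    assert_eq!(target.join("").to_string_lossy(), "/outer/a copy/");

    // Directory copy: nested items still join relative to the new root.
    let dir_source = Path::new("/outer/non-empty");
    let dir_target = Path::new("/outer/non-empty copy");
    assert_eq!(
        copy_target_for(Path::new("/outer/non-empty/b"), dir_source, dir_target),
        Some(PathBuf::from("/outer/non-empty copy/b"))
    );
}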
PathBuf::from(path!("/outer/non-empty")), + ] + ); + + let source = Path::new(path!("/outer/empty")); + let target = Path::new(path!("/outer/empty copy")); + copy_recursive(fs.as_ref(), source, target, Default::default()) + .await + .unwrap(); + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/outer/a")), + PathBuf::from(path!("/outer/non-empty/b")), + ] + ); + assert_eq!( + fs.directories(false), + vec![ + PathBuf::from(path!("/")), + PathBuf::from(path!("/outer")), + PathBuf::from(path!("/outer/empty")), + PathBuf::from(path!("/outer/empty copy")), + PathBuf::from(path!("/outer/non-empty")), + ] + ); + + let source = Path::new(path!("/outer/non-empty")); + let target = Path::new(path!("/outer/non-empty copy")); + copy_recursive(fs.as_ref(), source, target, Default::default()) + .await + .unwrap(); + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/outer/a")), + PathBuf::from(path!("/outer/non-empty/b")), + PathBuf::from(path!("/outer/non-empty copy/b")), + ] + ); + assert_eq!( + fs.directories(false), + vec![ + PathBuf::from(path!("/")), + PathBuf::from(path!("/outer")), + PathBuf::from(path!("/outer/empty")), + PathBuf::from(path!("/outer/empty copy")), + PathBuf::from(path!("/outer/non-empty")), + PathBuf::from(path!("/outer/non-empty copy")), + ] + ); + } + #[gpui::test] async fn test_copy_recursive(executor: BackgroundExecutor) { let fs = FakeFs::new(executor.clone()); @@ -2185,7 +2331,8 @@ mod tests { "b": "B", "inner3": { "d": "D", - } + }, + "inner4": {} }, "inner2": { "c": "C", @@ -2203,6 +2350,17 @@ mod tests { PathBuf::from(path!("/outer/inner1/inner3/d")), ] ); + assert_eq!( + fs.directories(false), + vec![ + PathBuf::from(path!("/")), + PathBuf::from(path!("/outer")), + PathBuf::from(path!("/outer/inner1")), + PathBuf::from(path!("/outer/inner2")), + PathBuf::from(path!("/outer/inner1/inner3")), + PathBuf::from(path!("/outer/inner1/inner4")), + ] + ); let source = Path::new(path!("/outer")); let target = Path::new(path!("/outer/inner1/outer")); @@ -2223,6 +2381,22 @@ mod tests { PathBuf::from(path!("/outer/inner1/outer/inner1/inner3/d")), ] ); + assert_eq!( + fs.directories(false), + vec![ + PathBuf::from(path!("/")), + PathBuf::from(path!("/outer")), + PathBuf::from(path!("/outer/inner1")), + PathBuf::from(path!("/outer/inner2")), + PathBuf::from(path!("/outer/inner1/inner3")), + PathBuf::from(path!("/outer/inner1/inner4")), + PathBuf::from(path!("/outer/inner1/outer")), + PathBuf::from(path!("/outer/inner1/outer/inner1")), + PathBuf::from(path!("/outer/inner1/outer/inner2")), + PathBuf::from(path!("/outer/inner1/outer/inner1/inner3")), + PathBuf::from(path!("/outer/inner1/outer/inner1/inner4")), + ] + ); } #[gpui::test] diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 4eefe6c262fe59..0473b1dd57d269 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -26,6 +26,7 @@ log.workspace = true parking_lot.workspace = true regex.workspace = true rope.workspace = true +schemars.workspace = true serde.workspace = true smol.workspace = true sum_tree.workspace = true diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 21cd982b09f0ae..d68d9f7b655e58 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -8,6 +8,9 @@ pub mod status; use anyhow::{anyhow, Context as _, Result}; use gpui::action_with_deprecated_aliases; use gpui::actions; +use gpui::impl_actions; +use repository::PushOptions; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::ffi::OsStr; use std::fmt; @@ -27,6 +30,13 @@ pub 
static COMMIT_MESSAGE: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("COMMIT_EDITMSG")); pub static INDEX_LOCK: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("index.lock")); +#[derive(Debug, Copy, Clone, PartialEq, Deserialize, JsonSchema)] +pub struct Push { + pub options: Option, +} + +impl_actions!(git, [Push]); + actions!( git, [ @@ -43,6 +53,8 @@ actions!( RestoreTrackedFiles, TrashUntrackedFiles, Uncommit, + Pull, + Fetch, Commit, ] ); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index b29d4b226d059f..7b68507eca4135 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -7,6 +7,8 @@ use git2::BranchType; use gpui::SharedString; use parking_lot::Mutex; use rope::Rope; +use schemars::JsonSchema; +use serde::Deserialize; use std::borrow::Borrow; use std::io::Write as _; use std::process::Stdio; @@ -29,6 +31,12 @@ pub struct Branch { } impl Branch { + pub fn tracking_status(&self) -> Option { + self.upstream + .as_ref() + .and_then(|upstream| upstream.tracking.status()) + } + pub fn priority_key(&self) -> (bool, Option) { ( self.is_head, @@ -42,11 +50,32 @@ impl Branch { #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Upstream { pub ref_name: SharedString, - pub tracking: Option, + pub tracking: UpstreamTracking, } -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub struct UpstreamTracking { +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] +pub enum UpstreamTracking { + /// Remote ref not present in local repository. + Gone, + /// Remote ref present in local repository (fetched from remote). + Tracked(UpstreamTrackingStatus), +} + +impl UpstreamTracking { + pub fn is_gone(&self) -> bool { + matches!(self, UpstreamTracking::Gone) + } + + pub fn status(&self) -> Option { + match self { + UpstreamTracking::Gone => None, + UpstreamTracking::Tracked(status) => Some(*status), + } + } +} + +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] +pub struct UpstreamTrackingStatus { pub ahead: u32, pub behind: u32, } @@ -68,6 +97,11 @@ pub struct CommitDetails { pub committer_name: SharedString, } +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct Remote { + pub name: SharedString, +} + pub enum ResetMode { // reset the branch pointer, leave index and worktree unchanged // (this will make it look like things that were committed are now @@ -139,6 +173,22 @@ pub trait GitRepository: Send + Sync { fn unstage_paths(&self, paths: &[RepoPath]) -> Result<()>; fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()>; + + fn push( + &self, + branch_name: &str, + upstream_name: &str, + options: Option, + ) -> Result<()>; + fn pull(&self, branch_name: &str, upstream_name: &str) -> Result<()>; + fn get_remotes(&self, branch_name: Option<&str>) -> Result>; + fn fetch(&self) -> Result<()>; +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, JsonSchema)] +pub enum PushOptions { + SetUpstream, + Force, } impl std::fmt::Debug for dyn GitRepository { @@ -165,6 +215,14 @@ impl RealGitRepository { hosting_provider_registry, } } + + fn working_directory(&self) -> Result { + self.repository + .lock() + .workdir() + .context("failed to read git work directory") + .map(Path::to_path_buf) + } } // https://git-scm.com/book/en/v2/Git-Internals-Git-Objects @@ -209,12 +267,7 @@ impl GitRepository for RealGitRepository { } fn reset(&self, commit: &str, mode: ResetMode) -> Result<()> { - let working_directory = self - .repository - .lock() - .workdir() - .context("failed to read git work directory")? 
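The upstream field changes from an optional tracking struct to a two-state enum, Gone versus Tracked, and the parse of git's tracking string is reworked later in this patch to match. A simplified, self-contained restatement of that shape and parse, assuming the anyhow crate already used throughout these crates; any validation the real parse_upstream_track performs outside the shown hunks is omitted here.

use anyhow::Result;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct UpstreamTrackingStatus {
    ahead: u32,
    behind: u32,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum UpstreamTracking {
    /// Remote ref not present in the local repository.
    Gone,
    /// Remote ref present locally (fetched from the remote).
    Tracked(UpstreamTrackingStatus),
}

impl UpstreamTracking {
    fn is_gone(&self) -> bool {
        matches!(self, UpstreamTracking::Gone)
    }
    fn status(&self) -> Option<UpstreamTrackingStatus> {
        match self {
            UpstreamTracking::Gone => None,
            UpstreamTracking::Tracked(status) => Some(*status),
        }
    }
}

// Simplified version of parse_upstream_track as reworked later in this patch.
fn parse_upstream_track(input: &str) -> Result<UpstreamTracking> {
    if input.is_empty() {
        return Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead: 0, behind: 0 }));
    }
    let (mut ahead, mut behind) = (0u32, 0u32);
    for component in input.split(", ") {
        if component == "gone" {
            return Ok(UpstreamTracking::Gone);
        }
        if let Some(n) = component.strip_prefix("ahead ") {
            ahead = n.parse()?;
        } else if let Some(n) = component.strip_prefix("behind ") {
            behind = n.parse()?;
        }
    }
    Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead, behind }))
}

fn main() -> Result<()> {
    assert_eq!(parse_upstream_track("gone")?, UpstreamTracking::Gone);
    let tracking = parse_upstream_track("ahead 2, behind 1")?;
    assert!(!tracking.is_gone());
    assert_eq!(
        tracking.status(),
        Some(UpstreamTrackingStatus { ahead: 2, behind: 1 })
    );
    Ok(())
}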
- .to_path_buf(); + let working_directory = self.working_directory()?; let mode_flag = match mode { ResetMode::Mixed => "--mixed", @@ -238,12 +291,7 @@ impl GitRepository for RealGitRepository { if paths.is_empty() { return Ok(()); } - let working_directory = self - .repository - .lock() - .workdir() - .context("failed to read git work directory")? - .to_path_buf(); + let working_directory = self.working_directory()?; let output = new_std_command(&self.git_binary_path) .current_dir(&working_directory) @@ -296,12 +344,7 @@ impl GitRepository for RealGitRepository { } fn set_index_text(&self, path: &RepoPath, content: Option) -> anyhow::Result<()> { - let working_directory = self - .repository - .lock() - .workdir() - .context("failed to read git work directory")? - .to_path_buf(); + let working_directory = self.working_directory()?; if let Some(content) = content { let mut child = new_std_command(&self.git_binary_path) .current_dir(&working_directory) @@ -485,12 +528,7 @@ impl GitRepository for RealGitRepository { } fn stage_paths(&self, paths: &[RepoPath]) -> Result<()> { - let working_directory = self - .repository - .lock() - .workdir() - .context("failed to read git work directory")? - .to_path_buf(); + let working_directory = self.working_directory()?; if !paths.is_empty() { let output = new_std_command(&self.git_binary_path) @@ -498,6 +536,8 @@ impl GitRepository for RealGitRepository { .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.as_ref())) .output()?; + + // TODO: Get remote response out of this and show it to the user if !output.status.success() { return Err(anyhow!( "Failed to stage paths:\n{}", @@ -509,12 +549,7 @@ impl GitRepository for RealGitRepository { } fn unstage_paths(&self, paths: &[RepoPath]) -> Result<()> { - let working_directory = self - .repository - .lock() - .workdir() - .context("failed to read git work directory")? - .to_path_buf(); + let working_directory = self.working_directory()?; if !paths.is_empty() { let output = new_std_command(&self.git_binary_path) @@ -522,6 +557,8 @@ impl GitRepository for RealGitRepository { .args(["reset", "--quiet", "--"]) .args(paths.iter().map(|p| p.as_ref())) .output()?; + + // TODO: Get remote response out of this and show it to the user if !output.status.success() { return Err(anyhow!( "Failed to unstage:\n{}", @@ -533,32 +570,141 @@ impl GitRepository for RealGitRepository { } fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()> { - let working_directory = self - .repository - .lock() - .workdir() - .context("failed to read git work directory")? 
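For the push support added below, each PushOptions variant maps to exactly one git flag: SetUpstream to --set-upstream and Force to --force-with-lease. A hedged sketch of that argument construction using std::process::Command in place of Zed's new_std_command helper; push_command is a name made up for this example, and the branch/remote values are illustrative.

use std::process::Command;

#[derive(Clone, Copy)]
enum PushOptions {
    SetUpstream,
    Force,
}

// Build `git push --quiet [flag] <remote> <branch>:<branch>`, matching the
// option-to-flag mapping introduced in this patch.
fn push_command(branch: &str, remote: &str, options: Option<PushOptions>) -> Command {
    let mut cmd = Command::new("git");
    cmd.args(["push", "--quiet"]);
    cmd.args(options.map(|option| match option {
        PushOptions::SetUpstream => "--set-upstream",
        PushOptions::Force => "--force-with-lease",
    }));
    cmd.arg(remote).arg(format!("{branch}:{branch}"));
    cmd
}

fn main() {
    let cmd = push_command("zed-patches", "origin", Some(PushOptions::Force));
    let args: Vec<String> = cmd
        .get_args()
        .map(|arg| arg.to_string_lossy().into_owned())
        .collect();
    assert_eq!(
        args,
        ["push", "--quiet", "--force-with-lease", "origin", "zed-patches:zed-patches"]
    );
}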
- .to_path_buf(); - let mut args = vec!["commit", "--quiet", "-m", message, "--cleanup=strip"]; - let author = name_and_email.map(|(name, email)| format!("{name} <{email}>")); - if let Some(author) = author.as_deref() { - args.push("--author"); - args.push(author); + let working_directory = self.working_directory()?; + + let mut cmd = new_std_command(&self.git_binary_path); + cmd.current_dir(&working_directory) + .args(["commit", "--quiet", "-m"]) + .arg(message) + .arg("--cleanup=strip"); + + if let Some((name, email)) = name_and_email { + cmd.arg("--author").arg(&format!("{name} <{email}>")); } + let output = cmd.output()?; + + // TODO: Get remote response out of this and show it to the user + if !output.status.success() { + return Err(anyhow!( + "Failed to commit:\n{}", + String::from_utf8_lossy(&output.stderr) + )); + } + Ok(()) + } + + fn push( + &self, + branch_name: &str, + remote_name: &str, + options: Option, + ) -> Result<()> { + let working_directory = self.working_directory()?; + let output = new_std_command(&self.git_binary_path) .current_dir(&working_directory) - .args(args) + .args(["push", "--quiet"]) + .args(options.map(|option| match option { + PushOptions::SetUpstream => "--set-upstream", + PushOptions::Force => "--force-with-lease", + })) + .arg(remote_name) + .arg(format!("{}:{}", branch_name, branch_name)) .output()?; if !output.status.success() { return Err(anyhow!( - "Failed to commit:\n{}", + "Failed to push:\n{}", String::from_utf8_lossy(&output.stderr) )); } + + // TODO: Get remote response out of this and show it to the user Ok(()) } + + fn pull(&self, branch_name: &str, remote_name: &str) -> Result<()> { + let working_directory = self.working_directory()?; + + let output = new_std_command(&self.git_binary_path) + .current_dir(&working_directory) + .args(["pull", "--quiet"]) + .arg(remote_name) + .arg(branch_name) + .output()?; + + if !output.status.success() { + return Err(anyhow!( + "Failed to pull:\n{}", + String::from_utf8_lossy(&output.stderr) + )); + } + + // TODO: Get remote response out of this and show it to the user + Ok(()) + } + + fn fetch(&self) -> Result<()> { + let working_directory = self.working_directory()?; + + let output = new_std_command(&self.git_binary_path) + .current_dir(&working_directory) + .args(["fetch", "--quiet", "--all"]) + .output()?; + + if !output.status.success() { + return Err(anyhow!( + "Failed to fetch:\n{}", + String::from_utf8_lossy(&output.stderr) + )); + } + + // TODO: Get remote response out of this and show it to the user + Ok(()) + } + + fn get_remotes(&self, branch_name: Option<&str>) -> Result> { + let working_directory = self.working_directory()?; + + if let Some(branch_name) = branch_name { + let output = new_std_command(&self.git_binary_path) + .current_dir(&working_directory) + .args(["config", "--get"]) + .arg(format!("branch.{}.remote", branch_name)) + .output()?; + + if output.status.success() { + let remote_name = String::from_utf8_lossy(&output.stdout); + + return Ok(vec![Remote { + name: remote_name.trim().to_string().into(), + }]); + } + } + + let output = new_std_command(&self.git_binary_path) + .current_dir(&working_directory) + .args(["remote"]) + .output()?; + + if output.status.success() { + let remote_names = String::from_utf8_lossy(&output.stdout) + .split('\n') + .filter(|name| !name.is_empty()) + .map(|name| Remote { + name: name.trim().to_string().into(), + }) + .collect(); + + return Ok(remote_names); + } else { + return Err(anyhow!( + "Failed to get remotes:\n{}", + 
String::from_utf8_lossy(&output.stderr) + )); + } + } } #[derive(Debug, Clone)] @@ -743,6 +889,22 @@ impl GitRepository for FakeGitRepository { fn commit(&self, _message: &str, _name_and_email: Option<(&str, &str)>) -> Result<()> { unimplemented!() } + + fn push(&self, _branch: &str, _remote: &str, _options: Option) -> Result<()> { + unimplemented!() + } + + fn pull(&self, _branch: &str, _remote: &str) -> Result<()> { + unimplemented!() + } + + fn fetch(&self) -> Result<()> { + unimplemented!() + } + + fn get_remotes(&self, _branch: Option<&str>) -> Result> { + unimplemented!() + } } fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> { @@ -911,9 +1073,9 @@ fn parse_branch_input(input: &str) -> Result> { Ok(branches) } -fn parse_upstream_track(upstream_track: &str) -> Result> { +fn parse_upstream_track(upstream_track: &str) -> Result { if upstream_track == "" { - return Ok(Some(UpstreamTracking { + return Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead: 0, behind: 0, })); @@ -929,7 +1091,7 @@ fn parse_upstream_track(upstream_track: &str) -> Result let mut behind: u32 = 0; for component in upstream_track.split(", ") { if component == "gone" { - return Ok(None); + return Ok(UpstreamTracking::Gone); } if let Some(ahead_num) = component.strip_prefix("ahead ") { ahead = ahead_num.parse::()?; @@ -938,7 +1100,10 @@ fn parse_upstream_track(upstream_track: &str) -> Result behind = behind_num.parse::()?; } } - Ok(Some(UpstreamTracking { ahead, behind })) + Ok(UpstreamTracking::Tracked(UpstreamTrackingStatus { + ahead, + behind, + })) } #[test] @@ -953,7 +1118,7 @@ fn test_branches_parsing() { name: "zed-patches".into(), upstream: Some(Upstream { ref_name: "refs/remotes/origin/zed-patches".into(), - tracking: Some(UpstreamTracking { + tracking: UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead: 0, behind: 0 }) diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index d6233dd8237119..33febb7af54a5c 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -261,7 +261,7 @@ impl PickerDelegate for BranchListDelegate { .project() .read(cx) .active_repository(cx) - .and_then(|repo| repo.read(cx).branch()) + .and_then(|repo| repo.read(cx).current_branch()) .map(|branch| branch.name.to_string()) }) .ok() diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 6cc67e11496dbf..4c6c8cdd8452d9 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -4,13 +4,17 @@ use crate::git_panel::{commit_message_editor, GitPanel}; use crate::repository_selector::RepositorySelector; use anyhow::Result; use git::Commit; +use language::language_settings::LanguageSettings; use language::Buffer; -use panel::{panel_editor_container, panel_editor_style, panel_filled_button, panel_icon_button}; +use panel::{ + panel_button, panel_editor_container, panel_editor_style, panel_filled_button, + panel_icon_button, +}; use settings::Settings; use theme::ThemeSettings; -use ui::{prelude::*, Tooltip}; +use ui::{prelude::*, KeybindingHint, Tooltip}; -use editor::{Editor, EditorElement, EditorMode, MultiBuffer}; +use editor::{Direction, Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer}; use gpui::*; use project::git::Repository; use project::{Fs, Project}; @@ -18,6 +22,8 @@ use std::sync::Arc; use workspace::dock::{Dock, DockPosition, PanelHandle}; use workspace::{ModalView, Workspace}; +// actions!(commit_modal, [NextSuggestion, PrevSuggestion]); + pub 
fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, window, cx| { let Some(window) = window else { @@ -32,6 +38,8 @@ pub struct CommitModal { git_panel: Entity, commit_editor: Entity, restore_dock: RestoreDock, + current_suggestion: Option, + suggested_messages: Vec, } impl Focusable for CommitModal { @@ -114,6 +122,7 @@ impl CommitModal { cx: &mut Context, ) -> Self { let panel = git_panel.read(cx); + let suggested_message = panel.suggest_commit_message(); let commit_editor = git_panel.update(cx, |git_panel, cx| { git_panel.set_modal_open(true, cx); @@ -122,11 +131,137 @@ impl CommitModal { cx.new(|cx| commit_message_editor(buffer, project.clone(), false, window, cx)) }); + let commit_message = commit_editor.read(cx).text(cx); + + if let Some(suggested_message) = suggested_message { + if commit_message.is_empty() { + commit_editor.update(cx, |editor, cx| { + editor.set_text(suggested_message, window, cx); + editor.select_all(&Default::default(), window, cx); + }); + } else { + if commit_message.as_str().trim() == suggested_message.trim() { + commit_editor.update(cx, |editor, cx| { + // select the message to make it easy to delete + editor.select_all(&Default::default(), window, cx); + }); + } + } + } + + let focus_handle = commit_editor.focus_handle(cx); + + cx.on_focus_out(&focus_handle, window, |this, _, window, cx| { + cx.emit(DismissEvent); + }) + .detach(); + Self { git_panel, commit_editor, restore_dock, + current_suggestion: None, + suggested_messages: vec![], + } + } + + /// Returns container `(width, x padding, border radius)` + fn container_properties(&self, window: &mut Window, cx: &mut Context) -> (f32, f32, f32) { + // TODO: Let's set the width based on your set wrap guide if possible + + // let settings = EditorSettings::get_global(cx); + + // let first_wrap_guide = self + // .commit_editor + // .read(cx) + // .wrap_guides(cx) + // .iter() + // .next() + // .map(|(guide, active)| if *active { Some(*guide) } else { None }) + // .flatten(); + + // let preferred_width = if let Some(guide) = first_wrap_guide { + // guide + // } else { + // 80 + // }; + + let border_radius = 16.0; + + let preferred_width = 50; // (chars wide) + + let mut width = 460.0; + let padding_x = 16.0; + + let mut snapshot = self + .commit_editor + .update(cx, |editor, cx| editor.snapshot(window, cx)); + let style = window.text_style().clone(); + + let font_id = window.text_system().resolve_font(&style.font()); + let font_size = style.font_size.to_pixels(window.rem_size()); + let line_height = style.line_height_in_pixels(window.rem_size()); + if let Ok(em_width) = window.text_system().em_width(font_id, font_size) { + width = preferred_width as f32 * em_width.0 + (padding_x * 2.0); + cx.notify(); } + + // cx.notify(); + + (width, padding_x, border_radius) + } + + // fn cycle_suggested_messages(&mut self, direction: Direction, cx: &mut Context) { + // let new_index = match direction { + // Direction::Next => { + // (self.current_suggestion.unwrap_or(0) + 1).rem_euclid(self.suggested_messages.len()) + // } + // Direction::Prev => { + // (self.current_suggestion.unwrap_or(0) + self.suggested_messages.len() - 1) + // .rem_euclid(self.suggested_messages.len()) + // } + // }; + // self.current_suggestion = Some(new_index); + + // cx.notify(); + // } + + // fn next_suggestion(&mut self, _: &NextSuggestion, window: &mut Window, cx: &mut Context) { + // self.current_suggestion = Some(1); + // self.apply_suggestion(window, cx); + // } + + // fn prev_suggestion(&mut self, _: &PrevSuggestion, window: 
&mut Window, cx: &mut Context) { + // self.current_suggestion = Some(0); + // self.apply_suggestion(window, cx); + // } + + // fn set_commit_message(&mut self, message: &str, window: &mut Window, cx: &mut Context) { + // self.commit_editor.update(cx, |editor, cx| { + // editor.set_text(message.to_string(), window, cx) + // }); + // self.current_suggestion = Some(0); + // cx.notify(); + // } + + // fn apply_suggestion(&mut self, window: &mut Window, cx: &mut Context) { + // let suggested_messages = self.suggested_messages.clone(); + + // if let Some(suggestion) = self.current_suggestion { + // let suggested_message = &suggested_messages[suggestion]; + + // self.set_commit_message(suggested_message, window, cx); + // } + + // cx.notify(); + // } + + fn commit_editor_element(&self, window: &mut Window, cx: &mut Context) -> EditorElement { + let mut editor = self.commit_editor.clone(); + + let editor_style = panel_editor_style(true, window, cx); + + EditorElement::new(&self.commit_editor, editor_style) } pub fn render_commit_editor( @@ -135,23 +270,144 @@ impl CommitModal { window: &mut Window, cx: &mut Context, ) -> impl IntoElement { - let editor = self.commit_editor.clone(); + let (width, padding_x, modal_border_radius) = self.container_properties(window, cx); - let panel_editor_style = panel_editor_style(true, window, cx); + let border_radius = modal_border_radius - padding_x / 2.0; + + let editor = self.commit_editor.clone(); + let editor_focus_handle = editor.focus_handle(cx); let settings = ThemeSettings::get_global(cx); let line_height = relative(settings.buffer_line_height.value()) .to_pixels(settings.buffer_font_size(cx).into(), window.rem_size()); + let mut snapshot = self + .commit_editor + .update(cx, |editor, cx| editor.snapshot(window, cx)); + let style = window.text_style().clone(); + + let font_id = window.text_system().resolve_font(&style.font()); + let font_size = style.font_size.to_pixels(window.rem_size()); + let line_height = style.line_height_in_pixels(window.rem_size()); + let em_width = window.text_system().em_width(font_id, font_size); + + let (branch, tooltip, commit_label, co_authors) = + self.git_panel.update(cx, |git_panel, cx| { + let branch = git_panel + .active_repository + .as_ref() + .and_then(|repo| repo.read(cx).current_branch().map(|b| b.name.clone())) + .unwrap_or_else(|| "".into()); + let tooltip = if git_panel.has_staged_changes() { + "Commit staged changes" + } else { + "Commit changes to tracked files" + }; + let title = if git_panel.has_staged_changes() { + "Commit" + } else { + "Commit Tracked" + }; + let co_authors = git_panel.render_co_authors(cx); + (branch, tooltip, title, co_authors) + }); + + let branch_selector = panel_button(branch) + .icon(IconName::GitBranch) + .icon_size(IconSize::Small) + .icon_color(Color::Placeholder) + .color(Color::Muted) + .icon_position(IconPosition::Start) + .tooltip(Tooltip::for_action_title( + "Switch Branch", + &zed_actions::git::Branch, + )) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); + })) + .style(ButtonStyle::Transparent); + + let changes_count = self.git_panel.read(cx).total_staged_count(); + + let close_kb_hint = + if let Some(close_kb) = ui::KeyBinding::for_action(&menu::Cancel, window, cx) { + Some( + KeybindingHint::new(close_kb, cx.theme().colors().editor_background) + .suffix("Cancel"), + ) + } else { + None + }; + + let fake_commit_kb = + ui::KeyBinding::new(gpui::KeyBinding::new("cmd-enter", gpui::NoAction, None), cx); + + let 
commit_hint = + KeybindingHint::new(fake_commit_kb, cx.theme().colors().editor_background) + .suffix(commit_label); + + let focus_handle = self.focus_handle(cx); + + // let next_suggestion_kb = + // ui::KeyBinding::for_action_in(&NextSuggestion, &focus_handle.clone(), window, cx); + // let next_suggestion_hint = next_suggestion_kb.map(|kb| { + // KeybindingHint::new(kb, cx.theme().colors().editor_background).suffix("Next Suggestion") + // }); + + // let prev_suggestion_kb = + // ui::KeyBinding::for_action_in(&PrevSuggestion, &focus_handle.clone(), window, cx); + // let prev_suggestion_hint = prev_suggestion_kb.map(|kb| { + // KeybindingHint::new(kb, cx.theme().colors().editor_background) + // .suffix("Previous Suggestion") + // }); + v_flex() - .justify_between() - .relative() - .w_full() - .h_full() - .pt_2() + .id("editor-container") .bg(cx.theme().colors().editor_background) - .child(EditorElement::new(&self.commit_editor, panel_editor_style)) - .child(self.render_footer(window, cx)) + .flex_1() + .size_full() + .rounded(px(border_radius)) + .overflow_hidden() + .border_1() + .border_color(cx.theme().colors().border_variant) + .py_2() + .px_3() + .on_click(cx.listener(move |_, _: &ClickEvent, window, _cx| { + window.focus(&editor_focus_handle); + })) + .child( + div() + .size_full() + .flex_1() + .child(self.commit_editor_element(window, cx)), + ) + .child( + h_flex() + .group("commit_editor_footer") + .flex_none() + .w_full() + .items_center() + .justify_between() + .w_full() + .pt_2() + .pb_0p5() + .gap_1() + .child(h_flex().gap_1().child(branch_selector).children(co_authors)) + .child(div().flex_1()) + .child( + h_flex() + .opacity(0.7) + .group_hover("commit_editor_footer", |this| this.opacity(1.0)) + .items_center() + .justify_end() + .flex_none() + .px_1() + .gap_4() + .children(close_kb_hint) + // .children(next_suggestion_hint) + .child(commit_hint), + ), + ) } pub fn render_footer(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { @@ -159,7 +415,12 @@ impl CommitModal { let branch = git_panel .active_repository .as_ref() - .and_then(|repo| repo.read(cx).branch().map(|b| b.name.clone())) + .and_then(|repo| { + repo.read(cx) + .repository_entry + .branch() + .map(|b| b.name.clone()) + }) .unwrap_or_else(|| "".into()); let tooltip = if git_panel.has_staged_changes() { "Commit staged changes" @@ -175,13 +436,10 @@ impl CommitModal { (branch, tooltip, title, co_authors) }); - let branch_selector = Button::new("branch-selector", branch) - .color(Color::Muted) - .style(ButtonStyle::Subtle) + let branch_selector = panel_button(branch) .icon(IconName::GitBranch) .icon_size(IconSize::Small) .icon_color(Color::Muted) - .size(ButtonSize::Compact) .icon_position(IconPosition::Start) .tooltip(Tooltip::for_action_title( "Switch Branch", @@ -191,13 +449,29 @@ impl CommitModal { window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); })) .style(ButtonStyle::Transparent); + + let changes_count = self.git_panel.read(cx).total_staged_count(); + + let close_kb_hint = + if let Some(close_kb) = ui::KeyBinding::for_action(&menu::Cancel, window, cx) { + Some( + KeybindingHint::new(close_kb, cx.theme().colors().editor_background) + .suffix("Cancel"), + ) + } else { + None + }; + h_flex() + .items_center() + .h(px(36.0)) .w_full() .justify_between() - .child(branch_selector) + .px_3() + .child(h_flex().child(branch_selector)) .child( - h_flex().children(co_authors).child( - panel_filled_button(title) + h_flex().gap_1p5().children(co_authors).child( + 
Button::new("stage-button", title) .tooltip(Tooltip::for_action_title(tooltip, &git::Commit)) .on_click(cx.listener(|this, _, window, cx| { this.commit(&Default::default(), window, cx); @@ -206,6 +480,10 @@ impl CommitModal { ) } + fn border_radius(&self) -> f32 { + 8.0 + } + fn dismiss(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context) { cx.emit(DismissEvent); } @@ -218,27 +496,33 @@ impl CommitModal { impl Render for CommitModal { fn render(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) -> impl IntoElement { + let (width, _, border_radius) = self.container_properties(window, cx); + v_flex() .id("commit-modal") .key_context("GitCommit") .elevation_3(cx) + .overflow_hidden() .on_action(cx.listener(Self::dismiss)) .on_action(cx.listener(Self::commit)) + // .on_action(cx.listener(Self::next_suggestion)) + // .on_action(cx.listener(Self::prev_suggestion)) .relative() - .bg(cx.theme().colors().editor_background) - .rounded(px(16.)) + .justify_between() + .bg(cx.theme().colors().elevated_surface_background) + .rounded(px(border_radius)) .border_1() .border_color(cx.theme().colors().border) - .py_2() - .px_4() - .w(px(480.)) - .min_h(rems(18.)) + .w(px(width)) + .h(px(360.)) .flex_1() .overflow_hidden() .child( v_flex() .flex_1() + .p_2() .child(self.render_commit_editor(None, window, cx)), ) + // .child(self.render_footer(window, cx)) } } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 7099d06994e3d2..f8e96095d86212 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -4,17 +4,16 @@ use crate::repository_selector::RepositorySelectorPopoverMenu; use crate::{ git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector, }; -use crate::{project_diff, ProjectDiff}; -use collections::HashMap; +use crate::{picker_prompt, project_diff, ProjectDiff}; use db::kvp::KEY_VALUE_STORE; use editor::commit_tooltip::CommitTooltip; use editor::{ scroll::ScrollbarAutoHide, Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar, }; -use git::repository::{CommitDetails, ResetMode}; +use git::repository::{Branch, CommitDetails, PushOptions, Remote, ResetMode, UpstreamTracking}; use git::{repository::RepoPath, status::FileStatus, Commit, ToggleStaged}; -use git::{RestoreTrackedFiles, StageAll, TrashUntrackedFiles, UnstageAll}; +use git::{Push, RestoreTrackedFiles, StageAll, TrashUntrackedFiles, UnstageAll}; use gpui::*; use itertools::Itertools; use language::{Buffer, File}; @@ -27,14 +26,17 @@ use project::{ }; use serde::{Deserialize, Serialize}; use settings::Settings as _; -use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration, usize}; +use std::cell::RefCell; +use std::future::Future; +use std::rc::Rc; +use std::{collections::HashSet, sync::Arc, time::Duration, usize}; use strum::{IntoEnumIterator, VariantNames}; use time::OffsetDateTime; use ui::{ prelude::*, ButtonLike, Checkbox, ContextMenu, Divider, DividerColor, ElevationIndex, ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip, }; -use util::{maybe, ResultExt, TryFutureExt}; +use util::{maybe, post_inc, ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, notifications::{DetachAndPromptErr, NotificationId}, @@ -152,8 +154,6 @@ impl GitListEntry { #[derive(Debug, PartialEq, Eq, Clone)] pub struct GitStatusEntry { - pub(crate) depth: usize, - pub(crate) display_name: String, pub(crate) repo_path: RepoPath, pub(crate) status: FileStatus, pub(crate) 
is_staged: Option, @@ -174,15 +174,19 @@ struct PendingOperation { op_id: usize, } +type RemoteOperations = Rc>>; + pub struct GitPanel { + remote_operation_id: u32, + pending_remote_operations: RemoteOperations, pub(crate) active_repository: Option>, commit_editor: Entity, + suggested_commit_message: Option, conflicted_count: usize, conflicted_staged_count: usize, current_modifiers: Modifiers, add_coauthors: bool, entries: Vec, - entries_by_path: collections::HashMap, focus_handle: FocusHandle, fs: Arc, hide_scrollbar_task: Option>, @@ -206,6 +210,17 @@ pub struct GitPanel { modal_open: bool, } +struct RemoteOperationGuard { + id: u32, + pending_remote_operations: RemoteOperations, +} + +impl Drop for RemoteOperationGuard { + fn drop(&mut self) { + self.pending_remote_operations.borrow_mut().remove(&self.id); + } +} + pub(crate) fn commit_message_editor( commit_message_buffer: Entity, project: Entity, @@ -286,14 +301,16 @@ impl GitPanel { cx.new(|cx| RepositorySelector::new(project.clone(), window, cx)); let mut git_panel = Self { + pending_remote_operations: Default::default(), + remote_operation_id: 0, active_repository, commit_editor, + suggested_commit_message: None, conflicted_count: 0, conflicted_staged_count: 0, current_modifiers: window.modifiers(), add_coauthors: true, entries: Vec::new(), - entries_by_path: HashMap::default(), focus_handle: cx.focus_handle(), fs, hide_scrollbar_task: None, @@ -322,6 +339,80 @@ impl GitPanel { }) } + pub fn entry_by_path(&self, path: &RepoPath) -> Option { + fn binary_search(mut low: usize, mut high: usize, is_target: F) -> Option + where + F: Fn(usize) -> std::cmp::Ordering, + { + while low < high { + let mid = low + (high - low) / 2; + match is_target(mid) { + std::cmp::Ordering::Equal => return Some(mid), + std::cmp::Ordering::Less => low = mid + 1, + std::cmp::Ordering::Greater => high = mid, + } + } + None + } + if self.conflicted_count > 0 { + let conflicted_start = 1; + if let Some(ix) = binary_search( + conflicted_start, + conflicted_start + self.conflicted_count, + |ix| { + self.entries[ix] + .status_entry() + .unwrap() + .repo_path + .cmp(&path) + }, + ) { + return Some(ix); + } + } + if self.tracked_count > 0 { + let tracked_start = if self.conflicted_count > 0 { + 1 + self.conflicted_count + } else { + 0 + } + 1; + if let Some(ix) = + binary_search(tracked_start, tracked_start + self.tracked_count, |ix| { + self.entries[ix] + .status_entry() + .unwrap() + .repo_path + .cmp(&path) + }) + { + return Some(ix); + } + } + if self.new_count > 0 { + let untracked_start = if self.conflicted_count > 0 { + 1 + self.conflicted_count + } else { + 0 + } + if self.tracked_count > 0 { + 1 + self.tracked_count + } else { + 0 + } + 1; + if let Some(ix) = + binary_search(untracked_start, untracked_start + self.new_count, |ix| { + self.entries[ix] + .status_entry() + .unwrap() + .repo_path + .cmp(&path) + }) + { + return Some(ix); + } + } + None + } + pub fn select_entry_by_path( &mut self, path: ProjectPath, @@ -334,13 +425,23 @@ impl GitPanel { let Some(repo_path) = git_repo.read(cx).project_path_to_repo_path(&path) else { return; }; - let Some(ix) = self.entries_by_path.get(&repo_path) else { + let Some(ix) = self.entry_by_path(&repo_path) else { return; }; - self.selected_entry = Some(*ix); + self.selected_entry = Some(ix); cx.notify(); } + fn start_remote_operation(&mut self) -> RemoteOperationGuard { + let id = post_inc(&mut self.remote_operation_id); + self.pending_remote_operations.borrow_mut().insert(id); + + RemoteOperationGuard { + id, + 
pending_remote_operations: self.pending_remote_operations.clone(), + } + } + fn serialize(&mut self, cx: &mut Context) { let width = self.width; self.pending_serialization = cx.background_spawn( @@ -451,31 +552,6 @@ impl GitPanel { cx.notify(); } - fn calculate_depth_and_difference( - repo_path: &RepoPath, - visible_entries: &HashSet, - ) -> (usize, usize) { - let ancestors = repo_path.ancestors().skip(1); - for ancestor in ancestors { - if let Some(parent_entry) = visible_entries.get(ancestor) { - let entry_component_count = repo_path.components().count(); - let parent_component_count = parent_entry.components().count(); - - let difference = entry_component_count - parent_component_count; - - let parent_depth = parent_entry - .ancestors() - .skip(1) // Skip the parent itself - .filter(|ancestor| visible_entries.contains(*ancestor)) - .count(); - - return (parent_depth + 1, difference); - } - } - - (0, 0) - } - fn scroll_to_selected_entry(&mut self, cx: &mut Context) { if let Some(selected_entry) = self.selected_entry { self.scroll_handle @@ -1008,6 +1084,10 @@ impl GitPanel { .detach(); } + pub fn total_staged_count(&self) -> usize { + self.tracked_staged_count + self.new_staged_count + self.conflicted_staged_count + } + pub fn commit_message_buffer(&self, cx: &App) -> Entity { self.commit_editor .read(cx) @@ -1029,17 +1109,15 @@ impl GitPanel { } } - /// Commit all staged changes fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context) { - let editor = self.commit_editor.read(cx); - if editor.is_empty(cx) { - if !editor.focus_handle(cx).contains_focused(window, cx) { - editor.focus_handle(cx).focus(window); - return; - } + if self + .commit_editor + .focus_handle(cx) + .contains_focused(window, cx) + { + self.commit_changes(window, cx) } - - self.commit_changes(window, cx) + cx.propagate(); } pub(crate) fn commit_changes(&mut self, window: &mut Window, cx: &mut Context) { @@ -1121,23 +1199,44 @@ impl GitPanel { let Some(repo) = self.active_repository.clone() else { return; }; + + // TODO: Use git merge-base to find the upstream and main branch split + let confirmation = Task::ready(true); + // let confirmation = if self.commit_editor.read(cx).is_empty(cx) { + // Task::ready(true) + // } else { + // let prompt = window.prompt( + // PromptLevel::Warning, + // "Uncomitting will replace the current commit message with the previous commit's message", + // None, + // &["Ok", "Cancel"], + // cx, + // ); + // cx.spawn(|_, _| async move { prompt.await.is_ok_and(|i| i == 0) }) + // }; + let prior_head = self.load_commit_details("HEAD", cx); - let task = cx.spawn(|_, mut cx| async move { - let prior_head = prior_head.await?; + let task = cx.spawn_in(window, |this, mut cx| async move { + let result = maybe!(async { + if !confirmation.await { + Ok(None) + } else { + let prior_head = prior_head.await?; - repo.update(&mut cx, |repo, _| repo.reset("HEAD^", ResetMode::Soft))? - .await??; + repo.update(&mut cx, |repo, _| repo.reset("HEAD^", ResetMode::Soft))? 
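A minimal sketch of the RAII bookkeeping used for remote operations in this panel: fetch, pull, and push each hold a RemoteOperationGuard whose Drop removes its id from a shared set, so the spinner only needs to check whether the set is non-empty. Rc<RefCell<HashSet<u32>>> mirrors the single-threaded UI context; types are re-declared locally and field names are shortened for the example.

use std::cell::RefCell;
use std::collections::HashSet;
use std::rc::Rc;

type RemoteOperations = Rc<RefCell<HashSet<u32>>>;

struct RemoteOperationGuard {
    id: u32,
    pending: RemoteOperations,
}

impl Drop for RemoteOperationGuard {
    fn drop(&mut self) {
        // Finishing (or cancelling) the operation unregisters it.
        self.pending.borrow_mut().remove(&self.id);
    }
}

fn start_remote_operation(next_id: &mut u32, pending: &RemoteOperations) -> RemoteOperationGuard {
    let id = *next_id;
    *next_id += 1;
    pending.borrow_mut().insert(id);
    RemoteOperationGuard {
        id,
        pending: pending.clone(),
    }
}

fn main() {
    let pending: RemoteOperations = Default::default();
    let mut next_id = 0u32;

    {
        let _fetch = start_remote_operation(&mut next_id, &pending);
        // While any guard is alive, the panel shows its progress spinner.
        assert!(!pending.borrow().is_empty());
    }

    // Dropping the guard removes its id again, hiding the spinner.
    assert!(pending.borrow().is_empty());
}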
+ .await??; - Ok(prior_head) - }); + Ok(Some(prior_head)) + } + }) + .await; - let task = cx.spawn_in(window, |this, mut cx| async move { - let result = task.await; this.update_in(&mut cx, |this, window, cx| { this.pending_commit.take(); match result { - Ok(prior_commit) => { + Ok(None) => {} + Ok(Some(prior_commit)) => { this.commit_editor.update(cx, |editor, cx| { editor.set_text(prior_commit.message, window, cx) }); @@ -1151,6 +1250,176 @@ impl GitPanel { self.pending_commit = Some(task); } + /// Suggests a commit message based on the changed files and their statuses + pub fn suggest_commit_message(&self) -> Option { + let entries = self + .entries + .iter() + .filter_map(|entry| { + if let GitListEntry::GitStatusEntry(status_entry) = entry { + Some(status_entry) + } else { + None + } + }) + .collect::>(); + + if entries.is_empty() { + None + } else if entries.len() == 1 { + let entry = &entries[0]; + let file_name = entry + .repo_path + .file_name() + .unwrap_or_default() + .to_string_lossy(); + + if entry.status.is_deleted() { + Some(format!("Delete {}", file_name)) + } else if entry.status.is_created() { + Some(format!("Create {}", file_name)) + } else if entry.status.is_modified() { + Some(format!("Update {}", file_name)) + } else { + None + } + } else { + None + } + } + + fn update_editor_placeholder(&mut self, cx: &mut Context) { + let suggested_commit_message = self.suggest_commit_message(); + self.suggested_commit_message = suggested_commit_message.clone(); + + if let Some(suggested_commit_message) = suggested_commit_message { + self.commit_editor.update(cx, |editor, cx| { + editor.set_placeholder_text(Arc::from(suggested_commit_message), cx) + }); + } + + cx.notify(); + } + + fn fetch(&mut self, _: &git::Fetch, _window: &mut Window, cx: &mut Context) { + let Some(repo) = self.active_repository.clone() else { + return; + }; + let guard = self.start_remote_operation(); + let fetch = repo.read(cx).fetch(); + cx.spawn(|_, _| async move { + fetch.await??; + drop(guard); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn pull(&mut self, _: &git::Pull, window: &mut Window, cx: &mut Context) { + let guard = self.start_remote_operation(); + let remote = self.get_current_remote(window, cx); + cx.spawn(move |this, mut cx| async move { + let remote = remote.await?; + + this.update(&mut cx, |this, cx| { + let Some(repo) = this.active_repository.clone() else { + return Err(anyhow::anyhow!("No active repository")); + }; + + let Some(branch) = repo.read(cx).current_branch() else { + return Err(anyhow::anyhow!("No active branch")); + }; + + Ok(repo.read(cx).pull(branch.name.clone(), remote.name)) + })?? + .await??; + + drop(guard); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn push(&mut self, action: &git::Push, window: &mut Window, cx: &mut Context) { + let guard = self.start_remote_operation(); + let options = action.options; + let remote = self.get_current_remote(window, cx); + cx.spawn(move |this, mut cx| async move { + let remote = remote.await?; + + this.update(&mut cx, |this, cx| { + let Some(repo) = this.active_repository.clone() else { + return Err(anyhow::anyhow!("No active repository")); + }; + + let Some(branch) = repo.read(cx).current_branch() else { + return Err(anyhow::anyhow!("No active branch")); + }; + + Ok(repo + .read(cx) + .push(branch.name.clone(), remote.name, options)) + })?? 
+ .await??; + + drop(guard); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn get_current_remote( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> impl Future> { + let repo = self.active_repository.clone(); + let workspace = self.workspace.clone(); + let mut cx = window.to_async(cx); + + async move { + let Some(repo) = repo else { + return Err(anyhow::anyhow!("No active repository")); + }; + + let mut current_remotes: Vec = repo + .update(&mut cx, |repo, cx| { + let Some(current_branch) = repo.current_branch() else { + return Err(anyhow::anyhow!("No active branch")); + }; + + Ok(repo.get_remotes(Some(current_branch.name.to_string()), cx)) + })?? + .await?; + + if current_remotes.len() == 0 { + return Err(anyhow::anyhow!("No active remote")); + } else if current_remotes.len() == 1 { + return Ok(current_remotes.pop().unwrap()); + } else { + let current_remotes: Vec<_> = current_remotes + .into_iter() + .map(|remotes| remotes.name) + .collect(); + let selection = cx + .update(|window, cx| { + picker_prompt::prompt( + "Pick which remote to push to", + current_remotes.clone(), + workspace, + window, + cx, + ) + })? + .await?; + + return Ok(Remote { + name: current_remotes[selection].clone(), + }); + } + } + } + fn potential_co_authors(&self, cx: &App) -> Vec<(String, String)> { let mut new_co_authors = Vec::new(); let project = self.project.read(cx); @@ -1276,6 +1545,7 @@ impl GitPanel { git_panel.clear_pending(); } git_panel.update_visible_entries(cx); + git_panel.update_editor_placeholder(cx); }) .ok(); } @@ -1322,7 +1592,6 @@ impl GitPanel { fn update_visible_entries(&mut self, cx: &mut Context) { self.entries.clear(); - self.entries_by_path.clear(); let mut changed_entries = Vec::new(); let mut new_entries = Vec::new(); let mut conflict_entries = Vec::new(); @@ -1335,13 +1604,9 @@ impl GitPanel { // First pass - collect all paths let repo = repo.read(cx); - let path_set = HashSet::from_iter(repo.status().map(|entry| entry.repo_path)); // Second pass - create entries with proper depth calculation for entry in repo.status() { - let (depth, difference) = - Self::calculate_depth_and_difference(&entry.repo_path, &path_set); - let is_conflict = repo.has_conflict(&entry.repo_path); let is_new = entry.status.is_created(); let is_staged = entry.status.is_staged(); @@ -1354,28 +1619,7 @@ impl GitPanel { continue; } - let display_name = if difference > 1 { - // Show partial path for deeply nested files - entry - .repo_path - .as_ref() - .iter() - .skip(entry.repo_path.components().count() - difference) - .collect::() - .to_string_lossy() - .into_owned() - } else { - // Just show filename - entry - .repo_path - .file_name() - .map(|name| name.to_string_lossy().into_owned()) - .unwrap_or_default() - }; - let entry = GitStatusEntry { - depth, - display_name, repo_path: entry.repo_path.clone(), status: entry.status, is_staged, @@ -1390,11 +1634,6 @@ impl GitPanel { } } - // Sort entries by path to maintain consistent order - conflict_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); - changed_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); - new_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); - if conflict_entries.len() > 0 { self.entries.push(GitListEntry::Header(GitHeaderEntry { header: Section::Conflict, @@ -1424,12 +1663,6 @@ impl GitPanel { .extend(new_entries.into_iter().map(GitListEntry::GitStatusEntry)); } - for (ix, entry) in self.entries.iter().enumerate() { - if let Some(status_entry) = entry.status_entry() { - self.entries_by_path - 
.insert(status_entry.repo_path.clone(), ix); - } - } self.update_counts(repo); self.select_first_entry_if_none(cx); @@ -1591,15 +1824,23 @@ impl GitPanel { .color(Color::Muted), ) .child(self.render_repository_selector(cx)) - .child(div().flex_grow()) + .child(div().flex_grow()) // spacer .child( - Button::new("diff", "+/-") - .tooltip(Tooltip::for_action_title("Open diff", &Diff)) - .on_click(|_, _, cx| { - cx.defer(|cx| { - cx.dispatch_action(&Diff); - }) - }), + div() + .h_flex() + .gap_1() + .children(self.render_spinner(cx)) + .children(self.render_sync_button(cx)) + .children(self.render_pull_button(cx)) + .child( + Button::new("diff", "+/-") + .tooltip(Tooltip::for_action_title("Open diff", &Diff)) + .on_click(|_, _, cx| { + cx.defer(|cx| { + cx.dispatch_action(&Diff); + }) + }), + ), ), ) } else { @@ -1607,6 +1848,74 @@ impl GitPanel { } } + pub fn render_spinner(&self, _cx: &mut Context) -> Option { + (!self.pending_remote_operations.borrow().is_empty()).then(|| { + Icon::new(IconName::ArrowCircle) + .size(IconSize::XSmall) + .color(Color::Info) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), + ) + .into_any_element() + }) + } + + pub fn render_sync_button(&self, cx: &mut Context) -> Option { + let active_repository = self.project.read(cx).active_repository(cx); + active_repository.as_ref().map(|_| { + panel_filled_button("Fetch") + .icon(IconName::ArrowCircle) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .tooltip(Tooltip::for_action_title("git fetch", &git::Fetch)) + .on_click( + cx.listener(move |this, _, window, cx| this.fetch(&git::Fetch, window, cx)), + ) + .into_any_element() + }) + } + + pub fn render_pull_button(&self, cx: &mut Context) -> Option { + let active_repository = self.project.read(cx).active_repository(cx); + active_repository + .as_ref() + .and_then(|repo| repo.read(cx).current_branch()) + .and_then(|branch| { + branch.upstream.as_ref().map(|upstream| { + let status = &upstream.tracking; + + let disabled = status.is_gone(); + + panel_filled_button(match status { + git::repository::UpstreamTracking::Tracked(status) if status.behind > 0 => { + format!("Pull ({})", status.behind) + } + _ => "Pull".to_string(), + }) + .icon(IconName::ArrowDown) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .disabled(status.is_gone()) + .tooltip(move |window, cx| { + if disabled { + Tooltip::simple("Upstream is gone", cx) + } else { + // TODO: Add and argument substitutions to this + Tooltip::for_action("git pull", &git::Pull, window, cx) + } + }) + .on_click( + cx.listener(move |this, _, window, cx| this.pull(&git::Pull, window, cx)), + ) + .into_any_element() + }) + }) + } + pub fn render_repository_selector(&self, cx: &mut Context) -> impl IntoElement { let active_repository = self.project.read(cx).active_repository(cx); let repository_display_name = active_repository @@ -1679,27 +1988,25 @@ impl GitPanel { && self.pending_commit.is_none() && !editor.read(cx).is_empty(cx) && self.has_write_access(cx); + let panel_editor_style = panel_editor_style(true, window, cx); let enable_coauthors = self.render_co_authors(cx); - let editor_focus_handle = editor.read(cx).focus_handle(cx).clone(); - - let focus_handle_1 = self.focus_handle(cx).clone(); let tooltip = if self.has_staged_changes() { - "Commit staged changes" + "git commit" } else { - "Commit changes to tracked 
files" + "git commit --all" }; let title = if self.has_staged_changes() { "Commit" } else { - "Commit All" + "Commit Tracked" }; + let editor_focus_handle = self.commit_editor.focus_handle(cx); let commit_button = panel_filled_button(title) .tooltip(move |window, cx| { - let focus_handle = focus_handle_1.clone(); - Tooltip::for_action_in(tooltip, &Commit, &focus_handle, window, cx) + Tooltip::for_action_in(tooltip, &Commit, &editor_focus_handle, window, cx) }) .disabled(!can_commit) .on_click({ @@ -1709,7 +2016,7 @@ impl GitPanel { let branch = self .active_repository .as_ref() - .and_then(|repo| repo.read(cx).branch().map(|b| b.name.clone())) + .and_then(|repo| repo.read(cx).current_branch().map(|b| b.name.clone())) .unwrap_or_else(|| "".into()); let branch_selector = Button::new("branch-selector", branch) @@ -1743,8 +2050,8 @@ impl GitPanel { .border_color(cx.theme().colors().border) .bg(cx.theme().colors().editor_background) .cursor_text() - .on_click(cx.listener(move |_, _: &ClickEvent, window, _cx| { - window.focus(&editor_focus_handle); + .on_click(cx.listener(move |this, _: &ClickEvent, window, cx| { + window.focus(&this.commit_editor.focus_handle(cx)); })) .when(!self.modal_open, |el| { el.child(EditorElement::new(&self.commit_editor, panel_editor_style)) @@ -1772,24 +2079,9 @@ impl GitPanel { fn render_previous_commit(&self, cx: &mut Context) -> Option { let active_repository = self.active_repository.as_ref()?; - let branch = active_repository.read(cx).branch()?; + let branch = active_repository.read(cx).current_branch()?; let commit = branch.most_recent_commit.as_ref()?.clone(); - if branch.upstream.as_ref().is_some_and(|upstream| { - if let Some(tracking) = &upstream.tracking { - tracking.ahead == 0 - } else { - true - } - }) { - return None; - } - let tooltip = if self.has_staged_changes() { - "git reset HEAD^ --soft" - } else { - "git reset HEAD^" - }; - let this = cx.entity(); Some( h_flex() @@ -1829,9 +2121,17 @@ impl GitPanel { .icon_size(IconSize::Small) .icon_color(Color::Muted) .icon_position(IconPosition::Start) - .tooltip(Tooltip::for_action_title(tooltip, &git::Uncommit)) + .tooltip(Tooltip::for_action_title( + if self.has_staged_changes() { + "git reset HEAD^ --soft" + } else { + "git reset HEAD^" + }, + &git::Uncommit, + )) .on_click(cx.listener(|this, _, window, cx| this.uncommit(window, cx))), - ), + ) + .child(self.render_push_button(branch, cx)), ) } @@ -1918,8 +2218,8 @@ impl GitPanel { ) -> Option { let repo = self.active_repository.as_ref()?.read(cx); let repo_path = repo.worktree_id_path_to_repo_path(file.worktree_id(cx), file.path())?; - let ix = self.entries_by_path.get(&repo_path)?; - let entry = self.entries.get(*ix)?; + let ix = self.entry_by_path(&repo_path)?; + let entry = self.entries.get(ix)?; let is_staged = self.entry_is_staged(entry.status_entry()?); @@ -2193,16 +2493,18 @@ impl GitPanel { }) }); - let start_slot = h_flex() - .id(("start-slot", ix)) - .gap(DynamicSpacing::Base04.rems(cx)) - .child(checkbox) - .tooltip(|window, cx| Tooltip::for_action("Stage File", &ToggleStaged, window, cx)) - .child(git_status_icon(status, cx)) - .on_mouse_down(MouseButton::Left, |_, _, cx| { - // prevent the list item active state triggering when toggling checkbox - cx.stop_propagation(); - }); + let start_slot = + h_flex() + .id(("start-slot", ix)) + .gap(DynamicSpacing::Base04.rems(cx)) + .child(checkbox.tooltip(|window, cx| { + Tooltip::for_action("Stage File", &ToggleStaged, window, cx) + })) + .child(git_status_icon(status, cx)) + 
.on_mouse_down(MouseButton::Left, |_, _, cx| { + // prevent the list item active state triggering when toggling checkbox + cx.stop_propagation(); + }); div() .w_full() @@ -2252,6 +2554,69 @@ impl GitPanel { .into_any_element() } + fn render_push_button(&self, branch: &Branch, cx: &Context) -> AnyElement { + let mut disabled = false; + + // TODO: Add and argument substitutions to this + let button: SharedString; + let tooltip: SharedString; + let action: Option; + if let Some(upstream) = &branch.upstream { + match upstream.tracking { + UpstreamTracking::Gone => { + button = "Republish".into(); + tooltip = "git push --set-upstream".into(); + action = Some(git::Push { + options: Some(PushOptions::SetUpstream), + }); + } + UpstreamTracking::Tracked(tracking) => { + if tracking.behind > 0 { + disabled = true; + button = "Push".into(); + tooltip = "Upstream is ahead of local branch".into(); + action = None; + } else if tracking.ahead > 0 { + button = format!("Push ({})", tracking.ahead).into(); + tooltip = "git push".into(); + action = Some(git::Push { options: None }); + } else { + disabled = true; + button = "Push".into(); + tooltip = "Upstream matches local branch".into(); + action = None; + } + } + } + } else { + button = "Publish".into(); + tooltip = "git push --set-upstream".into(); + action = Some(git::Push { + options: Some(PushOptions::SetUpstream), + }); + }; + + panel_filled_button(button) + .icon(IconName::ArrowUp) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .disabled(disabled) + .when_some(action, |this, action| { + this.on_click( + cx.listener(move |this, _, window, cx| this.push(&action, window, cx)), + ) + }) + .tooltip(move |window, cx| { + if let Some(action) = action.as_ref() { + Tooltip::for_action(tooltip.clone(), action, window, cx) + } else { + Tooltip::simple(tooltip.clone(), cx) + } + }) + .into_any_element() + } + fn has_write_access(&self, cx: &App) -> bool { !self.project.read(cx).is_read_only(cx) } @@ -2301,6 +2666,9 @@ impl Render for GitPanel { .on_action(cx.listener(Self::unstage_all)) .on_action(cx.listener(Self::discard_tracked_changes)) .on_action(cx.listener(Self::clean_all)) + .on_action(cx.listener(Self::fetch)) + .on_action(cx.listener(Self::pull)) + .on_action(cx.listener(Self::push)) .when(has_write_access && has_co_authors, |git_panel| { git_panel.on_action(cx.listener(Self::toggle_fill_co_authors)) }) @@ -2317,17 +2685,21 @@ impl Render for GitPanel { .size_full() .overflow_hidden() .bg(ElevationIndex::Surface.bg(cx)) - .child(if has_entries { + .child( v_flex() .size_full() .children(self.render_panel_header(window, cx)) - .child(self.render_entries(has_write_access, window, cx)) + .map(|this| { + if has_entries { + this.child(self.render_entries(has_write_access, window, cx)) + } else { + this.child(self.render_empty_state(cx).into_any_element()) + } + }) .children(self.render_previous_commit(cx)) .child(self.render_commit_editor(window, cx)) - .into_any_element() - } else { - self.render_empty_state(cx).into_any_element() - }) + .into_any_element(), + ) .children(self.context_menu.as_ref().map(|(menu, position, _)| { deferred( anchored() diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 7cca2b23a59c92..7e74fa788ce600 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -9,6 +9,7 @@ pub mod branch_picker; mod commit_modal; pub mod git_panel; mod git_panel_settings; +pub mod picker_prompt; pub mod project_diff; pub mod repository_selector; diff 
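The push button above reduces to a small decision table over the branch's upstream state: no upstream publishes, a gone upstream republishes (both dispatching `git::Push { options: Some(PushOptions::SetUpstream) }`), a branch that is behind its upstream is disabled, and a branch that is ahead pushes. A minimal sketch of that mapping, using simplified stand-in types rather than the real `git::repository::UpstreamTracking`:

// Illustrative sketch of the states handled by render_push_button above.
enum UpstreamState {
    None,                                // never pushed      -> "Publish"
    Gone,                                // upstream deleted  -> "Republish"
    Tracked { ahead: u32, behind: u32 }, // normal tracking branch
}

fn push_button_state(upstream: &UpstreamState) -> (String, /* enabled */ bool) {
    match upstream {
        UpstreamState::None => ("Publish".to_string(), true),
        UpstreamState::Gone => ("Republish".to_string(), true),
        UpstreamState::Tracked { behind, .. } if *behind > 0 => ("Push".to_string(), false),
        UpstreamState::Tracked { ahead, .. } if *ahead > 0 => (format!("Push ({ahead})"), true),
        UpstreamState::Tracked { .. } => ("Push".to_string(), false),
    }
}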
--git a/crates/git_ui/src/picker_prompt.rs b/crates/git_ui/src/picker_prompt.rs new file mode 100644 index 00000000000000..f565b1a768fec5 --- /dev/null +++ b/crates/git_ui/src/picker_prompt.rs @@ -0,0 +1,235 @@ +use anyhow::{anyhow, Result}; +use futures::channel::oneshot; +use fuzzy::{StringMatch, StringMatchCandidate}; + +use core::cmp; +use gpui::{ + rems, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, + InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription, + Task, WeakEntity, Window, +}; +use picker::{Picker, PickerDelegate}; +use std::sync::Arc; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; +use util::ResultExt; +use workspace::{ModalView, Workspace}; + +pub struct PickerPrompt { + pub picker: Entity>, + rem_width: f32, + _subscription: Subscription, +} + +pub fn prompt( + prompt: &str, + options: Vec, + workspace: WeakEntity, + window: &mut Window, + cx: &mut App, +) -> Task> { + if options.is_empty() { + return Task::ready(Err(anyhow!("No options"))); + } + let prompt = prompt.to_string().into(); + + window.spawn(cx, |mut cx| async move { + // Modal branch picker has a longer trailoff than a popover one. + let (tx, rx) = oneshot::channel(); + let delegate = PickerPromptDelegate::new(prompt, options, tx, 70); + + workspace.update_in(&mut cx, |workspace, window, cx| { + workspace.toggle_modal(window, cx, |window, cx| { + PickerPrompt::new(delegate, 34., window, cx) + }) + })?; + + rx.await? + }) +} + +impl PickerPrompt { + fn new( + delegate: PickerPromptDelegate, + rem_width: f32, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); + let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent)); + Self { + picker, + rem_width, + _subscription, + } + } +} +impl ModalView for PickerPrompt {} +impl EventEmitter for PickerPrompt {} + +impl Focusable for PickerPrompt { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for PickerPrompt { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + v_flex() + .w(rems(self.rem_width)) + .child(self.picker.clone()) + .on_mouse_down_out(cx.listener(|this, _, window, cx| { + this.picker.update(cx, |this, cx| { + this.cancel(&Default::default(), window, cx); + }) + })) + } +} + +pub struct PickerPromptDelegate { + prompt: Arc, + matches: Vec, + all_options: Vec, + selected_index: usize, + max_match_length: usize, + tx: Option>>, +} + +impl PickerPromptDelegate { + pub fn new( + prompt: Arc, + options: Vec, + tx: oneshot::Sender>, + max_chars: usize, + ) -> Self { + Self { + prompt, + all_options: options, + matches: vec![], + selected_index: 0, + max_match_length: max_chars, + tx: Some(tx), + } + } +} + +impl PickerDelegate for PickerPromptDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + self.prompt.clone() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _: &mut Context>, + ) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + window: &mut Window, + cx: &mut Context>, + ) -> Task<()> { + cx.spawn_in(window, move |picker, mut cx| async move { + let candidates = picker.update(&mut cx, |picker, _| { + picker + .delegate + 
.all_options + .iter() + .enumerate() + .map(|(ix, option)| StringMatchCandidate::new(ix, &option)) + .collect::>() + }); + let Some(candidates) = candidates.log_err() else { + return; + }; + let matches: Vec = if query.is_empty() { + candidates + .into_iter() + .enumerate() + .map(|(index, candidate)| StringMatch { + candidate_id: index, + string: candidate.string, + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + fuzzy::match_strings( + &candidates, + &query, + true, + 10000, + &Default::default(), + cx.background_executor().clone(), + ) + .await + }; + picker + .update(&mut cx, |picker, _| { + let delegate = &mut picker.delegate; + delegate.matches = matches; + if delegate.matches.is_empty() { + delegate.selected_index = 0; + } else { + delegate.selected_index = + cmp::min(delegate.selected_index, delegate.matches.len() - 1); + } + }) + .log_err(); + }) + } + + fn confirm(&mut self, _: bool, _window: &mut Window, cx: &mut Context>) { + let Some(option) = self.matches.get(self.selected_index()) else { + return; + }; + + self.tx.take().map(|tx| tx.send(Ok(option.candidate_id))); + cx.emit(DismissEvent); + } + + fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { + cx.emit(DismissEvent); + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + let hit = &self.matches[ix]; + let shortened_option = util::truncate_and_trailoff(&hit.string, self.max_match_length); + + Some( + ListItem::new(SharedString::from(format!("picker-prompt-menu-{ix}"))) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .toggle_state(selected) + .map(|el| { + let highlights: Vec<_> = hit + .positions + .iter() + .filter(|index| index < &&self.max_match_length) + .copied() + .collect(); + + el.child(HighlightedLabel::new(shortened_option, highlights)) + }), + ) + } +} diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index ebec4d7848877b..0b1c80888c2fae 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -11,7 +11,7 @@ use editor::{ }; use feature_flags::FeatureFlagViewExt; use futures::StreamExt; -use git::{Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll}; +use git::{status::FileStatus, Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll}; use gpui::{ actions, Action, AnyElement, AnyView, App, AppContext as _, AsyncWindowContext, Entity, EventEmitter, FocusHandle, Focusable, Render, Subscription, Task, WeakEntity, @@ -51,6 +51,7 @@ struct DiffBuffer { path_key: PathKey, buffer: Entity, diff: Entity, + file_status: FileStatus, } const CONFLICT_NAMESPACE: &'static str = "0"; @@ -127,7 +128,6 @@ impl ProjectDiff { window, cx, ); - diff_display_editor.set_distinguish_unstaged_diff_hunks(); diff_display_editor.set_expand_all_diff_hunks(cx); diff_display_editor.register_addon(GitPanelAddon { workspace: workspace.downgrade(), @@ -352,6 +352,7 @@ impl ProjectDiff { path_key, buffer, diff: changes, + file_status: entry.status, }) })); } @@ -384,15 +385,22 @@ impl ProjectDiff { .collect::>() }; - self.multibuffer.update(cx, |multibuffer, cx| { + let is_excerpt_newly_added = self.multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( path_key.clone(), buffer, diff_hunk_ranges, editor::DEFAULT_MULTIBUFFER_CONTEXT, cx, - ); + ) }); + + if is_excerpt_newly_added && diff_buffer.file_status.is_deleted() { + self.editor.update(cx, |editor, cx| { + editor.fold_buffer(snapshot.text.remote_id(), cx) + }); + } + if 
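The `picker_prompt` module introduced above exposes a single `prompt` entry point: it opens a fuzzy-filterable modal over a list of options and resolves to the index of the chosen option. A hedged usage sketch follows; the call site is hypothetical, and the options are assumed to be `SharedString`s, since the generic parameters are elided in this rendering of the diff:

// Hypothetical caller with access to `workspace`, `window: &mut Window`, `cx: &mut App`.
let options: Vec<SharedString> = vec!["origin".into(), "upstream".into()];
let picked = picker_prompt::prompt("Pick a remote", options.clone(), workspace, window, cx);
window
    .spawn(cx, |_cx| async move {
        if let Ok(ix) = picked.await {
            log::info!("user picked {}", options[ix]); // `ix` indexes the original options
        }
    })
    .detach();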
self.multibuffer.read(cx).is_empty() && self .editor diff --git a/crates/git_ui/src/quick_commit.rs b/crates/git_ui/src/quick_commit.rs deleted file mode 100644 index e69de29bb2d1d6..00000000000000 diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 1ea6fba95e5726..2ef7c5b2bc7715 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -13,7 +13,7 @@ workspace = true [features] default = ["http_client", "font-kit", "wayland", "x11"] test-support = [ - "backtrace", + "leak-detection", "collections/test-support", "rand", "util/test-support", @@ -21,6 +21,7 @@ test-support = [ "wayland", "x11", ] +leak-detection = ["backtrace"] runtime_shaders = [] macos-blade = [ "blade-graphics", diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index afdb99b0586e95..69037f9eef8e45 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -19,7 +19,7 @@ use std::{ thread::panicking, }; -#[cfg(any(test, feature = "test-support"))] +#[cfg(any(test, feature = "leak-detection"))] use collections::HashMap; use super::Context; @@ -62,7 +62,7 @@ pub(crate) struct EntityMap { struct EntityRefCounts { counts: SlotMap, dropped_entity_ids: Vec, - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] leak_detector: LeakDetector, } @@ -74,7 +74,7 @@ impl EntityMap { ref_counts: Arc::new(RwLock::new(EntityRefCounts { counts: SlotMap::with_key(), dropped_entity_ids: Vec::new(), - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] leak_detector: LeakDetector { next_handle_id: 0, entity_handles: HashMap::default(), @@ -221,7 +221,7 @@ pub struct AnyEntity { pub(crate) entity_id: EntityId, pub(crate) entity_type: TypeId, entity_map: Weak>, - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] handle_id: HandleId, } @@ -231,7 +231,7 @@ impl AnyEntity { entity_id: id, entity_type, entity_map: entity_map.clone(), - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] handle_id: entity_map .upgrade() .unwrap() @@ -290,7 +290,7 @@ impl Clone for AnyEntity { entity_id: self.entity_id, entity_type: self.entity_type, entity_map: self.entity_map.clone(), - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] handle_id: self .entity_map .upgrade() @@ -319,7 +319,7 @@ impl Drop for AnyEntity { } } - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] if let Some(entity_map) = self.entity_map.upgrade() { entity_map .write() @@ -535,7 +535,7 @@ impl AnyWeakEntity { entity_id: self.entity_id, entity_type: self.entity_type, entity_map: self.entity_ref_counts.clone(), - #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] handle_id: self .entity_ref_counts .upgrade() @@ -547,7 +547,7 @@ impl AnyWeakEntity { } /// Assert that entity referenced by this weak handle has been released. 
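The gpui changes above and below move leak tracking behind a dedicated `leak-detection` cargo feature; `test-support` now simply forwards to it, so test consumers keep the current behavior while ordinary builds compile none of the bookkeeping. A minimal sketch of the same gating pattern, with illustrative names:

// Illustrative sketch of the cfg-gated diagnostics pattern used in entity_map.rs.
#[cfg(any(test, feature = "leak-detection"))]
#[derive(Default)]
struct LeakDetector {
    live_handles: std::collections::HashMap<u64, &'static str>,
}

struct HandleRegistry {
    #[cfg(any(test, feature = "leak-detection"))]
    leak_detector: LeakDetector,
}

impl HandleRegistry {
    fn new() -> Self {
        Self {
            #[cfg(any(test, feature = "leak-detection"))]
            leak_detector: LeakDetector::default(),
        }
    }
}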
- #[cfg(any(test, feature = "test-support"))] + #[cfg(any(test, feature = "leak-detection"))] pub fn assert_released(&self) { self.entity_ref_counts .upgrade() @@ -710,23 +710,23 @@ impl PartialEq> for WeakEntity { } } -#[cfg(any(test, feature = "test-support"))] +#[cfg(any(test, feature = "leak-detection"))] static LEAK_BACKTRACE: std::sync::LazyLock = std::sync::LazyLock::new(|| std::env::var("LEAK_BACKTRACE").map_or(false, |b| !b.is_empty())); -#[cfg(any(test, feature = "test-support"))] +#[cfg(any(test, feature = "leak-detection"))] #[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq)] pub(crate) struct HandleId { id: u64, // id of the handle itself, not the pointed at object } -#[cfg(any(test, feature = "test-support"))] +#[cfg(any(test, feature = "leak-detection"))] pub(crate) struct LeakDetector { next_handle_id: u64, entity_handles: HashMap>>, } -#[cfg(any(test, feature = "test-support"))] +#[cfg(any(test, feature = "leak-detection"))] impl LeakDetector { #[track_caller] pub fn handle_created(&mut self, entity_id: EntityId) -> HandleId { diff --git a/crates/gpui/src/color.rs b/crates/gpui/src/color.rs index daec9440a4eba3..149d924b6fb1d2 100644 --- a/crates/gpui/src/color.rs +++ b/crates/gpui/src/color.rs @@ -486,7 +486,31 @@ impl Hsla { self.a *= 1.0 - factor.clamp(0., 1.); } - /// Returns a new HSLA color with the same hue, saturation, and lightness, but with a modified alpha value. + /// Multiplies the alpha value of the color by a given factor + /// and returns a new HSLA color. + /// + /// Useful for transforming colors with dynamic opacity, + /// like a color from an external source. + /// + /// Example: + /// ``` + /// let color = gpui::red(); + /// let faded_color = color.opacity(0.5); + /// assert_eq!(faded_color.a, 0.5); + /// ``` + /// + /// This will return a red color with half the opacity. + /// + /// Example: + /// ``` + /// let color = hlsa(0.7, 1.0, 0.5, 0.7); // A saturated blue + /// let faded_color = color.opacity(0.16); + /// assert_eq!(faded_color.a, 0.112); + /// ``` + /// + /// This will return a blue color with around ~10% opacity, + /// suitable for an element's hover or selected state. + /// pub fn opacity(&self, factor: f32) -> Self { Hsla { h: self.h, @@ -495,6 +519,35 @@ impl Hsla { a: self.a * factor.clamp(0., 1.), } } + + /// Returns a new HSLA color with the same hue, saturation, + /// and lightness, but with a new alpha value. + /// + /// Example: + /// ``` + /// let color = gpui::red(); + /// let red_color = color.alpha(0.25); + /// assert_eq!(red_color.a, 0.25); + /// ``` + /// + /// This will return a red color with half the opacity. + /// + /// Example: + /// ``` + /// let color = hsla(0.7, 1.0, 0.5, 0.7); // A saturated blue + /// let faded_color = color.alpha(0.25); + /// assert_eq!(faded_color.a, 0.25); + /// ``` + /// + /// This will return a blue color with 25% opacity. + pub fn alpha(&self, a: f32) -> Self { + Hsla { + h: self.h, + s: self.s, + l: self.l, + a: a.clamp(0., 1.), + } + } } impl From for Hsla { diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index c703f9cd3a3633..8b3e5c2359bb9f 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -35,6 +35,11 @@ pub struct BackgroundExecutor { /// A pointer to the executor that is currently running, /// for spawning tasks on the main thread. +/// +/// This is intentionally `!Send` via the `not_send` marker field. 
This is because +/// `ForegroundExecutor::spawn` does not require `Send` but checks at runtime that the future is +/// only polled from the same thread it was spawned from. These checks would fail when spawning +/// foreground tasks from from background threads. #[derive(Clone)] pub struct ForegroundExecutor { #[doc(hidden)] diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index c1f6a9f61754c9..57951650c331fa 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -1,7 +1,7 @@ use crate::{ self as gpui, px, relative, rems, AbsoluteLength, AlignItems, CursorStyle, DefiniteLength, Fill, FlexDirection, FlexWrap, Font, FontStyle, FontWeight, Hsla, JustifyContent, Length, - SharedString, StrikethroughStyle, StyleRefinement, TextOverflow, WhiteSpace, + SharedString, StrikethroughStyle, StyleRefinement, TextOverflow, UnderlineStyle, WhiteSpace, }; use crate::{TextAlign, TextStyleRefinement}; pub use gpui_macros::{ @@ -486,6 +486,17 @@ pub trait Styled: Sized { self } + /// Sets the text decoration to underline. + /// [Docs](https://tailwindcss.com/docs/text-decoration-line#underling-text) + fn underline(mut self) -> Self { + let style = self.text_style().get_or_insert_with(Default::default); + style.underline = Some(UnderlineStyle { + thickness: px(1.), + ..Default::default() + }); + self + } + /// Sets the decoration of the text to have a line through it. /// [Docs](https://tailwindcss.com/docs/text-decoration#setting-the-text-decoration) fn line_through(mut self) -> Self { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index b3afcdb63d3082..ad6ace22cced91 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -593,8 +593,7 @@ impl Frame { } } -// Holds the state for a specific window. -#[doc(hidden)] +/// Holds the state for a specific window. pub struct Window { pub(crate) handle: AnyWindowHandle, pub(crate) invalidator: WindowInvalidator, @@ -1007,6 +1006,7 @@ impl Window { subscription } + /// Replaces the root entity of the window with a new one. pub fn replace_root( &mut self, cx: &mut App, @@ -1021,6 +1021,7 @@ impl Window { view } + /// Returns the root entity of the window, if it has one. pub fn root(&self) -> Option>> where E: 'static + Render, diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index c038458977401f..58ede4e4bc2a16 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -234,6 +234,8 @@ pub struct EditPredictionSettings { pub disabled_globs: Vec, /// Configures how edit predictions are displayed in the buffer. pub mode: EditPredictionsMode, + /// Settings specific to GitHub Copilot. + pub copilot: CopilotSettings, } /// The mode in which edit predictions should be displayed. @@ -248,6 +250,14 @@ pub enum EditPredictionsMode { EagerPreview, } +#[derive(Clone, Debug, Default)] +pub struct CopilotSettings { + /// HTTP/HTTPS proxy to use for Copilot. + pub proxy: Option, + /// Disable certificate verification for proxy (not recommended). + pub proxy_no_verify: Option, +} + /// The settings for all languages. #[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct AllLanguageSettingsContent { @@ -465,6 +475,23 @@ pub struct EditPredictionSettingsContent { /// Provider support required. #[serde(default)] pub mode: EditPredictionsMode, + /// Settings specific to GitHub Copilot. 
+ #[serde(default)] + pub copilot: CopilotSettingsContent, +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct CopilotSettingsContent { + /// HTTP/HTTPS proxy to use for Copilot. + /// + /// Default: none + #[serde(default)] + pub proxy: Option, + /// Disable certificate verification for the proxy (not recommended). + /// + /// Default: false + #[serde(default)] + pub proxy_no_verify: Option, } /// The settings for enabling/disabling features. @@ -1064,6 +1091,16 @@ impl settings::Settings for AllLanguageSettings { .map(|globs| globs.iter().collect()) .ok_or_else(Self::missing_default)?; + let mut copilot_settings = default_value + .edit_predictions + .as_ref() + .map(|settings| settings.copilot.clone()) + .map(|copilot| CopilotSettings { + proxy: copilot.proxy, + proxy_no_verify: copilot.proxy_no_verify, + }) + .unwrap_or_default(); + let mut file_types: HashMap, GlobSet> = HashMap::default(); for (language, suffixes) in &default_value.file_types { @@ -1096,6 +1133,22 @@ impl settings::Settings for AllLanguageSettings { } } + if let Some(proxy) = user_settings + .edit_predictions + .as_ref() + .and_then(|settings| settings.copilot.proxy.clone()) + { + copilot_settings.proxy = Some(proxy); + } + + if let Some(proxy_no_verify) = user_settings + .edit_predictions + .as_ref() + .and_then(|settings| settings.copilot.proxy_no_verify) + { + copilot_settings.proxy_no_verify = Some(proxy_no_verify); + } + // A user's global settings override the default global settings and // all default language-specific settings. merge_settings(&mut defaults, &user_settings.defaults); @@ -1147,6 +1200,7 @@ impl settings::Settings for AllLanguageSettings { .filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher())) .collect(), mode: edit_predictions_mode, + copilot: copilot_settings, }, defaults, languages, diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 51f205dced72e5..e279c6989b0d02 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -19,17 +19,14 @@ test-support = [] anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true base64.workspace = true +client.workspace = true collections.workspace = true -deepseek = { workspace = true, features = ["schemars"] } futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true image.workspace = true -lmstudio = { workspace = true, features = ["schemars"] } log.workspace = true -mistral = { workspace = true, features = ["schemars"] } -ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } parking_lot.workspace = true proto.workspace = true @@ -38,6 +35,7 @@ serde.workspace = true serde_json.workspace = true smol.workspace = true strum.workspace = true +telemetry_events.workspace = true thiserror.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 7b50702a6ee7e0..72ff92142d978b 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -3,20 +3,17 @@ mod rate_limiter; mod registry; mod request; mod role; +mod telemetry; #[cfg(any(test, feature = "test-support"))] pub mod fake_provider; use anyhow::Result; +use client::Client; use futures::FutureExt; use futures::{future::BoxFuture, stream::BoxStream, StreamExt, TryStreamExt as _}; 
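The `language_settings.rs` hunk above merges the new Copilot proxy settings field by field: start from the defaults, then let any user-provided value win. A hedged sketch of that merge as a standalone helper; the real code performs it inline in `AllLanguageSettings::load`, and `merge_copilot` is not an actual function in this diff:

// CopilotSettings / CopilotSettingsContent are the types introduced above
// (proxy: Option<String>, proxy_no_verify: Option<bool>).
fn merge_copilot(
    mut merged: CopilotSettings,
    user: Option<&CopilotSettingsContent>,
) -> CopilotSettings {
    if let Some(user) = user {
        if let Some(proxy) = user.proxy.clone() {
            merged.proxy = Some(proxy);
        }
        if let Some(proxy_no_verify) = user.proxy_no_verify {
            merged.proxy_no_verify = Some(proxy_no_verify);
        }
    }
    merged
}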
use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window}; -pub use model::*; use proto::Plan; -pub use rate_limiter::*; -pub use registry::*; -pub use request::*; -pub use role::*; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::fmt; @@ -24,10 +21,18 @@ use std::{future::Future, sync::Arc}; use thiserror::Error; use ui::IconName; +pub use crate::model::*; +pub use crate::rate_limiter::*; +pub use crate::registry::*; +pub use crate::request::*; +pub use crate::role::*; +pub use crate::telemetry::*; + pub const ZED_CLOUD_PROVIDER_ID: &str = "zed.dev"; -pub fn init(cx: &mut App) { +pub fn init(client: Arc, cx: &mut App) { registry::init(cx); + RefreshLlmTokenListener::register(client.clone(), cx); } /// The availability of a [`LanguageModel`]. diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index ead33e7b9b34fa..2a09ab5c4219bc 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -1,7 +1,17 @@ -use proto::Plan; +use std::fmt; +use std::sync::Arc; + +use anyhow::Result; +use client::Client; +use gpui::{ + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal as _, +}; +use proto::{Plan, TypedEnvelope}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use strum::EnumIter; +use thiserror::Error; use ui::IconName; use crate::LanguageModelAvailability; @@ -69,6 +79,7 @@ impl CloudModel { | anthropic::Model::Claude3Sonnet | anthropic::Model::Claude3Haiku | anthropic::Model::Claude3_5Haiku + | anthropic::Model::Claude3_7Sonnet | anthropic::Model::Custom { .. } => { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } @@ -101,3 +112,92 @@ impl CloudModel { } } } + +#[derive(Error, Debug)] +pub struct PaymentRequiredError; + +impl fmt::Display for PaymentRequiredError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "Payment required to use this language model. Please upgrade your account." + ) + } +} + +#[derive(Error, Debug)] +pub struct MaxMonthlySpendReachedError; + +impl fmt::Display for MaxMonthlySpendReachedError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "Maximum spending limit reached for this month. For more usage, increase your spending limit." 
+ ) + } +} + +#[derive(Clone, Default)] +pub struct LlmApiToken(Arc>>); + +impl LlmApiToken { + pub async fn acquire(&self, client: &Arc) -> Result { + let lock = self.0.upgradable_read().await; + if let Some(token) = lock.as_ref() { + Ok(token.to_string()) + } else { + Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await + } + } + + pub async fn refresh(&self, client: &Arc) -> Result { + Self::fetch(self.0.write().await, client).await + } + + async fn fetch<'a>( + mut lock: RwLockWriteGuard<'a, Option>, + client: &Arc, + ) -> Result { + let response = client.request(proto::GetLlmToken {}).await?; + *lock = Some(response.token.clone()); + Ok(response.token.clone()) + } +} + +struct GlobalRefreshLlmTokenListener(Entity); + +impl Global for GlobalRefreshLlmTokenListener {} + +pub struct RefreshLlmTokenEvent; + +pub struct RefreshLlmTokenListener { + _llm_token_subscription: client::Subscription, +} + +impl EventEmitter for RefreshLlmTokenListener {} + +impl RefreshLlmTokenListener { + pub fn register(client: Arc, cx: &mut App) { + let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, cx)); + cx.set_global(GlobalRefreshLlmTokenListener(listener)); + } + + pub fn global(cx: &App) -> Entity { + GlobalRefreshLlmTokenListener::global(cx).0.clone() + } + + fn new(client: Arc, cx: &mut Context) -> Self { + Self { + _llm_token_subscription: client + .add_message_handler(cx.weak_entity(), Self::handle_refresh_llm_token), + } + } + + async fn handle_refresh_llm_token( + this: Entity, + _: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result<()> { + this.update(&mut cx, |_this, cx| cx.emit(RefreshLlmTokenEvent)) + } +} diff --git a/crates/language_model/src/model/mod.rs b/crates/language_model/src/model/mod.rs index 12aaed3ab2917d..db4c55daa7db99 100644 --- a/crates/language_model/src/model/mod.rs +++ b/crates/language_model/src/model/mod.rs @@ -1,7 +1,3 @@ pub mod cloud_model; -pub use anthropic::Model as AnthropicModel; pub use cloud_model::*; -pub use lmstudio::Model as LmStudioModel; -pub use ollama::Model as OllamaModel; -pub use open_ai::Model as OpenAiModel; diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 507e8b42072a56..5f11ddffd68a7d 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -241,298 +241,6 @@ pub struct LanguageModelRequest { pub temperature: Option, } -impl LanguageModelRequest { - pub fn into_open_ai(self, model: String, max_output_tokens: Option) -> open_ai::Request { - let stream = !model.starts_with("o1-"); - open_ai::Request { - model, - messages: self - .messages - .into_iter() - .map(|msg| match msg.role { - Role::User => open_ai::RequestMessage::User { - content: msg.string_contents(), - }, - Role::Assistant => open_ai::RequestMessage::Assistant { - content: Some(msg.string_contents()), - tool_calls: Vec::new(), - }, - Role::System => open_ai::RequestMessage::System { - content: msg.string_contents(), - }, - }) - .collect(), - stream, - stop: self.stop, - temperature: self.temperature.unwrap_or(1.0), - max_tokens: max_output_tokens, - tools: Vec::new(), - tool_choice: None, - } - } - - pub fn into_mistral(self, model: String, max_output_tokens: Option) -> mistral::Request { - let len = self.messages.len(); - let merged_messages = - self.messages - .into_iter() - .fold(Vec::with_capacity(len), |mut acc, msg| { - let role = msg.role; - let content = msg.string_contents(); - - acc.push(match role { - Role::User => mistral::RequestMessage::User { content }, - 
Role::Assistant => mistral::RequestMessage::Assistant { - content: Some(content), - tool_calls: Vec::new(), - }, - Role::System => mistral::RequestMessage::System { content }, - }); - acc - }); - - mistral::Request { - model, - messages: merged_messages, - stream: true, - max_tokens: max_output_tokens, - temperature: self.temperature, - response_format: None, - tools: self - .tools - .into_iter() - .map(|tool| mistral::ToolDefinition::Function { - function: mistral::FunctionDefinition { - name: tool.name, - description: Some(tool.description), - parameters: Some(tool.input_schema), - }, - }) - .collect(), - } - } - - pub fn into_google(self, model: String) -> google_ai::GenerateContentRequest { - google_ai::GenerateContentRequest { - model, - contents: self - .messages - .into_iter() - .map(|msg| google_ai::Content { - parts: vec![google_ai::Part::TextPart(google_ai::TextPart { - text: msg.string_contents(), - })], - role: match msg.role { - Role::User => google_ai::Role::User, - Role::Assistant => google_ai::Role::Model, - Role::System => google_ai::Role::User, // Google AI doesn't have a system role - }, - }) - .collect(), - generation_config: Some(google_ai::GenerationConfig { - candidate_count: Some(1), - stop_sequences: Some(self.stop), - max_output_tokens: None, - temperature: self.temperature.map(|t| t as f64).or(Some(1.0)), - top_p: None, - top_k: None, - }), - safety_settings: None, - } - } - - pub fn into_anthropic( - self, - model: String, - default_temperature: f32, - max_output_tokens: u32, - ) -> anthropic::Request { - let mut new_messages: Vec = Vec::new(); - let mut system_message = String::new(); - - for message in self.messages { - if message.contents_empty() { - continue; - } - - match message.role { - Role::User | Role::Assistant => { - let cache_control = if message.cache { - Some(anthropic::CacheControl { - cache_type: anthropic::CacheControlType::Ephemeral, - }) - } else { - None - }; - let anthropic_message_content: Vec = message - .content - .into_iter() - .filter_map(|content| match content { - MessageContent::Text(text) => { - if !text.is_empty() { - Some(anthropic::RequestContent::Text { - text, - cache_control, - }) - } else { - None - } - } - MessageContent::Image(image) => { - Some(anthropic::RequestContent::Image { - source: anthropic::ImageSource { - source_type: "base64".to_string(), - media_type: "image/png".to_string(), - data: image.source.to_string(), - }, - cache_control, - }) - } - MessageContent::ToolUse(tool_use) => { - Some(anthropic::RequestContent::ToolUse { - id: tool_use.id.to_string(), - name: tool_use.name, - input: tool_use.input, - cache_control, - }) - } - MessageContent::ToolResult(tool_result) => { - Some(anthropic::RequestContent::ToolResult { - tool_use_id: tool_result.tool_use_id, - is_error: tool_result.is_error, - content: tool_result.content, - cache_control, - }) - } - }) - .collect(); - let anthropic_role = match message.role { - Role::User => anthropic::Role::User, - Role::Assistant => anthropic::Role::Assistant, - Role::System => unreachable!("System role should never occur here"), - }; - if let Some(last_message) = new_messages.last_mut() { - if last_message.role == anthropic_role { - last_message.content.extend(anthropic_message_content); - continue; - } - } - new_messages.push(anthropic::Message { - role: anthropic_role, - content: anthropic_message_content, - }); - } - Role::System => { - if !system_message.is_empty() { - system_message.push_str("\n\n"); - } - system_message.push_str(&message.string_contents()); - } - } - 
} - - anthropic::Request { - model, - messages: new_messages, - max_tokens: max_output_tokens, - system: Some(system_message), - tools: self - .tools - .into_iter() - .map(|tool| anthropic::Tool { - name: tool.name, - description: tool.description, - input_schema: tool.input_schema, - }) - .collect(), - tool_choice: None, - metadata: None, - stop_sequences: Vec::new(), - temperature: self.temperature.or(Some(default_temperature)), - top_k: None, - top_p: None, - } - } - - pub fn into_deepseek(self, model: String, max_output_tokens: Option) -> deepseek::Request { - let is_reasoner = model == "deepseek-reasoner"; - - let len = self.messages.len(); - let merged_messages = - self.messages - .into_iter() - .fold(Vec::with_capacity(len), |mut acc, msg| { - let role = msg.role; - let content = msg.string_contents(); - - if is_reasoner { - if let Some(last_msg) = acc.last_mut() { - match (last_msg, role) { - (deepseek::RequestMessage::User { content: last }, Role::User) => { - last.push(' '); - last.push_str(&content); - return acc; - } - - ( - deepseek::RequestMessage::Assistant { - content: last_content, - .. - }, - Role::Assistant, - ) => { - *last_content = last_content - .take() - .map(|c| { - let mut s = - String::with_capacity(c.len() + content.len() + 1); - s.push_str(&c); - s.push(' '); - s.push_str(&content); - s - }) - .or(Some(content)); - - return acc; - } - _ => {} - } - } - } - - acc.push(match role { - Role::User => deepseek::RequestMessage::User { content }, - Role::Assistant => deepseek::RequestMessage::Assistant { - content: Some(content), - tool_calls: Vec::new(), - }, - Role::System => deepseek::RequestMessage::System { content }, - }); - acc - }); - - deepseek::Request { - model, - messages: merged_messages, - stream: true, - max_tokens: max_output_tokens, - temperature: if is_reasoner { None } else { self.temperature }, - response_format: None, - tools: self - .tools - .into_iter() - .map(|tool| deepseek::ToolDefinition::Function { - function: deepseek::FunctionDefinition { - name: tool.name, - description: Some(tool.description), - parameters: Some(tool.input_schema), - }, - }) - .collect(), - } - } -} - #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] pub struct LanguageModelResponseMessage { pub role: Option, diff --git a/crates/language_model/src/role.rs b/crates/language_model/src/role.rs index fa56a2a88ba71c..953dfa6fdff91c 100644 --- a/crates/language_model/src/role.rs +++ b/crates/language_model/src/role.rs @@ -45,43 +45,3 @@ impl Display for Role { } } } - -impl From for ollama::Role { - fn from(val: Role) -> Self { - match val { - Role::User => ollama::Role::User, - Role::Assistant => ollama::Role::Assistant, - Role::System => ollama::Role::System, - } - } -} - -impl From for open_ai::Role { - fn from(val: Role) -> Self { - match val { - Role::User => open_ai::Role::User, - Role::Assistant => open_ai::Role::Assistant, - Role::System => open_ai::Role::System, - } - } -} - -impl From for deepseek::Role { - fn from(val: Role) -> Self { - match val { - Role::User => deepseek::Role::User, - Role::Assistant => deepseek::Role::Assistant, - Role::System => deepseek::Role::System, - } - } -} - -impl From for lmstudio::Role { - fn from(val: Role) -> Self { - match val { - Role::User => lmstudio::Role::User, - Role::Assistant => lmstudio::Role::Assistant, - Role::System => lmstudio::Role::System, - } - } -} diff --git a/crates/language_models/src/logging.rs b/crates/language_model/src/telemetry.rs similarity index 97% rename from crates/language_models/src/logging.rs 
rename to crates/language_model/src/telemetry.rs index 69fc3dfeece1b6..0454d43c734758 100644 --- a/crates/language_models/src/logging.rs +++ b/crates/language_model/src/telemetry.rs @@ -8,7 +8,7 @@ use std::sync::Arc; use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; use util::ResultExt; -use crate::provider::anthropic::PROVIDER_ID as ANTHROPIC_PROVIDER_ID; +pub const ANTHROPIC_PROVIDER_ID: &str = "anthropic"; pub fn report_assistant_event( event: AssistantEvent, diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index 9a9196ee1fbab4..cda61c1cfa8c81 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -14,6 +14,10 @@ path = "src/language_models.rs" [dependencies] anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true +aws-config = { workspace = true, features = ["behavior-version-latest"] } +aws-credential-types = { workspace = true, features = ["hardcoded-credentials"] } +aws_http_client.workspace = true +bedrock.workspace = true client.workspace = true collections.workspace = true credentials_provider.workspace = true @@ -25,6 +29,7 @@ fs.workspace = true futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true +gpui_tokio.workspace = true http_client.workspace = true language_model.workspace = true lmstudio = { workspace = true, features = ["schemars"] } @@ -40,10 +45,9 @@ serde_json.workspace = true settings.workspace = true smol.workspace = true strum.workspace = true -telemetry_events.workspace = true theme.workspace = true -thiserror.workspace = true tiktoken-rs.workspace = true +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } ui.workspace = true util.workspace = true diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 11f9415b597d84..4e0aee55b06a4e 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -6,14 +6,12 @@ use gpui::{App, Context, Entity}; use language_model::{LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID}; use provider::deepseek::DeepSeekLanguageModelProvider; -mod logging; pub mod provider; mod settings; use crate::provider::anthropic::AnthropicLanguageModelProvider; +use crate::provider::bedrock::BedrockLanguageModelProvider; use crate::provider::cloud::CloudLanguageModelProvider; -pub use crate::provider::cloud::LlmApiToken; -pub use crate::provider::cloud::RefreshLlmTokenListener; use crate::provider::copilot_chat::CopilotChatLanguageModelProvider; use crate::provider::google::GoogleLanguageModelProvider; use crate::provider::lmstudio::LmStudioLanguageModelProvider; @@ -21,7 +19,6 @@ use crate::provider::mistral::MistralLanguageModelProvider; use crate::provider::ollama::OllamaLanguageModelProvider; use crate::provider::open_ai::OpenAiLanguageModelProvider; pub use crate::settings::*; -pub use logging::report_assistant_event; pub fn init(user_store: Entity, client: Arc, fs: Arc, cx: &mut App) { crate::settings::init(fs, cx); @@ -39,8 +36,6 @@ fn register_language_model_providers( ) { use feature_flags::FeatureFlagAppExt; - RefreshLlmTokenListener::register(client.clone(), cx); - registry.register_provider( AnthropicLanguageModelProvider::new(client.http_client(), cx), cx, @@ -69,6 +64,10 @@ fn register_language_model_providers( MistralLanguageModelProvider::new(client.http_client(), cx), cx, ); + registry.register_provider( + 
BedrockLanguageModelProvider::new(client.http_client(), cx), + cx, + ); registry.register_provider(CopilotChatLanguageModelProvider::new(cx), cx); cx.observe_flag::(move |enabled, cx| { diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index 06c7355321b5e6..6b183292f32202 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -1,4 +1,5 @@ pub mod anthropic; +pub mod bedrock; pub mod cloud; pub mod copilot_chat; pub mod deepseek; diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 9908929457bf46..3076e4a6171b87 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -13,7 +13,7 @@ use http_client::HttpClient; use language_model::{ AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, + LanguageModelProviderState, LanguageModelRequest, MessageContent, RateLimiter, Role, }; use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason}; use schemars::JsonSchema; @@ -27,7 +27,7 @@ use theme::ThemeSettings; use ui::{prelude::*, Icon, IconName, Tooltip}; use util::{maybe, ResultExt}; -pub const PROVIDER_ID: &str = "anthropic"; +const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID; const PROVIDER_NAME: &str = "Anthropic"; #[derive(Default, Clone, Debug, PartialEq)] @@ -396,7 +396,8 @@ impl LanguageModel for AnthropicModel { request: LanguageModelRequest, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let request = request.into_anthropic( + let request = into_anthropic( + request, self.model.id().into(), self.model.default_temperature(), self.model.max_output_tokens(), @@ -427,7 +428,8 @@ impl LanguageModel for AnthropicModel { input_schema: serde_json::Value, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let mut request = request.into_anthropic( + let mut request = into_anthropic( + request, self.model.tool_model_id().into(), self.model.default_temperature(), self.model.max_output_tokens(), @@ -456,6 +458,117 @@ impl LanguageModel for AnthropicModel { } } +pub fn into_anthropic( + request: LanguageModelRequest, + model: String, + default_temperature: f32, + max_output_tokens: u32, +) -> anthropic::Request { + let mut new_messages: Vec = Vec::new(); + let mut system_message = String::new(); + + for message in request.messages { + if message.contents_empty() { + continue; + } + + match message.role { + Role::User | Role::Assistant => { + let cache_control = if message.cache { + Some(anthropic::CacheControl { + cache_type: anthropic::CacheControlType::Ephemeral, + }) + } else { + None + }; + let anthropic_message_content: Vec = message + .content + .into_iter() + .filter_map(|content| match content { + MessageContent::Text(text) => { + if !text.is_empty() { + Some(anthropic::RequestContent::Text { + text, + cache_control, + }) + } else { + None + } + } + MessageContent::Image(image) => Some(anthropic::RequestContent::Image { + source: anthropic::ImageSource { + source_type: "base64".to_string(), + media_type: "image/png".to_string(), + data: image.source.to_string(), + }, + cache_control, + }), + MessageContent::ToolUse(tool_use) => { + Some(anthropic::RequestContent::ToolUse { + id: tool_use.id.to_string(), + name: tool_use.name, + input: 
tool_use.input, + cache_control, + }) + } + MessageContent::ToolResult(tool_result) => { + Some(anthropic::RequestContent::ToolResult { + tool_use_id: tool_result.tool_use_id, + is_error: tool_result.is_error, + content: tool_result.content, + cache_control, + }) + } + }) + .collect(); + let anthropic_role = match message.role { + Role::User => anthropic::Role::User, + Role::Assistant => anthropic::Role::Assistant, + Role::System => unreachable!("System role should never occur here"), + }; + if let Some(last_message) = new_messages.last_mut() { + if last_message.role == anthropic_role { + last_message.content.extend(anthropic_message_content); + continue; + } + } + new_messages.push(anthropic::Message { + role: anthropic_role, + content: anthropic_message_content, + }); + } + Role::System => { + if !system_message.is_empty() { + system_message.push_str("\n\n"); + } + system_message.push_str(&message.string_contents()); + } + } + } + + anthropic::Request { + model, + messages: new_messages, + max_tokens: max_output_tokens, + system: Some(system_message), + tools: request + .tools + .into_iter() + .map(|tool| anthropic::Tool { + name: tool.name, + description: tool.description, + input_schema: tool.input_schema, + }) + .collect(), + tool_choice: None, + metadata: None, + stop_sequences: Vec::new(), + temperature: request.temperature.or(Some(default_temperature)), + top_k: None, + top_p: None, + } +} + pub fn map_to_language_model_completion_events( events: Pin>>>, ) -> impl Stream> { diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs new file mode 100644 index 00000000000000..c2b33216310824 --- /dev/null +++ b/crates/language_models/src/provider/bedrock.rs @@ -0,0 +1,1018 @@ +use std::pin::Pin; +use std::str::FromStr; +use std::sync::Arc; + +use anyhow::{anyhow, Context as _, Result}; +use aws_config::stalled_stream_protection::StalledStreamProtectionConfig; +use aws_config::Region; +use aws_credential_types::Credentials; +use aws_http_client::AwsHttpClient; +use bedrock::bedrock_client::types::{ + ContentBlockDelta, ContentBlockStart, ContentBlockStartEvent, ConverseStreamOutput, +}; +use bedrock::bedrock_client::{self, Config}; +use bedrock::{ + value_to_aws_document, BedrockError, BedrockInnerContent, BedrockMessage, BedrockSpecificTool, + BedrockStreamingResponse, BedrockTool, BedrockToolChoice, BedrockToolInputSchema, Model, +}; +use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; +use editor::{Editor, EditorElement, EditorStyle}; +use futures::{future::BoxFuture, stream::BoxStream, FutureExt, Stream, StreamExt}; +use gpui::{ + AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace, +}; +use gpui_tokio::Tokio; +use http_client::HttpClient; +use language_model::{ + AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, + LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, LanguageModelToolUse, MessageContent, RateLimiter, Role, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use settings::{Settings, SettingsStore}; +use strum::IntoEnumIterator; +use theme::ThemeSettings; +use tokio::runtime::Handle; +use ui::{prelude::*, Icon, IconName, Tooltip}; +use util::{maybe, ResultExt}; + +use crate::AllLanguageModelSettings; + +const PROVIDER_ID: &str = "amazon-bedrock"; 
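`into_anthropic` above is the former `LanguageModelRequest::into_anthropic` method hoisted into a free function, so the request is now passed as the first argument; the new `into_bedrock` below follows the same shape. A hedged sketch of the updated call site (variable names are illustrative; the model-specific values come from `self.model`, as in `stream_completion` above):

// Sketch of the new call shape; `request` is a LanguageModelRequest owned by the caller.
let anthropic_request = into_anthropic(
    request,
    model_id,            // e.g. self.model.id().into()
    default_temperature, // e.g. self.model.default_temperature()
    max_output_tokens,   // e.g. self.model.max_output_tokens()
);
// As before the move, system messages are folded into `anthropic_request.system`
// and consecutive messages with the same role are merged into one message.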
+const PROVIDER_NAME: &str = "Amazon Bedrock"; + +#[derive(Default, Clone, Deserialize, Serialize, PartialEq, Debug)] +pub struct BedrockCredentials { + pub region: String, + pub access_key_id: String, + pub secret_access_key: String, +} + +#[derive(Default, Clone, Debug, PartialEq)] +pub struct AmazonBedrockSettings { + pub session_token: Option, + pub available_models: Vec, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct AvailableModel { + pub name: String, + pub display_name: Option, + pub max_tokens: usize, + pub cache_configuration: Option, + pub max_output_tokens: Option, + pub default_temperature: Option, +} + +// These environment variables all use a `ZED_` prefix because we don't want to overwrite the user's AWS credentials. +const ZED_BEDROCK_ACCESS_KEY_ID_VAR: &str = "ZED_ACCESS_KEY_ID"; +const ZED_BEDROCK_SECRET_ACCESS_KEY_VAR: &str = "ZED_SECRET_ACCESS_KEY"; +const ZED_BEDROCK_REGION_VAR: &str = "ZED_AWS_REGION"; +const ZED_AWS_CREDENTIALS_VAR: &str = "ZED_AWS_CREDENTIALS"; + +pub struct State { + credentials: Option, + credentials_from_env: bool, + region: Option, + _subscription: Subscription, +} + +impl State { + fn reset_credentials(&self, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + cx.spawn(|this, mut cx| async move { + credentials_provider + .delete_credentials(ZED_AWS_CREDENTIALS_VAR, &cx) + .await + .log_err(); + this.update(&mut cx, |this, cx| { + this.credentials = None; + this.credentials_from_env = false; + cx.notify(); + }) + }) + } + + fn set_credentials( + &mut self, + credentials: BedrockCredentials, + cx: &mut Context, + ) -> Task> { + let credentials_provider = ::global(cx); + cx.spawn(|this, mut cx| async move { + credentials_provider + .write_credentials( + ZED_AWS_CREDENTIALS_VAR, + "Bearer", + &serde_json::to_vec(&credentials)?, + &cx, + ) + .await?; + this.update(&mut cx, |this, cx| { + this.credentials = Some(credentials); + cx.notify(); + }) + }) + } + + fn is_authenticated(&self) -> bool { + self.credentials.is_some() + } + + fn authenticate(&self, cx: &mut Context) -> Task> { + if self.is_authenticated() { + return Task::ready(Ok(())); + } + + let credentials_provider = ::global(cx); + cx.spawn(|this, mut cx| async move { + let (credentials, from_env) = + if let Ok(credentials) = std::env::var(ZED_AWS_CREDENTIALS_VAR) { + (credentials, true) + } else { + let (_, credentials) = credentials_provider + .read_credentials(ZED_AWS_CREDENTIALS_VAR, &cx) + .await? 
+ .ok_or_else(|| AuthenticateError::CredentialsNotFound)?; + ( + String::from_utf8(credentials) + .context("invalid {PROVIDER_NAME} credentials")?, + false, + ) + }; + + let credentials: BedrockCredentials = + serde_json::from_str(&credentials).context("failed to parse credentials")?; + + this.update(&mut cx, |this, cx| { + this.credentials = Some(credentials); + this.credentials_from_env = from_env; + cx.notify(); + })?; + + Ok(()) + }) + } +} + +pub struct BedrockLanguageModelProvider { + http_client: AwsHttpClient, + handler: tokio::runtime::Handle, + state: gpui::Entity, +} + +impl BedrockLanguageModelProvider { + pub fn new(http_client: Arc, cx: &mut App) -> Self { + let state = cx.new(|cx| State { + credentials: None, + region: Some(String::from("us-east-1")), + credentials_from_env: false, + _subscription: cx.observe_global::(|_, cx| { + cx.notify(); + }), + }); + + let tokio_handle = Tokio::handle(cx); + + let coerced_client = AwsHttpClient::new(http_client.clone(), tokio_handle.clone()); + + Self { + http_client: coerced_client, + handler: tokio_handle.clone(), + state, + } + } +} + +impl LanguageModelProvider for BedrockLanguageModelProvider { + fn id(&self) -> LanguageModelProviderId { + LanguageModelProviderId(PROVIDER_ID.into()) + } + + fn name(&self) -> LanguageModelProviderName { + LanguageModelProviderName(PROVIDER_NAME.into()) + } + + fn icon(&self) -> IconName { + IconName::AiBedrock + } + + fn default_model(&self, _cx: &App) -> Option> { + let model = bedrock::Model::default(); + Some(Arc::new(BedrockModel { + id: LanguageModelId::from(model.id().to_string()), + model, + http_client: self.http_client.clone(), + handler: self.handler.clone(), + state: self.state.clone(), + request_limiter: RateLimiter::new(4), + })) + } + + fn provided_models(&self, cx: &App) -> Vec> { + let mut models = BTreeMap::default(); + + for model in bedrock::Model::iter() { + if !matches!(model, bedrock::Model::Custom { .. 
}) { + models.insert(model.id().to_string(), model); + } + } + + // Override with available models from settings + for model in AllLanguageModelSettings::get_global(cx) + .bedrock + .available_models + .iter() + { + models.insert( + model.name.clone(), + bedrock::Model::Custom { + name: model.name.clone(), + display_name: model.display_name.clone(), + max_tokens: model.max_tokens, + max_output_tokens: model.max_output_tokens, + default_temperature: model.default_temperature, + }, + ); + } + + models + .into_values() + .map(|model| { + Arc::new(BedrockModel { + id: LanguageModelId::from(model.id().to_string()), + model, + http_client: self.http_client.clone(), + handler: self.handler.clone(), + state: self.state.clone(), + request_limiter: RateLimiter::new(4), + }) as Arc + }) + .collect() + } + + fn is_authenticated(&self, cx: &App) -> bool { + self.state.read(cx).is_authenticated() + } + + fn authenticate(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.authenticate(cx)) + } + + fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + .into() + } + + fn reset_credentials(&self, cx: &mut App) -> Task> { + self.state + .update(cx, |state, cx| state.reset_credentials(cx)) + } +} + +impl LanguageModelProviderState for BedrockLanguageModelProvider { + type ObservableEntity = State; + + fn observable_entity(&self) -> Option> { + Some(self.state.clone()) + } +} + +struct BedrockModel { + id: LanguageModelId, + model: Model, + http_client: AwsHttpClient, + handler: tokio::runtime::Handle, + state: gpui::Entity, + request_limiter: RateLimiter, +} + +impl BedrockModel { + fn stream_completion( + &self, + request: bedrock::Request, + cx: &AsyncApp, + ) -> Result< + BoxFuture<'static, BoxStream<'static, Result>>, + > { + let Ok(Ok((access_key_id, secret_access_key, region))) = + cx.read_entity(&self.state, |state, _cx| { + if let Some(credentials) = &state.credentials { + Ok(( + credentials.access_key_id.clone(), + credentials.secret_access_key.clone(), + state.region.clone(), + )) + } else { + return Err(anyhow!("Failed to read credentials")); + } + }) + else { + return Err(anyhow!("App state dropped")); + }; + + let runtime_client = bedrock_client::Client::from_conf( + Config::builder() + .stalled_stream_protection(StalledStreamProtectionConfig::disabled()) + .credentials_provider(Credentials::new( + access_key_id, + secret_access_key, + None, + None, + "Keychain", + )) + .region(Region::new(region.unwrap())) + .http_client(self.http_client.clone()) + .build(), + ); + + let owned_handle = self.handler.clone(); + + Ok(async move { + let request = bedrock::stream_completion(runtime_client, request, owned_handle); + request.await.unwrap_or_else(|e| { + futures::stream::once(async move { Err(BedrockError::ClientError(e)) }).boxed() + }) + } + .boxed()) + } +} + +impl LanguageModel for BedrockModel { + fn id(&self) -> LanguageModelId { + self.id.clone() + } + + fn name(&self) -> LanguageModelName { + LanguageModelName::from(self.model.display_name().to_string()) + } + + fn provider_id(&self) -> LanguageModelProviderId { + LanguageModelProviderId(PROVIDER_ID.into()) + } + + fn provider_name(&self) -> LanguageModelProviderName { + LanguageModelProviderName(PROVIDER_NAME.into()) + } + + fn telemetry_id(&self) -> String { + format!("bedrock/{}", self.model.id()) + } + + fn max_token_count(&self) -> usize { + self.model.max_token_count() + } + + fn max_output_tokens(&self) -> Option { + 
Some(self.model.max_output_tokens()) + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &App, + ) -> BoxFuture<'static, Result> { + get_bedrock_tokens(request, cx) + } + + fn stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> BoxFuture<'static, Result>>> { + let request = into_bedrock( + request, + self.model.id().into(), + self.model.default_temperature(), + self.model.max_output_tokens(), + ); + + let owned_handle = self.handler.clone(); + + let request = self.stream_completion(request, cx); + let future = self.request_limiter.stream(async move { + let response = request.map_err(|e| anyhow!(e)).unwrap().await; + Ok(map_to_language_model_completion_events( + response, + owned_handle, + )) + }); + async move { Ok(future.await?.boxed()) }.boxed() + } + + fn use_any_tool( + &self, + request: LanguageModelRequest, + name: String, + description: String, + schema: Value, + _cx: &AsyncApp, + ) -> BoxFuture<'static, Result>>> { + let mut request = into_bedrock( + request, + self.model.id().into(), + self.model.default_temperature(), + self.model.max_output_tokens(), + ); + + request.tool_choice = Some(BedrockToolChoice::Tool( + BedrockSpecificTool::builder() + .name(name.clone()) + .build() + .unwrap(), + )); + + request.tools = vec![BedrockTool::builder() + .name(name.clone()) + .description(description.clone()) + .input_schema(BedrockToolInputSchema::Json(value_to_aws_document(&schema))) + .build() + .unwrap()]; + + let handle = self.handler.clone(); + + let request = self.stream_completion(request, _cx); + self.request_limiter + .run(async move { + let response = request.map_err(|e| anyhow!(e)).unwrap().await; + Ok(extract_tool_args_from_events(name, response, handle) + .await? + .boxed()) + }) + .boxed() + } + + fn cache_configuration(&self) -> Option { + None + } +} + +pub fn into_bedrock( + request: LanguageModelRequest, + model: String, + default_temperature: f32, + max_output_tokens: u32, +) -> bedrock::Request { + let mut new_messages: Vec = Vec::new(); + let mut system_message = String::new(); + + for message in request.messages { + if message.contents_empty() { + continue; + } + + match message.role { + Role::User | Role::Assistant => { + let bedrock_message_content: Vec = message + .content + .into_iter() + .filter_map(|content| match content { + MessageContent::Text(text) => { + if !text.is_empty() { + Some(BedrockInnerContent::Text(text)) + } else { + None + } + } + _ => None, + }) + .collect(); + let bedrock_role = match message.role { + Role::User => bedrock::BedrockRole::User, + Role::Assistant => bedrock::BedrockRole::Assistant, + Role::System => unreachable!("System role should never occur here"), + }; + if let Some(last_message) = new_messages.last_mut() { + if last_message.role == bedrock_role { + last_message.content.extend(bedrock_message_content); + continue; + } + } + new_messages.push( + BedrockMessage::builder() + .role(bedrock_role) + .set_content(Some(bedrock_message_content)) + .build() + .expect("failed to build Bedrock message"), + ); + } + Role::System => { + if !system_message.is_empty() { + system_message.push_str("\n\n"); + } + system_message.push_str(&message.string_contents()); + } + } + } + + bedrock::Request { + model, + messages: new_messages, + max_tokens: max_output_tokens, + system: Some(system_message), + tools: vec![], + tool_choice: None, + metadata: None, + stop_sequences: Vec::new(), + temperature: request.temperature.or(Some(default_temperature)), + top_k: None, + top_p: None, + } +} + +// 
TODO: just call the ConverseOutput.usage() method: +// https://docs.rs/aws-sdk-bedrockruntime/latest/aws_sdk_bedrockruntime/operation/converse/struct.ConverseOutput.html#method.output +pub fn get_bedrock_tokens( + request: LanguageModelRequest, + cx: &App, +) -> BoxFuture<'static, Result> { + cx.background_executor() + .spawn(async move { + let messages = request.messages; + let mut tokens_from_images = 0; + let mut string_messages = Vec::with_capacity(messages.len()); + + for message in messages { + use language_model::MessageContent; + + let mut string_contents = String::new(); + + for content in message.content { + match content { + MessageContent::Text(text) => { + string_contents.push_str(&text); + } + MessageContent::Image(image) => { + tokens_from_images += image.estimate_tokens(); + } + MessageContent::ToolUse(_tool_use) => { + // TODO: Estimate token usage from tool uses. + } + MessageContent::ToolResult(tool_result) => { + string_contents.push_str(&tool_result.content); + } + } + } + + if !string_contents.is_empty() { + string_messages.push(tiktoken_rs::ChatCompletionRequestMessage { + role: match message.role { + Role::User => "user".into(), + Role::Assistant => "assistant".into(), + Role::System => "system".into(), + }, + content: Some(string_contents), + name: None, + function_call: None, + }); + } + } + + // Tiktoken doesn't yet support these models, so we manually use the + // same tokenizer as GPT-4. + tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages) + .map(|tokens| tokens + tokens_from_images) + }) + .boxed() +} + +pub async fn extract_tool_args_from_events( + name: String, + mut events: Pin>>>, + handle: Handle, +) -> Result>> { + handle + .spawn(async move { + let mut tool_use_index = None; + while let Some(event) = events.next().await { + if let BedrockStreamingResponse::ContentBlockStart(ContentBlockStartEvent { + content_block_index, + start, + .. + }) = event? + { + match start { + None => { + continue; + } + Some(start) => match start.as_tool_use() { + Ok(tool_use) => { + if name == tool_use.name { + tool_use_index = Some(content_block_index); + break; + } + } + Err(err) => { + return Err(anyhow!("Failed to parse tool use event: {:?}", err)); + } + }, + } + } + } + + let Some(tool_use_index) = tool_use_index else { + return Err(anyhow!("Tool is not used")); + }; + + Ok(events.filter_map(move |event| { + let result = match event { + Err(_err) => None, + Ok(output) => match output.clone() { + BedrockStreamingResponse::ContentBlockDelta(inner) => { + match inner.clone().delta { + Some(ContentBlockDelta::ToolUse(tool_use)) => { + if inner.content_block_index == tool_use_index { + Some(Ok(tool_use.input)) + } else { + None + } + } + _ => None, + } + } + _ => None, + }, + }; + + async move { result } + })) + }) + .await? 
+} + +pub fn map_to_language_model_completion_events( + events: Pin>>>, + handle: Handle, +) -> impl Stream> { + struct RawToolUse { + id: String, + name: String, + input_json: String, + } + + struct State { + events: Pin>>>, + tool_uses_by_index: HashMap, + } + + futures::stream::unfold( + State { + events, + tool_uses_by_index: HashMap::default(), + }, + move |mut state: State| { + let inner_handle = handle.clone(); + async move { + inner_handle + .spawn(async { + while let Some(event) = state.events.next().await { + match event { + Ok(event) => match event { + ConverseStreamOutput::ContentBlockDelta(cb_delta) => { + if let Some(ContentBlockDelta::Text(text_out)) = + cb_delta.delta + { + return Some(( + Some(Ok(LanguageModelCompletionEvent::Text( + text_out, + ))), + state, + )); + } else if let Some(ContentBlockDelta::ToolUse(text_out)) = + cb_delta.delta + { + if let Some(tool_use) = state + .tool_uses_by_index + .get_mut(&cb_delta.content_block_index) + { + tool_use.input_json.push_str(text_out.input()); + return Some((None, state)); + }; + + return Some((None, state)); + } else if cb_delta.delta.is_none() { + return Some((None, state)); + } + } + ConverseStreamOutput::ContentBlockStart(cb_start) => { + if let Some(start) = cb_start.start { + match start { + ContentBlockStart::ToolUse(text_out) => { + let tool_use = RawToolUse { + id: text_out.tool_use_id, + name: text_out.name, + input_json: String::new(), + }; + + state.tool_uses_by_index.insert( + cb_start.content_block_index, + tool_use, + ); + } + _ => {} + } + } + } + ConverseStreamOutput::ContentBlockStop(cb_stop) => { + if let Some(tool_use) = state + .tool_uses_by_index + .remove(&cb_stop.content_block_index) + { + return Some(( + Some(maybe!({ + Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: tool_use.id.into(), + name: tool_use.name, + input: if tool_use.input_json.is_empty() + { + Value::Null + } else { + serde_json::Value::from_str( + &tool_use.input_json, + ) + .map_err(|err| anyhow!(err))? + }, + }, + )) + })), + state, + )); + } + } + _ => {} + }, + Err(err) => return Some((Some(Err(anyhow!(err))), state)), + } + } + None + }) + .await + .unwrap() + } + }, + ) + .filter_map(|event| async move { event }) +} + +struct ConfigurationView { + access_key_id_editor: Entity, + secret_access_key_editor: Entity, + region_editor: Entity, + state: gpui::Entity, + load_credentials_task: Option>, +} + +impl ConfigurationView { + const PLACEHOLDER_TEXT: &'static str = "XXXXXXXXXXXXXXXXXXX"; + const PLACEHOLDER_REGION: &'static str = "us-east-1"; + + fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); + + let load_credentials_task = Some(cx.spawn({ + let state = state.clone(); + |this, mut cx| async move { + if let Some(task) = state + .update(&mut cx, |state, cx| state.authenticate(cx)) + .log_err() + { + // We don't log an error, because "not signed in" is also an error. 
+                    let _ = task.await;
+                }
+                this.update(&mut cx, |this, cx| {
+                    this.load_credentials_task = None;
+                    cx.notify();
+                })
+                .log_err();
+            }
+        }));
+
+        Self {
+            access_key_id_editor: cx.new(|cx| {
+                let mut editor = Editor::single_line(window, cx);
+                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
+                editor
+            }),
+            secret_access_key_editor: cx.new(|cx| {
+                let mut editor = Editor::single_line(window, cx);
+                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
+                editor
+            }),
+            region_editor: cx.new(|cx| {
+                let mut editor = Editor::single_line(window, cx);
+                editor.set_placeholder_text(Self::PLACEHOLDER_REGION, cx);
+                editor
+            }),
+            state,
+            load_credentials_task,
+        }
+    }
+
+    fn save_credentials(
+        &mut self,
+        _: &menu::Confirm,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let access_key_id = self
+            .access_key_id_editor
+            .read(cx)
+            .text(cx)
+            .to_string()
+            .trim()
+            .to_string();
+        let secret_access_key = self
+            .secret_access_key_editor
+            .read(cx)
+            .text(cx)
+            .to_string()
+            .trim()
+            .to_string();
+        let region = self
+            .region_editor
+            .read(cx)
+            .text(cx)
+            .to_string()
+            .trim()
+            .to_string();
+
+        let state = self.state.clone();
+        cx.spawn(|_, mut cx| async move {
+            state
+                .update(&mut cx, |state, cx| {
+                    let credentials: BedrockCredentials = BedrockCredentials {
+                        access_key_id: access_key_id.clone(),
+                        secret_access_key: secret_access_key.clone(),
+                        region: region.clone(),
+                    };
+
+                    state.set_credentials(credentials, cx)
+                })?
+                .await
+        })
+        .detach_and_log_err(cx);
+    }
+
+    fn reset_credentials(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        self.access_key_id_editor
+            .update(cx, |editor, cx| editor.set_text("", window, cx));
+        self.secret_access_key_editor
+            .update(cx, |editor, cx| editor.set_text("", window, cx));
+        self.region_editor
+            .update(cx, |editor, cx| editor.set_text("", window, cx));
+
+        let state = self.state.clone();
+        cx.spawn(|_, mut cx| async move {
+            state
+                .update(&mut cx, |state, cx| state.reset_credentials(cx))?
+                .await
+        })
+        .detach_and_log_err(cx);
+    }
+
+    fn make_text_style(&self, cx: &Context<Self>) -> TextStyle {
+        let settings = ThemeSettings::get_global(cx);
+        TextStyle {
+            color: cx.theme().colors().text,
+            font_family: settings.ui_font.family.clone(),
+            font_features: settings.ui_font.features.clone(),
+            font_fallbacks: settings.ui_font.fallbacks.clone(),
+            font_size: rems(0.875).into(),
+            font_weight: settings.ui_font.weight,
+            font_style: FontStyle::Normal,
+            line_height: relative(1.3),
+            background_color: None,
+            underline: None,
+            strikethrough: None,
+            white_space: WhiteSpace::Normal,
+            text_overflow: None,
+            text_align: Default::default(),
+            line_clamp: None,
+        }
+    }
+
+    fn render_aa_id_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
+        let text_style = self.make_text_style(cx);
+
+        EditorElement::new(
+            &self.access_key_id_editor,
+            EditorStyle {
+                background: cx.theme().colors().editor_background,
+                local_player: cx.theme().players().local(),
+                text: text_style,
+                ..Default::default()
+            },
+        )
+    }
+
+    fn render_sk_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
+        let text_style = self.make_text_style(cx);
+
+        EditorElement::new(
+            &self.secret_access_key_editor,
+            EditorStyle {
+                background: cx.theme().colors().editor_background,
+                local_player: cx.theme().players().local(),
+                text: text_style,
+                ..Default::default()
+            },
+        )
+    }
+
+    fn render_region_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
+        let text_style = self.make_text_style(cx);
+
+        EditorElement::new(
+            &self.region_editor,
+            EditorStyle {
+                background: cx.theme().colors().editor_background,
+                local_player: cx.theme().players().local(),
+                text: text_style,
+                ..Default::default()
+            },
+        )
+    }
+
+    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
+        !self.state.read(cx).is_authenticated()
+    }
+}
+
+impl Render for ConfigurationView {
+    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        const IAM_CONSOLE_URL: &str = "https://us-east-1.console.aws.amazon.com/iam/home";
+        const INSTRUCTIONS: [&str; 3] = [
+            "To use Zed's assistant with Bedrock, you need to add the Access Key ID, Secret Access Key and AWS Region. Follow these steps:",
+            "- Create an access key pair at:",
+            "- Paste your Access Key ID, Secret Key, and Region below and hit enter to use the assistant:",
+        ];
+        let env_var_set = self.state.read(cx).credentials_from_env;
+
+        if self.load_credentials_task.is_some() {
+            div().child(Label::new("Loading credentials...")).into_any()
+        } else if self.should_render_editor(cx) {
+            v_flex()
+                .size_full()
+                .on_action(cx.listener(Self::save_credentials))
+                .child(Label::new(INSTRUCTIONS[0]))
+                .child(h_flex().child(Label::new(INSTRUCTIONS[1])).child(
+                    Button::new("iam_console", IAM_CONSOLE_URL)
+                        .style(ButtonStyle::Subtle)
+                        .icon(IconName::ExternalLink)
+                        .icon_size(IconSize::XSmall)
+                        .icon_color(Color::Muted)
+                        .on_click(move |_, _window, cx| cx.open_url(IAM_CONSOLE_URL))
+                    )
+                )
+                .child(Label::new(INSTRUCTIONS[2]))
+                .child(
+                    h_flex()
+                        .gap_1()
+                        .child(self.render_aa_id_editor(cx))
+                        .child(self.render_sk_editor(cx))
+                        .child(self.render_region_editor(cx))
+                )
+                .child(
+                    Label::new(
+                        format!("You can also assign the {ZED_BEDROCK_ACCESS_KEY_ID_VAR}, {ZED_BEDROCK_SECRET_ACCESS_KEY_VAR} and {ZED_BEDROCK_REGION_VAR} environment variables and restart Zed."),
+                    )
+                    .size(LabelSize::Small),
+                )
+                .into_any()
+        } else {
+            h_flex()
+                .size_full()
+                .justify_between()
+                .child(
+                    h_flex()
+                        .gap_1()
+                        .child(Icon::new(IconName::Check).color(Color::Success))
+                        .child(Label::new(if env_var_set {
+                            format!("Access Key ID is set in {ZED_BEDROCK_ACCESS_KEY_ID_VAR}, Secret Key is set in {ZED_BEDROCK_SECRET_ACCESS_KEY_VAR}, Region is set in {ZED_BEDROCK_REGION_VAR} environment variables.")
+                        } else {
+                            "Credentials configured.".to_string()
+                        })),
+                )
+                .child(
+                    Button::new("reset-key", "Reset key")
+                        .icon(Some(IconName::Trash))
+                        .icon_size(IconSize::Small)
+                        .icon_position(IconPosition::Start)
+                        .disabled(env_var_set)
+                        .when(env_var_set, |this| {
+                            this.tooltip(Tooltip::text(format!("To reset your credentials, unset the {ZED_BEDROCK_ACCESS_KEY_ID_VAR}, {ZED_BEDROCK_SECRET_ACCESS_KEY_VAR}, and {ZED_BEDROCK_REGION_VAR} environment variables.")))
+                        })
+                        .on_click(cx.listener(|this, _, window, cx| this.reset_credentials(window, cx))),
+                )
+                .into_any()
+        }
+    }
+}
diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs
index 236b78527b29ef..9c9401532a8d3b 100644
--- a/crates/language_models/src/provider/cloud.rs
+++ b/crates/language_models/src/provider/cloud.rs
@@ -1,4 +1,3 @@
-use super::open_ai::count_open_ai_tokens;
 use anthropic::AnthropicError;
 use anyhow::{anyhow, Result};
 use client::{
@@ -11,10 +10,7 @@ use futures::{
     future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
     TryStreamExt as _,
 };
-use gpui::{
-    AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal,
-    Subscription, Task,
-};
+use gpui::{AnyElement, AnyView, App, AsyncApp, Context, Entity, Subscription, Task};
 use http_client::{AsyncBody, HttpClient, Method, Response, StatusCode};
 use language_model::{
     AuthenticateError, CloudModel, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId,
@@ -23,31 +19,28 @@ use language_model::{
     ZED_CLOUD_PROVIDER_ID,
 };
 use language_model::{
-    LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider,
+    LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider, LlmApiToken,
+    MaxMonthlySpendReachedError, PaymentRequiredError, RefreshLlmTokenListener,
 };
-use proto::TypedEnvelope;
 use schemars::JsonSchema;
 use serde::{de::DeserializeOwned,
Deserialize, Serialize}; use serde_json::value::RawValue; use settings::{Settings, SettingsStore}; -use smol::{ - io::{AsyncReadExt, BufReader}, - lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}, -}; -use std::fmt; +use smol::io::{AsyncReadExt, BufReader}; use std::{ future, sync::{Arc, LazyLock}, }; use strum::IntoEnumIterator; -use thiserror::Error; use ui::{prelude::*, TintColor}; -use crate::provider::anthropic::map_to_language_model_completion_events; +use crate::provider::anthropic::{ + count_anthropic_tokens, into_anthropic, map_to_language_model_completion_events, +}; +use crate::provider::google::into_google; +use crate::provider::open_ai::{count_open_ai_tokens, into_open_ai}; use crate::AllLanguageModelSettings; -use super::anthropic::count_anthropic_tokens; - pub const PROVIDER_NAME: &str = "Zed"; const ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: Option<&str> = @@ -100,44 +93,6 @@ pub struct AvailableModel { pub extra_beta_headers: Vec, } -struct GlobalRefreshLlmTokenListener(Entity); - -impl Global for GlobalRefreshLlmTokenListener {} - -pub struct RefreshLlmTokenEvent; - -pub struct RefreshLlmTokenListener { - _llm_token_subscription: client::Subscription, -} - -impl EventEmitter for RefreshLlmTokenListener {} - -impl RefreshLlmTokenListener { - pub fn register(client: Arc, cx: &mut App) { - let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, cx)); - cx.set_global(GlobalRefreshLlmTokenListener(listener)); - } - - pub fn global(cx: &App) -> Entity { - GlobalRefreshLlmTokenListener::global(cx).0.clone() - } - - fn new(client: Arc, cx: &mut Context) -> Self { - Self { - _llm_token_subscription: client - .add_message_handler(cx.weak_entity(), Self::handle_refresh_llm_token), - } - } - - async fn handle_refresh_llm_token( - this: Entity, - _: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result<()> { - this.update(&mut cx, |_this, cx| cx.emit(RefreshLlmTokenEvent)) - } -} - pub struct CloudLanguageModelProvider { client: Arc, state: gpui::Entity, @@ -474,33 +429,6 @@ pub struct CloudLanguageModel { request_limiter: RateLimiter, } -#[derive(Clone, Default)] -pub struct LlmApiToken(Arc>>); - -#[derive(Error, Debug)] -pub struct PaymentRequiredError; - -impl fmt::Display for PaymentRequiredError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "Payment required to use this language model. Please upgrade your account." - ) - } -} - -#[derive(Error, Debug)] -pub struct MaxMonthlySpendReachedError; - -impl fmt::Display for MaxMonthlySpendReachedError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "Maximum spending limit reached for this month. For more usage, increase your spending limit." 
- ) - } -} - impl CloudLanguageModel { async fn perform_llm_completion( client: Arc, @@ -612,7 +540,7 @@ impl LanguageModel for CloudLanguageModel { CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx), CloudModel::Google(model) => { let client = self.client.clone(); - let request = request.into_google(model.id().into()); + let request = into_google(request, model.id().into()); let request = google_ai::CountTokensRequest { contents: request.contents, }; @@ -638,7 +566,8 @@ impl LanguageModel for CloudLanguageModel { ) -> BoxFuture<'static, Result>>> { match &self.model { CloudModel::Anthropic(model) => { - let request = request.into_anthropic( + let request = into_anthropic( + request, model.id().into(), model.default_temperature(), model.max_output_tokens(), @@ -666,7 +595,7 @@ impl LanguageModel for CloudLanguageModel { } CloudModel::OpenAi(model) => { let client = self.client.clone(); - let request = request.into_open_ai(model.id().into(), model.max_output_tokens()); + let request = into_open_ai(request, model.id().into(), model.max_output_tokens()); let llm_api_token = self.llm_api_token.clone(); let future = self.request_limiter.stream(async move { let response = Self::perform_llm_completion( @@ -693,7 +622,7 @@ impl LanguageModel for CloudLanguageModel { } CloudModel::Google(model) => { let client = self.client.clone(); - let request = request.into_google(model.id().into()); + let request = into_google(request, model.id().into()); let llm_api_token = self.llm_api_token.clone(); let future = self.request_limiter.stream(async move { let response = Self::perform_llm_completion( @@ -736,7 +665,8 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let mut request = request.into_anthropic( + let mut request = into_anthropic( + request, model.tool_model_id().into(), model.default_temperature(), model.max_output_tokens(), @@ -776,7 +706,7 @@ impl LanguageModel for CloudLanguageModel { } CloudModel::OpenAi(model) => { let mut request = - request.into_open_ai(model.id().into(), model.max_output_tokens()); + into_open_ai(request, model.id().into(), model.max_output_tokens()); request.tool_choice = Some(open_ai::ToolChoice::Other( open_ai::ToolDefinition::Function { function: open_ai::FunctionDefinition { @@ -844,30 +774,6 @@ fn response_lines( ) } -impl LlmApiToken { - pub async fn acquire(&self, client: &Arc) -> Result { - let lock = self.0.upgradable_read().await; - if let Some(token) = lock.as_ref() { - Ok(token.to_string()) - } else { - Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await - } - } - - pub async fn refresh(&self, client: &Arc) -> Result { - Self::fetch(self.0.write().await, client).await - } - - async fn fetch<'a>( - mut lock: RwLockWriteGuard<'a, Option>, - client: &Arc, - ) -> Result { - let response = client.request(proto::GetLlmToken {}).await?; - *lock = Some(response.token.clone()); - Ok(response.token.clone()) - } -} - struct ConfigurationView { state: gpui::Entity, } diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 830e94ecb5c123..84d34307cb8694 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -322,7 +322,11 @@ impl LanguageModel for DeepSeekLanguageModel { request: LanguageModelRequest, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let request = request.into_deepseek(self.model.id().to_string(), self.max_output_tokens()); + let request = 
into_deepseek( + request, + self.model.id().to_string(), + self.max_output_tokens(), + ); let stream = self.stream_completion(request, cx); async move { @@ -357,8 +361,11 @@ impl LanguageModel for DeepSeekLanguageModel { schema: serde_json::Value, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let mut deepseek_request = - request.into_deepseek(self.model.id().to_string(), self.max_output_tokens()); + let mut deepseek_request = into_deepseek( + request, + self.model.id().to_string(), + self.max_output_tokens(), + ); deepseek_request.tools = vec![deepseek::ToolDefinition::Function { function: deepseek::FunctionDefinition { @@ -402,6 +409,93 @@ impl LanguageModel for DeepSeekLanguageModel { } } +pub fn into_deepseek( + request: LanguageModelRequest, + model: String, + max_output_tokens: Option, +) -> deepseek::Request { + let is_reasoner = model == "deepseek-reasoner"; + + let len = request.messages.len(); + let merged_messages = + request + .messages + .into_iter() + .fold(Vec::with_capacity(len), |mut acc, msg| { + let role = msg.role; + let content = msg.string_contents(); + + if is_reasoner { + if let Some(last_msg) = acc.last_mut() { + match (last_msg, role) { + (deepseek::RequestMessage::User { content: last }, Role::User) => { + last.push(' '); + last.push_str(&content); + return acc; + } + + ( + deepseek::RequestMessage::Assistant { + content: last_content, + .. + }, + Role::Assistant, + ) => { + *last_content = last_content + .take() + .map(|c| { + let mut s = + String::with_capacity(c.len() + content.len() + 1); + s.push_str(&c); + s.push(' '); + s.push_str(&content); + s + }) + .or(Some(content)); + + return acc; + } + _ => {} + } + } + } + + acc.push(match role { + Role::User => deepseek::RequestMessage::User { content }, + Role::Assistant => deepseek::RequestMessage::Assistant { + content: Some(content), + tool_calls: Vec::new(), + }, + Role::System => deepseek::RequestMessage::System { content }, + }); + acc + }); + + deepseek::Request { + model, + messages: merged_messages, + stream: true, + max_tokens: max_output_tokens, + temperature: if is_reasoner { + None + } else { + request.temperature + }, + response_format: None, + tools: request + .tools + .into_iter() + .map(|tool| deepseek::ToolDefinition::Function { + function: deepseek::FunctionDefinition { + name: tool.name, + description: Some(tool.description), + parameters: Some(tool.input_schema), + }, + }) + .collect(), + } +} + struct ConfigurationView { api_key_editor: Entity, state: Entity, diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 0bf5001f794abe..934a06af55271d 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -272,7 +272,7 @@ impl LanguageModel for GoogleLanguageModel { request: LanguageModelRequest, cx: &App, ) -> BoxFuture<'static, Result> { - let request = request.into_google(self.model.id().to_string()); + let request = into_google(request, self.model.id().to_string()); let http_client = self.http_client.clone(); let api_key = self.state.read(cx).api_key.clone(); @@ -303,7 +303,7 @@ impl LanguageModel for GoogleLanguageModel { 'static, Result>>, > { - let request = request.into_google(self.model.id().to_string()); + let request = into_google(request, self.model.id().to_string()); let http_client = self.http_client.clone(); let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| { @@ -341,6 +341,38 @@ impl LanguageModel for GoogleLanguageModel { } } +pub fn 
into_google( + request: LanguageModelRequest, + model: String, +) -> google_ai::GenerateContentRequest { + google_ai::GenerateContentRequest { + model, + contents: request + .messages + .into_iter() + .map(|msg| google_ai::Content { + parts: vec![google_ai::Part::TextPart(google_ai::TextPart { + text: msg.string_contents(), + })], + role: match msg.role { + Role::User => google_ai::Role::User, + Role::Assistant => google_ai::Role::Model, + Role::System => google_ai::Role::User, // Google AI doesn't have a system role + }, + }) + .collect(), + generation_config: Some(google_ai::GenerationConfig { + candidate_count: Some(1), + stop_sequences: Some(request.stop), + max_output_tokens: None, + temperature: request.temperature.map(|t| t as f64).or(Some(1.0)), + top_p: None, + top_k: None, + }), + safety_settings: None, + } +} + pub fn count_google_tokens( request: LanguageModelRequest, cx: &App, diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 80a5988cffaa03..55a6413ef623fe 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -334,7 +334,11 @@ impl LanguageModel for MistralLanguageModel { request: LanguageModelRequest, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let request = request.into_mistral(self.model.id().to_string(), self.max_output_tokens()); + let request = into_mistral( + request, + self.model.id().to_string(), + self.max_output_tokens(), + ); let stream = self.stream_completion(request, cx); async move { @@ -369,7 +373,7 @@ impl LanguageModel for MistralLanguageModel { schema: serde_json::Value, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let mut request = request.into_mistral(self.model.id().into(), self.max_output_tokens()); + let mut request = into_mistral(request, self.model.id().into(), self.max_output_tokens()); request.tools = vec![mistral::ToolDefinition::Function { function: mistral::FunctionDefinition { name: tool_name.clone(), @@ -411,6 +415,52 @@ impl LanguageModel for MistralLanguageModel { } } +pub fn into_mistral( + request: LanguageModelRequest, + model: String, + max_output_tokens: Option, +) -> mistral::Request { + let len = request.messages.len(); + let merged_messages = + request + .messages + .into_iter() + .fold(Vec::with_capacity(len), |mut acc, msg| { + let role = msg.role; + let content = msg.string_contents(); + + acc.push(match role { + Role::User => mistral::RequestMessage::User { content }, + Role::Assistant => mistral::RequestMessage::Assistant { + content: Some(content), + tool_calls: Vec::new(), + }, + Role::System => mistral::RequestMessage::System { content }, + }); + acc + }); + + mistral::Request { + model, + messages: merged_messages, + stream: true, + max_tokens: max_output_tokens, + temperature: request.temperature, + response_format: None, + tools: request + .tools + .into_iter() + .map(|tool| mistral::ToolDefinition::Function { + function: mistral::FunctionDefinition { + name: tool.name, + description: Some(tool.description), + parameters: Some(tool.input_schema), + }, + }) + .collect(), + } +} + struct ConfigurationView { api_key_editor: Entity, state: gpui::Entity, diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 3e46983ebb7518..c249af0bb7038a 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -318,7 +318,7 @@ impl LanguageModel for OpenAiLanguageModel { 'static, 
Result>>, > { - let request = request.into_open_ai(self.model.id().into(), self.max_output_tokens()); + let request = into_open_ai(request, self.model.id().into(), self.max_output_tokens()); let completions = self.stream_completion(request, cx); async move { Ok(open_ai::extract_text_from_events(completions.await?) @@ -336,7 +336,7 @@ impl LanguageModel for OpenAiLanguageModel { schema: serde_json::Value, cx: &AsyncApp, ) -> BoxFuture<'static, Result>>> { - let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens()); + let mut request = into_open_ai(request, self.model.id().into(), self.max_output_tokens()); request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function { function: FunctionDefinition { name: tool_name.clone(), @@ -366,6 +366,39 @@ impl LanguageModel for OpenAiLanguageModel { } } +pub fn into_open_ai( + request: LanguageModelRequest, + model: String, + max_output_tokens: Option, +) -> open_ai::Request { + let stream = !model.starts_with("o1-"); + open_ai::Request { + model, + messages: request + .messages + .into_iter() + .map(|msg| match msg.role { + Role::User => open_ai::RequestMessage::User { + content: msg.string_contents(), + }, + Role::Assistant => open_ai::RequestMessage::Assistant { + content: Some(msg.string_contents()), + tool_calls: Vec::new(), + }, + Role::System => open_ai::RequestMessage::System { + content: msg.string_contents(), + }, + }) + .collect(), + stream, + stop: request.stop, + temperature: request.temperature.unwrap_or(1.0), + max_tokens: max_output_tokens, + tools: Vec::new(), + tool_choice: None, + } +} + pub fn count_open_ai_tokens( request: LanguageModelRequest, model: open_ai::Model, diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 740bfecb5e0c7c..a274d8e26212d9 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -11,6 +11,7 @@ use settings::{update_settings_file, Settings, SettingsSources}; use crate::provider::{ self, anthropic::AnthropicSettings, + bedrock::AmazonBedrockSettings, cloud::{self, ZedDotDevSettings}, copilot_chat::CopilotChatSettings, deepseek::DeepSeekSettings, @@ -57,6 +58,7 @@ pub fn init(fs: Arc, cx: &mut App) { #[derive(Default)] pub struct AllLanguageModelSettings { pub anthropic: AnthropicSettings, + pub bedrock: AmazonBedrockSettings, pub ollama: OllamaSettings, pub openai: OpenAiSettings, pub zed_dot_dev: ZedDotDevSettings, diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index 36e94ad2508e22..f52b8f1f9019b8 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -190,5 +190,9 @@ operator: "/" @operator (parameter (identifier) @variable.parameter) -(attribute_item) @attribute -(inner_attribute_item) @attribute +(attribute_item (attribute (identifier) @attribute)) +(inner_attribute_item (attribute (identifier) @attribute)) +; Match nested snake case identifiers in attribute items. +(token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) +; Override the attribute match for paths in scoped identifiers. 
+(token_tree (identifier) @variable "::") diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index e57ca0ea4f91c4..ae932374640706 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -131,7 +131,6 @@ pub struct MultiBufferDiffHunk { pub diff_base_byte_range: Range, /// Whether or not this hunk also appears in the 'secondary diff'. pub secondary_status: DiffHunkSecondaryStatus, - pub secondary_diff_base_byte_range: Option>, } impl MultiBufferDiffHunk { @@ -1448,7 +1447,7 @@ impl MultiBuffer { excerpt.range.context.start, )) } - + /// Sets excerpts, returns `true` if at least one new excerpt was added. pub fn set_excerpts_for_path( &mut self, path: PathKey, @@ -1456,7 +1455,7 @@ impl MultiBuffer { ranges: Vec>, context_line_count: u32, cx: &mut Context, - ) { + ) -> bool { let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); let mut insert_after = self @@ -1475,6 +1474,7 @@ impl MultiBuffer { let mut new_excerpt_ids = Vec::new(); let mut to_remove = Vec::new(); let mut to_insert = Vec::new(); + let mut added_a_new_excerpt = false; let snapshot = self.snapshot(cx); let mut excerpts_cursor = snapshot.excerpts.cursor::>(&()); @@ -1489,6 +1489,7 @@ impl MultiBuffer { continue; } (Some(_), None) => { + added_a_new_excerpt = true; to_insert.push(new_iter.next().unwrap()); continue; } @@ -1552,6 +1553,8 @@ impl MultiBuffer { } else { self.buffers_by_path.insert(path, new_excerpt_ids); } + + added_a_new_excerpt } pub fn paths(&self) -> impl Iterator + '_ { @@ -3502,7 +3505,6 @@ impl MultiBufferSnapshot { buffer_range: hunk.buffer_range.clone(), diff_base_byte_range: hunk.diff_base_byte_range.clone(), secondary_status: hunk.secondary_status, - secondary_diff_base_byte_range: hunk.secondary_diff_base_byte_range, }) }) } @@ -3872,7 +3874,6 @@ impl MultiBufferSnapshot { buffer_range: hunk.buffer_range.clone(), diff_base_byte_range: hunk.diff_base_byte_range.clone(), secondary_status: hunk.secondary_status, - secondary_diff_base_byte_range: hunk.secondary_diff_base_byte_range, }); } } diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 115aa975197fb7..3d119f9b887bd2 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -15,7 +15,7 @@ use language::{Outline, OutlineItem}; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; use settings::Settings; -use theme::{color_alpha, ActiveTheme, ThemeSettings}; +use theme::{ActiveTheme, ThemeSettings}; use ui::{prelude::*, ListItem, ListItemSpacing}; use util::ResultExt; use workspace::{DismissDecision, ModalView}; @@ -332,7 +332,7 @@ pub fn render_item( cx: &App, ) -> StyledText { let highlight_style = HighlightStyle { - background_color: Some(color_alpha(cx.theme().colors().text_accent, 0.3)), + background_color: Some(cx.theme().colors().text_accent.alpha(0.3)), ..Default::default() }; let custom_highlights = match_ranges diff --git a/crates/panel/src/panel.rs b/crates/panel/src/panel.rs index 934d8281a31f78..59572c402be3ea 100644 --- a/crates/panel/src/panel.rs +++ b/crates/panel/src/panel.rs @@ -49,6 +49,7 @@ pub fn panel_button(label: impl Into) -> ui::Button { let id = ElementId::Name(label.clone().to_lowercase().replace(' ', "_").into()); ui::Button::new(id, label) .label_size(ui::LabelSize::Small) + .icon_size(ui::IconSize::Small) // TODO: Change this once we use on_surface_bg in button_like .layer(ui::ElevationIndex::ModalSurface) .size(ui::ButtonSize::Compact) diff --git 
a/crates/project/src/git.rs b/crates/project/src/git.rs index 505faba60ce7b6..84b4dd9ff9d66f 100644 --- a/crates/project/src/git.rs +++ b/crates/project/src/git.rs @@ -5,7 +5,7 @@ use anyhow::{Context as _, Result}; use client::ProjectId; use futures::channel::{mpsc, oneshot}; use futures::StreamExt as _; -use git::repository::{Branch, CommitDetails, ResetMode}; +use git::repository::{Branch, CommitDetails, PushOptions, Remote, ResetMode}; use git::{ repository::{GitRepository, RepoPath}, status::{GitSummary, TrackedSummary}, @@ -74,6 +74,18 @@ pub enum Message { Stage(GitRepo, Vec), Unstage(GitRepo, Vec), SetIndexText(GitRepo, RepoPath, Option), + Push { + repo: GitRepo, + branch_name: SharedString, + remote_name: SharedString, + options: Option, + }, + Pull { + repo: GitRepo, + branch_name: SharedString, + remote_name: SharedString, + }, + Fetch(GitRepo), } pub enum GitEvent { @@ -107,6 +119,10 @@ impl GitStore { } pub fn init(client: &AnyProtoClient) { + client.add_entity_request_handler(Self::handle_get_remotes); + client.add_entity_request_handler(Self::handle_push); + client.add_entity_request_handler(Self::handle_pull); + client.add_entity_request_handler(Self::handle_fetch); client.add_entity_request_handler(Self::handle_stage); client.add_entity_request_handler(Self::handle_unstage); client.add_entity_request_handler(Self::handle_commit); @@ -242,8 +258,10 @@ impl GitStore { mpsc::unbounded::<(Message, oneshot::Sender>)>(); cx.spawn(|_, cx| async move { while let Some((msg, respond)) = update_receiver.next().await { - let result = cx.background_spawn(Self::process_git_msg(msg)).await; - respond.send(result).ok(); + if !respond.is_canceled() { + let result = cx.background_spawn(Self::process_git_msg(msg)).await; + respond.send(result).ok(); + } } }) .detach(); @@ -252,6 +270,94 @@ impl GitStore { async fn process_git_msg(msg: Message) -> Result<()> { match msg { + Message::Fetch(repo) => { + match repo { + GitRepo::Local(git_repository) => git_repository.fetch()?, + GitRepo::Remote { + project_id, + client, + worktree_id, + work_directory_id, + } => { + client + .request(proto::Fetch { + project_id: project_id.0, + worktree_id: worktree_id.to_proto(), + work_directory_id: work_directory_id.to_proto(), + }) + .await + .context("sending fetch request")?; + } + } + Ok(()) + } + + Message::Pull { + repo, + branch_name, + remote_name, + } => { + match repo { + GitRepo::Local(git_repository) => { + git_repository.pull(&branch_name, &remote_name)? + } + GitRepo::Remote { + project_id, + client, + worktree_id, + work_directory_id, + } => { + client + .request(proto::Pull { + project_id: project_id.0, + worktree_id: worktree_id.to_proto(), + work_directory_id: work_directory_id.to_proto(), + branch_name: branch_name.to_string(), + remote_name: remote_name.to_string(), + }) + .await + .context("sending pull request")?; + } + } + Ok(()) + } + Message::Push { + repo, + branch_name, + remote_name, + options, + } => { + match repo { + GitRepo::Local(git_repository) => { + git_repository.push(&branch_name, &remote_name, options)? 
+ } + GitRepo::Remote { + project_id, + client, + worktree_id, + work_directory_id, + } => { + client + .request(proto::Push { + project_id: project_id.0, + worktree_id: worktree_id.to_proto(), + work_directory_id: work_directory_id.to_proto(), + branch_name: branch_name.to_string(), + remote_name: remote_name.to_string(), + options: options.map(|options| match options { + PushOptions::Force => proto::push::PushOptions::Force, + PushOptions::SetUpstream => { + proto::push::PushOptions::SetUpstream + } + } + as i32), + }) + .await + .context("sending push request")?; + } + } + Ok(()) + } Message::Stage(repo, paths) => { match repo { GitRepo::Local(repo) => repo.stage_paths(&paths)?, @@ -413,6 +519,73 @@ impl GitStore { } } + async fn handle_fetch( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); + let repository_handle = + Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; + + repository_handle + .update(&mut cx, |repository_handle, _cx| repository_handle.fetch())? + .await??; + Ok(proto::Ack {}) + } + + async fn handle_push( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); + let repository_handle = + Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; + + let options = envelope + .payload + .options + .as_ref() + .map(|_| match envelope.payload.options() { + proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream, + proto::push::PushOptions::Force => git::repository::PushOptions::Force, + }); + + let branch_name = envelope.payload.branch_name.into(); + let remote_name = envelope.payload.remote_name.into(); + + repository_handle + .update(&mut cx, |repository_handle, _cx| { + repository_handle.push(branch_name, remote_name, options) + })? + .await??; + Ok(proto::Ack {}) + } + + async fn handle_pull( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); + let repository_handle = + Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; + + let branch_name = envelope.payload.branch_name.into(); + let remote_name = envelope.payload.remote_name.into(); + + repository_handle + .update(&mut cx, |repository_handle, _cx| { + repository_handle.pull(branch_name, remote_name) + })? + .await??; + Ok(proto::Ack {}) + } + async fn handle_stage( this: Entity, envelope: TypedEnvelope, @@ -509,6 +682,34 @@ impl GitStore { Ok(proto::Ack {}) } + async fn handle_get_remotes( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); + let repository_handle = + Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; + + let branch_name = envelope.payload.branch_name; + + let remotes = repository_handle + .update(&mut cx, |repository_handle, cx| { + repository_handle.get_remotes(branch_name, cx) + })? 
+ .await?; + + Ok(proto::GetRemotesResponse { + remotes: remotes + .into_iter() + .map(|remotes| proto::get_remotes_response::Remote { + name: remotes.name.to_string(), + }) + .collect::>(), + }) + } + async fn handle_show( this: Entity, envelope: TypedEnvelope, @@ -648,7 +849,7 @@ impl Repository { (self.worktree_id, self.repository_entry.work_directory_id()) } - pub fn branch(&self) -> Option<&Branch> { + pub fn current_branch(&self) -> Option<&Branch> { self.repository_entry.branch() } @@ -802,35 +1003,19 @@ impl Repository { commit: &str, paths: Vec, ) -> oneshot::Receiver> { - let (result_tx, result_rx) = futures::channel::oneshot::channel(); - let commit = commit.to_string().into(); - self.update_sender - .unbounded_send(( - Message::CheckoutFiles { - repo: self.git_repo.clone(), - commit, - paths, - }, - result_tx, - )) - .ok(); - result_rx + self.send_message(Message::CheckoutFiles { + repo: self.git_repo.clone(), + commit: commit.to_string().into(), + paths, + }) } pub fn reset(&self, commit: &str, reset_mode: ResetMode) -> oneshot::Receiver> { - let (result_tx, result_rx) = futures::channel::oneshot::channel(); - let commit = commit.to_string().into(); - self.update_sender - .unbounded_send(( - Message::Reset { - repo: self.git_repo.clone(), - commit, - reset_mode, - }, - result_tx, - )) - .ok(); - result_rx + self.send_message(Message::Reset { + repo: self.git_repo.clone(), + commit: commit.to_string().into(), + reset_mode, + }) } pub fn show(&self, commit: &str, cx: &Context) -> Task> { @@ -987,18 +1172,41 @@ impl Repository { message: SharedString, name_and_email: Option<(SharedString, SharedString)>, ) -> oneshot::Receiver> { - let (result_tx, result_rx) = futures::channel::oneshot::channel(); - self.update_sender - .unbounded_send(( - Message::Commit { - git_repo: self.git_repo.clone(), - message, - name_and_email, - }, - result_tx, - )) - .ok(); - result_rx + self.send_message(Message::Commit { + git_repo: self.git_repo.clone(), + message, + name_and_email, + }) + } + + pub fn fetch(&self) -> oneshot::Receiver> { + self.send_message(Message::Fetch(self.git_repo.clone())) + } + + pub fn push( + &self, + branch: SharedString, + remote: SharedString, + options: Option, + ) -> oneshot::Receiver> { + self.send_message(Message::Push { + repo: self.git_repo.clone(), + branch_name: branch, + remote_name: remote, + options, + }) + } + + pub fn pull( + &self, + branch: SharedString, + remote: SharedString, + ) -> oneshot::Receiver> { + self.send_message(Message::Pull { + repo: self.git_repo.clone(), + branch_name: branch, + remote_name: remote, + }) } pub fn set_index_text( @@ -1006,13 +1214,49 @@ impl Repository { path: &RepoPath, content: Option, ) -> oneshot::Receiver> { + self.send_message(Message::SetIndexText( + self.git_repo.clone(), + path.clone(), + content, + )) + } + + pub fn get_remotes(&self, branch_name: Option, cx: &App) -> Task>> { + match self.git_repo.clone() { + GitRepo::Local(git_repository) => { + cx.background_spawn( + async move { git_repository.get_remotes(branch_name.as_deref()) }, + ) + } + GitRepo::Remote { + project_id, + client, + worktree_id, + work_directory_id, + } => cx.background_spawn(async move { + let response = client + .request(proto::GetRemotes { + project_id: project_id.0, + worktree_id: worktree_id.to_proto(), + work_directory_id: work_directory_id.to_proto(), + branch_name, + }) + .await?; + + Ok(response + .remotes + .into_iter() + .map(|remotes| git::repository::Remote { + name: remotes.name.into(), + }) + .collect()) + }), + } + } + + fn 
send_message(&self, message: Message) -> oneshot::Receiver> { let (result_tx, result_rx) = futures::channel::oneshot::channel(); - self.update_sender - .unbounded_send(( - Message::SetIndexText(self.git_repo.clone(), path.clone(), content), - result_tx, - )) - .ok(); + self.update_sender.unbounded_send((message, result_tx)).ok(); result_rx } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 1ac058939c36d3..35d031f75111e8 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1923,20 +1923,20 @@ impl LocalLspStore { version: 0, snapshot: initial_snapshot.clone(), }; - let previous_snapshots = self - .buffer_snapshots + self.buffer_snapshots .entry(buffer_id) .or_default() - .insert(server.server_id(), vec![snapshot]); + .entry(server.server_id()) + .or_insert_with(|| { + server.register_buffer( + uri.clone(), + adapter.language_id(&language.name()), + 0, + initial_snapshot.text(), + ); - if previous_snapshots.is_none() { - server.register_buffer( - uri.clone(), - adapter.language_id(&language.name()), - 0, - initial_snapshot.text(), - ); - } + vec![snapshot] + }); } } } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 815ad6b634259d..8f209dd5f6f569 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -4959,14 +4959,14 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) { assert_eq!( fs.paths(true), vec![ - PathBuf::from("/"), - PathBuf::from("/one"), - PathBuf::from("/one/two"), - PathBuf::from("/one/two/c.rs"), - PathBuf::from("/one/two/three"), - PathBuf::from("/one/two/three/a.txt"), - PathBuf::from("/one/two/three/b.."), - PathBuf::from("/one/two/three/four"), + PathBuf::from(path!("/")), + PathBuf::from(path!("/one")), + PathBuf::from(path!("/one/two")), + PathBuf::from(path!("/one/two/c.rs")), + PathBuf::from(path!("/one/two/three")), + PathBuf::from(path!("/one/two/three/a.txt")), + PathBuf::from(path!("/one/two/three/b..")), + PathBuf::from(path!("/one/two/three/four")), ] ); diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index e2f8541161253d..dd4a79edd3ade7 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -946,12 +946,17 @@ impl WorktreeStore { upstream: proto_branch.upstream.map(|upstream| { git::repository::Upstream { ref_name: upstream.ref_name.into(), - tracking: upstream.tracking.map(|tracking| { - git::repository::UpstreamTracking { - ahead: tracking.ahead as u32, - behind: tracking.behind as u32, - } - }), + tracking: upstream + .tracking + .map(|tracking| { + git::repository::UpstreamTracking::Tracked( + git::repository::UpstreamTrackingStatus { + ahead: tracking.ahead as u32, + behind: tracking.behind as u32, + }, + ) + }) + .unwrap_or(git::repository::UpstreamTracking::Gone), } }), most_recent_commit: proto_branch.most_recent_commit.map(|commit| { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 7d87c3613722d6..e19ffc51c0620c 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -18,7 +18,7 @@ use file_icons::FileIcons; use git::status::GitSummary; use gpui::{ actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, - AnyElement, App, AsyncWindowContext, Bounds, ClipboardItem, Context, DismissEvent, Div, + AnyElement, App, ArcCow, AsyncWindowContext, Bounds, ClipboardItem, Context, 
DismissEvent, Div, DragMoveEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, Hsla, InteractiveElement, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render, ScrollStrategy, Stateful, @@ -265,7 +265,7 @@ struct ItemColors { default: Hsla, hover: Hsla, drag_over: Hsla, - marked_active: Hsla, + marked: Hsla, focused: Hsla, } @@ -274,10 +274,10 @@ fn get_item_color(cx: &App) -> ItemColors { ItemColors { default: colors.panel_background, - hover: colors.ghost_element_hover, - drag_over: colors.drop_target_background, - marked_active: colors.element_selected, + hover: colors.element_hover, + marked: colors.element_selected, focused: colors.panel_focused_border, + drag_over: colors.drop_target_background, } } @@ -302,6 +302,9 @@ impl ProjectPanel { this.reveal_entry(project.clone(), *entry_id, true, cx); } } + project::Event::ActiveEntryChanged(None) => { + this.marked_entries.clear(); + } project::Event::RevealInProjectPanel(entry_id) => { this.reveal_entry(project.clone(), *entry_id, false, cx); cx.emit(PanelEvent::Activate); @@ -2713,7 +2716,7 @@ impl ProjectPanel { else { continue; }; - let path = Arc::from(Path::new(path_name)); + let path = ArcCow::Borrowed(Path::new(path_name)); let depth = 0; (depth, path) } else if entry.is_file() { @@ -2725,7 +2728,7 @@ impl ProjectPanel { else { continue; }; - let path = Arc::from(Path::new(path_name)); + let path = ArcCow::Borrowed(Path::new(path_name)); let depth = entry.path.ancestors().count() - 1; (depth, path) } else { @@ -2745,11 +2748,11 @@ impl ProjectPanel { .ok() .and_then(|suffix| { let full_path = Path::new(root_folded_entry.file_name()?); - Some(Arc::::from(full_path.join(suffix))) + Some(ArcCow::Owned(Arc::::from(full_path.join(suffix)))) }) }) - .or_else(|| entry.path.file_name().map(Path::new).map(Arc::from)) - .unwrap_or_else(|| entry.path.clone()); + .or_else(|| entry.path.file_name().map(Path::new).map(ArcCow::Borrowed)) + .unwrap_or_else(|| ArcCow::Owned(entry.path.clone())); let depth = path.components().count(); (depth, path) }; @@ -3562,18 +3565,16 @@ impl ProjectPanel { marked_selections: selections, }; - let bg_color = if is_marked || is_active { - item_colors.marked_active + let bg_color = if is_marked { + item_colors.marked } else { item_colors.default }; - let bg_hover_color = if self.mouse_down || is_marked || is_active { - item_colors.marked_active - } else if !is_active { - item_colors.hover + let bg_hover_color = if is_marked { + item_colors.marked } else { - item_colors.default + item_colors.hover }; let border_color = @@ -4251,16 +4252,11 @@ impl ProjectPanel { let worktree_id = worktree.id(); self.expand_entry(worktree_id, entry_id, cx); self.update_visible_entries(Some((worktree_id, entry_id)), cx); - - if self.marked_entries.len() == 1 - && self - .marked_entries - .first() - .filter(|entry| entry.entry_id == entry_id) - .is_none() - { - self.marked_entries.clear(); - } + self.marked_entries.clear(); + self.marked_entries.insert(SelectedEntry { + worktree_id, + entry_id, + }); self.autoscroll(cx); cx.notify(); } @@ -7349,7 +7345,7 @@ mod tests { select_path(&panel, "root/new", cx); assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), - &["v root", " new <== selected"] + &["v root", " new <== selected <== marked"] ); panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx)); panel.update_in(cx, |panel, window, cx| { @@ -7783,7 +7779,7 @@ mod tests { " > .git", " v dir_1", " > 
gitignored_dir", - " file_1.py <== selected", + " file_1.py <== selected <== marked", " file_2.py", " file_3.py", " > dir_2", @@ -7809,7 +7805,7 @@ mod tests { " file_2.py", " file_3.py", " v dir_2", - " file_1.py <== selected", + " file_1.py <== selected <== marked", " file_2.py", " file_3.py", " .gitignore", @@ -7836,7 +7832,7 @@ mod tests { " file_2.py", " file_3.py", " v dir_2", - " file_1.py <== selected", + " file_1.py <== selected <== marked", " file_2.py", " file_3.py", " .gitignore", @@ -7857,7 +7853,7 @@ mod tests { " > .git", " v dir_1", " v gitignored_dir", - " file_a.py <== selected", + " file_a.py <== selected <== marked", " file_b.py", " file_c.py", " file_1.py", @@ -8012,7 +8008,7 @@ mod tests { " > .git", " v dir_1", " > gitignored_dir", - " file_1.py <== selected", + " file_1.py <== selected <== marked", " file_2.py", " file_3.py", " > dir_2", @@ -8038,7 +8034,7 @@ mod tests { " file_2.py", " file_3.py", " v dir_2", - " file_1.py <== selected", + " file_1.py <== selected <== marked", " file_2.py", " file_3.py", " .gitignore", @@ -8059,7 +8055,7 @@ mod tests { " > .git", " v dir_1", " v gitignored_dir", - " file_a.py <== selected", + " file_a.py <== selected <== marked", " file_b.py", " file_c.py", " file_1.py", diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 2a458b61d82eee..e8ef3dbfc84036 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -324,45 +324,51 @@ message Envelope { GitCommitDetails git_commit_details = 302; GitCheckoutFiles git_checkout_files = 303; - SynchronizeBreakpoints synchronize_breakpoints = 304; - SetActiveDebugLine set_active_debug_line = 305; - RemoveActiveDebugLine remove_active_debug_line = 306; - UpdateDebugAdapter update_debug_adapter = 307; - ShutdownDebugClient shutdown_debug_client = 308; - SetDebugClientCapabilities set_debug_client_capabilities = 309; - DapNextRequest dap_next_request = 310; - DapStepInRequest dap_step_in_request = 311; - DapStepOutRequest dap_step_out_request = 312; - DapStepBackRequest dap_step_back_request = 313; - DapContinueRequest dap_continue_request = 314; - DapContinueResponse dap_continue_response = 315; - DapPauseRequest dap_pause_request = 316; - DapDisconnectRequest dap_disconnect_request = 317; - DapTerminateThreadsRequest dap_terminate_threads_request = 318; - DapTerminateRequest dap_terminate_request = 319; - DapRestartRequest dap_restart_request = 320; - UpdateThreadStatus update_thread_status = 321; - VariablesRequest variables_request = 322; - DapVariables dap_variables = 323; - DapRestartStackFrameRequest dap_restart_stack_frame_request = 324; - IgnoreBreakpointState ignore_breakpoint_state = 325; - ToggleIgnoreBreakpoints toggle_ignore_breakpoints = 326; - DapModulesRequest dap_modules_request = 327; - DapModulesResponse dap_modules_response = 328; - DapLoadedSourcesRequest dap_loaded_sources_request = 329; - DapLoadedSourcesResponse dap_loaded_sources_response = 330; - DapStackTraceRequest dap_stack_trace_request = 331; - DapStackTraceResponse dap_stack_trace_response = 332; - DapScopesRequest dap_scopes_request = 333; - DapScopesResponse dap_scopes_response = 334; - DapSetVariableValueRequest dap_set_variable_value_request = 335; - DapSetVariableValueResponse dap_set_variable_value_response = 336; - DapEvaluateRequest dap_evaluate_request = 337; - DapEvaluateResponse dap_evaluate_response = 338; - DapCompletionRequest dap_completion_request = 339; - DapCompletionResponse dap_completion_response = 340; - DapThreadsRequest dap_threads_request = 
341; - DapThreadsResponse dap_threads_response = 342;// current max + Push push = 304; + Fetch fetch = 305; + GetRemotes get_remotes = 306; + GetRemotesResponse get_remotes_response = 307; + Pull pull = 308; + + SynchronizeBreakpoints synchronize_breakpoints = 309; + SetActiveDebugLine set_active_debug_line = 310; + RemoveActiveDebugLine remove_active_debug_line = 311; + UpdateDebugAdapter update_debug_adapter = 312; + ShutdownDebugClient shutdown_debug_client = 313; + SetDebugClientCapabilities set_debug_client_capabilities = 314; + DapNextRequest dap_next_request = 315; + DapStepInRequest dap_step_in_request = 316; + DapStepOutRequest dap_step_out_request = 317; + DapStepBackRequest dap_step_back_request = 318; + DapContinueRequest dap_continue_request = 319; + DapContinueResponse dap_continue_response = 320; + DapPauseRequest dap_pause_request = 321; + DapDisconnectRequest dap_disconnect_request = 322; + DapTerminateThreadsRequest dap_terminate_threads_request = 323; + DapTerminateRequest dap_terminate_request = 324; + DapRestartRequest dap_restart_request = 325; + UpdateThreadStatus update_thread_status = 326; + VariablesRequest variables_request = 327; + DapVariables dap_variables = 328; + DapRestartStackFrameRequest dap_restart_stack_frame_request = 329; + IgnoreBreakpointState ignore_breakpoint_state = 330; + ToggleIgnoreBreakpoints toggle_ignore_breakpoints = 331; + DapModulesRequest dap_modules_request = 332; + DapModulesResponse dap_modules_response = 333; + DapLoadedSourcesRequest dap_loaded_sources_request = 334; + DapLoadedSourcesResponse dap_loaded_sources_response = 335; + DapStackTraceRequest dap_stack_trace_request = 336; + DapStackTraceResponse dap_stack_trace_response = 337; + DapScopesRequest dap_scopes_request = 338; + DapScopesResponse dap_scopes_response = 339; + DapSetVariableValueRequest dap_set_variable_value_request = 340; + DapSetVariableValueResponse dap_set_variable_value_response = 341; + DapEvaluateRequest dap_evaluate_request = 342; + DapEvaluateResponse dap_evaluate_response = 343; + DapCompletionRequest dap_completion_request = 344; + DapCompletionResponse dap_completion_response = 345; + DapThreadsRequest dap_threads_request = 346; + DapThreadsResponse dap_threads_response = 347;// current max } reserved 87 to 88; @@ -3413,3 +3419,46 @@ message OpenCommitMessageBuffer { uint64 worktree_id = 2; uint64 work_directory_id = 3; } + +message Push { + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 work_directory_id = 3; + string remote_name = 4; + string branch_name = 5; + optional PushOptions options = 6; + + enum PushOptions { + SET_UPSTREAM = 0; + FORCE = 1; + } +} + +message Fetch { + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 work_directory_id = 3; +} + +message GetRemotes { + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 work_directory_id = 3; + optional string branch_name = 4; +} + +message GetRemotesResponse { + repeated Remote remotes = 1; + + message Remote { + string name = 1; + } +} + +message Pull { + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 work_directory_id = 3; + string remote_name = 4; + string branch_name = 5; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 7dcc61a777316c..3c8726f0deb150 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -467,6 +467,11 @@ messages!( (GitShow, Background), (GitCommitDetails, Background), (SetIndexText, Background), + (Push, Background), + (Fetch, Background), + (GetRemotes, Background), + 
(GetRemotesResponse, Background), + (Pull, Background), (VariablesRequest, Background), (DapVariables, Background), (IgnoreBreakpointState, Background), @@ -621,6 +626,10 @@ request_messages!( (GitReset, Ack), (GitCheckoutFiles, Ack), (SetIndexText, Ack), + (Push, Ack), + (Fetch, Ack), + (GetRemotes, GetRemotesResponse), + (Pull, Ack), (DapNextRequest, Ack), (DapStepInRequest, Ack), (DapStepOutRequest, Ack), @@ -739,6 +748,10 @@ entity_messages!( GitReset, GitCheckoutFiles, SetIndexText, + Push, + Fetch, + GetRemotes, + Pull, SynchronizeBreakpoints, SetActiveDebugLine, RemoveActiveDebugLine, diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 886f2815ff5f56..26a84e352d94f1 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2946,6 +2946,7 @@ impl ToOffset for Point { } impl ToOffset for usize { + #[track_caller] fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { assert!( *self <= snapshot.len(), diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index a046961c1ef06f..1b9bc1033e9991 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -330,14 +330,6 @@ impl Theme { } } -/// Compounds a color with an alpha value. -/// TODO: Replace this with a method on Hsla. -pub fn color_alpha(color: Hsla, alpha: f32) -> Hsla { - let mut color = color; - color.a = alpha; - color -} - /// Asynchronously reads the user theme from the specified path. pub async fn read_user_theme(theme_path: &Path, fs: Arc) -> Result { let reader = fs.open_sync(theme_path).await?; diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index c4bba05287c2e3..47514ab22438a5 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -125,6 +125,7 @@ impl IconSize { pub enum IconName { Ai, AiAnthropic, + AiBedrock, AiAnthropicHosted, AiDeepSeek, AiGoogle, diff --git a/crates/ui/src/components/keybinding_hint.rs b/crates/ui/src/components/keybinding_hint.rs index 9df64be5f6a507..7b9d5538485a19 100644 --- a/crates/ui/src/components/keybinding_hint.rs +++ b/crates/ui/src/components/keybinding_hint.rs @@ -1,7 +1,8 @@ +use crate::KeyBinding; use crate::{h_flex, prelude::*}; -use crate::{ElevationIndex, KeyBinding}; -use gpui::{point, AnyElement, App, BoxShadow, IntoElement, Window}; +use gpui::{point, AnyElement, App, BoxShadow, FontStyle, Hsla, IntoElement, Window}; use smallvec::smallvec; +use theme::Appearance; /// Represents a hint for a keybinding, optionally with a prefix and suffix. 
/// @@ -23,7 +24,7 @@ pub struct KeybindingHint { suffix: Option, keybinding: KeyBinding, size: Option, - elevation: Option, + background_color: Hsla, } impl KeybindingHint { @@ -37,15 +38,15 @@ impl KeybindingHint { /// ``` /// use ui::prelude::*; /// - /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+C")); + /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+C"), Hsla::new(0.0, 0.0, 0.0, 1.0)); /// ``` - pub fn new(keybinding: KeyBinding) -> Self { + pub fn new(keybinding: KeyBinding, background_color: Hsla) -> Self { Self { prefix: None, suffix: None, keybinding, size: None, - elevation: None, + background_color, } } @@ -59,15 +60,19 @@ impl KeybindingHint { /// ``` /// use ui::prelude::*; /// - /// let hint = KeybindingHint::with_prefix("Copy:", KeyBinding::from_str("Ctrl+C")); + /// let hint = KeybindingHint::with_prefix("Copy:", KeyBinding::from_str("Ctrl+C"), Hsla::new(0.0, 0.0, 0.0, 1.0)); /// ``` - pub fn with_prefix(prefix: impl Into, keybinding: KeyBinding) -> Self { + pub fn with_prefix( + prefix: impl Into, + keybinding: KeyBinding, + background_color: Hsla, + ) -> Self { Self { prefix: Some(prefix.into()), suffix: None, keybinding, size: None, - elevation: None, + background_color, } } @@ -81,15 +86,19 @@ impl KeybindingHint { /// ``` /// use ui::prelude::*; /// - /// let hint = KeybindingHint::with_suffix(KeyBinding::from_str("Ctrl+V"), "Paste"); + /// let hint = KeybindingHint::with_suffix(KeyBinding::from_str("Ctrl+V"), "Paste", Hsla::new(0.0, 0.0, 0.0, 1.0)); /// ``` - pub fn with_suffix(keybinding: KeyBinding, suffix: impl Into) -> Self { + pub fn with_suffix( + keybinding: KeyBinding, + suffix: impl Into, + background_color: Hsla, + ) -> Self { Self { prefix: None, suffix: Some(suffix.into()), keybinding, size: None, - elevation: None, + background_color, } } @@ -143,46 +152,37 @@ impl KeybindingHint { self.size = size.into(); self } - - /// Sets the elevation of the keybinding hint. - /// - /// This method allows specifying the elevation index for the keybinding hint, - /// which affects its visual appearance in terms of depth or layering. 
- /// - /// # Examples - /// - /// ``` - /// use ui::prelude::*; - /// - /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+A")) - /// .elevation(ElevationIndex::new(1)); - /// ``` - pub fn elevation(mut self, elevation: impl Into>) -> Self { - self.elevation = elevation.into(); - self - } } impl RenderOnce for KeybindingHint { fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { let colors = cx.theme().colors().clone(); + let is_light = cx.theme().appearance() == Appearance::Light; + + let border_color = + self.background_color + .blend(colors.text.alpha(if is_light { 0.08 } else { 0.16 })); + let bg_color = + self.background_color + .blend(colors.text.alpha(if is_light { 0.06 } else { 0.12 })); + let shadow_color = colors.text.alpha(if is_light { 0.04 } else { 0.08 }); let size = self .size .unwrap_or(TextSize::Small.rems(cx).to_pixels(window.rem_size())); let kb_size = size - px(2.0); - let kb_bg = if let Some(elevation) = self.elevation { - elevation.on_elevation_bg(cx) - } else { - theme::color_alpha(colors.element_background, 0.6) - }; - h_flex() - .items_center() + let mut base = h_flex(); + + base.text_style() + .get_or_insert_with(Default::default) + .font_style = Some(FontStyle::Italic); + + base.items_center() .gap_0p5() .font_buffer(cx) .text_size(size) - .text_color(colors.text_muted) + .text_color(colors.text_disabled) .children(self.prefix) .child( h_flex() @@ -191,10 +191,10 @@ impl RenderOnce for KeybindingHint { .px_0p5() .mr_0p5() .border_1() - .border_color(kb_bg) - .bg(kb_bg.opacity(0.8)) + .border_color(border_color) + .bg(bg_color) .shadow(smallvec![BoxShadow { - color: cx.theme().colors().editor_background.opacity(0.8), + color: shadow_color, offset: point(px(0.), px(1.)), blur_radius: px(0.), spread_radius: px(0.), @@ -212,6 +212,8 @@ impl ComponentPreview for KeybindingHint { let enter = KeyBinding::for_action(&menu::Confirm, window, cx) .unwrap_or(KeyBinding::new(enter_fallback, cx)); + let bg_color = cx.theme().colors().surface_background; + v_flex() .gap_6() .children(vec![ @@ -220,17 +222,17 @@ impl ComponentPreview for KeybindingHint { vec![ single_example( "With Prefix", - KeybindingHint::with_prefix("Go to Start:", enter.clone()) + KeybindingHint::with_prefix("Go to Start:", enter.clone(), bg_color) .into_any_element(), ), single_example( "With Suffix", - KeybindingHint::with_suffix(enter.clone(), "Go to End") + KeybindingHint::with_suffix(enter.clone(), "Go to End", bg_color) .into_any_element(), ), single_example( "With Prefix and Suffix", - KeybindingHint::new(enter.clone()) + KeybindingHint::new(enter.clone(), bg_color) .prefix("Confirm:") .suffix("Execute selected action") .into_any_element(), @@ -242,21 +244,21 @@ impl ComponentPreview for KeybindingHint { vec![ single_example( "Small", - KeybindingHint::new(enter.clone()) + KeybindingHint::new(enter.clone(), bg_color) .size(Pixels::from(12.0)) .prefix("Small:") .into_any_element(), ), single_example( "Medium", - KeybindingHint::new(enter.clone()) + KeybindingHint::new(enter.clone(), bg_color) .size(Pixels::from(16.0)) .suffix("Medium") .into_any_element(), ), single_example( "Large", - KeybindingHint::new(enter.clone()) + KeybindingHint::new(enter.clone(), bg_color) .size(Pixels::from(20.0)) .prefix("Large:") .suffix("Size") @@ -264,41 +266,6 @@ impl ComponentPreview for KeybindingHint { ), ], ), - example_group_with_title( - "Elevations", - vec![ - single_example( - "Surface", - KeybindingHint::new(enter.clone()) - .elevation(ElevationIndex::Surface) - 
.prefix("Surface:") - .into_any_element(), - ), - single_example( - "Elevated Surface", - KeybindingHint::new(enter.clone()) - .elevation(ElevationIndex::ElevatedSurface) - .suffix("Elevated") - .into_any_element(), - ), - single_example( - "Editor Surface", - KeybindingHint::new(enter.clone()) - .elevation(ElevationIndex::EditorSurface) - .prefix("Editor:") - .suffix("Surface") - .into_any_element(), - ), - single_example( - "Modal Surface", - KeybindingHint::new(enter.clone()) - .elevation(ElevationIndex::ModalSurface) - .prefix("Modal:") - .suffix("Enter") - .into_any_element(), - ), - ], - ), ]) .into_any_element() } diff --git a/crates/ui/src/styles/elevation.rs b/crates/ui/src/styles/elevation.rs index f12a16e91ddc9b..aea91c8d5fd749 100644 --- a/crates/ui/src/styles/elevation.rs +++ b/crates/ui/src/styles/elevation.rs @@ -2,7 +2,7 @@ use std::fmt::{self, Display, Formatter}; use gpui::{hsla, point, px, App, BoxShadow, Hsla}; use smallvec::{smallvec, SmallVec}; -use theme::ActiveTheme; +use theme::{ActiveTheme, Appearance}; /// Today, elevation is primarily used to add shadows to elements, and set the correct background for elements like buttons. /// @@ -40,27 +40,37 @@ impl Display for ElevationIndex { impl ElevationIndex { /// Returns an appropriate shadow for the given elevation index. - pub fn shadow(self) -> SmallVec<[BoxShadow; 2]> { + pub fn shadow(self, cx: &App) -> SmallVec<[BoxShadow; 2]> { + let is_light = cx.theme().appearance() == Appearance::Light; + match self { ElevationIndex::Surface => smallvec![], ElevationIndex::EditorSurface => smallvec![], - ElevationIndex::ElevatedSurface => smallvec![BoxShadow { - color: hsla(0., 0., 0., 0.12), - offset: point(px(0.), px(2.)), - blur_radius: px(3.), - spread_radius: px(0.), - }], + ElevationIndex::ElevatedSurface => smallvec![ + BoxShadow { + color: hsla(0., 0., 0., 0.12), + offset: point(px(0.), px(2.)), + blur_radius: px(3.), + spread_radius: px(0.), + }, + BoxShadow { + color: hsla(0., 0., 0., if is_light { 0.03 } else { 0.06 }), + offset: point(px(1.), px(1.)), + blur_radius: px(0.), + spread_radius: px(0.), + } + ], ElevationIndex::ModalSurface => smallvec![ BoxShadow { - color: hsla(0., 0., 0., 0.12), + color: hsla(0., 0., 0., if is_light { 0.06 } else { 0.12 }), offset: point(px(0.), px(2.)), blur_radius: px(3.), spread_radius: px(0.), }, BoxShadow { - color: hsla(0., 0., 0., 0.08), + color: hsla(0., 0., 0., if is_light { 0.06 } else { 0.08 }), offset: point(px(0.), px(3.)), blur_radius: px(6.), spread_radius: px(0.), @@ -71,6 +81,12 @@ impl ElevationIndex { blur_radius: px(12.), spread_radius: px(0.), }, + BoxShadow { + color: hsla(0., 0., 0., if is_light { 0.04 } else { 0.12 }), + offset: point(px(1.), px(1.)), + blur_radius: px(0.), + spread_radius: px(0.), + }, ], _ => smallvec![], diff --git a/crates/ui/src/traits/styled_ext.rs b/crates/ui/src/traits/styled_ext.rs index 48a515afd79343..76da92d0046bf0 100644 --- a/crates/ui/src/traits/styled_ext.rs +++ b/crates/ui/src/traits/styled_ext.rs @@ -8,13 +8,13 @@ fn elevated(this: E, cx: &App, index: ElevationIndex) -> E { .rounded_lg() .border_1() .border_color(cx.theme().colors().border_variant) - .shadow(index.shadow()) + .shadow(index.shadow(cx)) } fn elevated_borderless(this: E, cx: &mut App, index: ElevationIndex) -> E { this.bg(cx.theme().colors().elevated_surface_background) .rounded_lg() - .shadow(index.shadow()) + .shadow(index.shadow(cx)) } /// Extends [`gpui::Styled`] with Zed-specific styling methods. 
diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index a6b827c8cf2bb5..09551a35cd9540 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -58,18 +58,18 @@ impl Vim { self.update_editor(window, cx, |vim, editor, window, cx| { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); - let mut original_positions: HashMap<_, _> = Default::default(); + let mut start_positions: HashMap<_, _> = Default::default(); editor.change_selections(None, window, cx, |s| { s.move_with(|map, selection| { - let original_position = (selection.head(), selection.goal); object.expand_selection(map, selection, around); - original_positions.insert(selection.id, original_position); + let start_position = (selection.start, selection.goal); + start_positions.insert(selection.id, start_position); }); }); vim.yank_selections_content(editor, false, cx); editor.change_selections(None, window, cx, |s| { s.move_with(|_, selection| { - let (head, goal) = original_positions.remove(&selection.id).unwrap(); + let (head, goal) = start_positions.remove(&selection.id).unwrap(); selection.collapse_to(head, goal); }); }); diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 0251f4d564d720..689763f8b42628 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -303,6 +303,9 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, |vim, _: &Quotes, window, cx| { vim.object(Object::Quotes, window, cx) }); + Vim::action(editor, cx, |vim, _: &BackQuotes, window, cx| { + vim.object(Object::BackQuotes, window, cx) + }); Vim::action(editor, cx, |vim, _: &AnyQuotes, window, cx| { vim.object(Object::AnyQuotes, window, cx) }); @@ -557,9 +560,6 @@ impl Object { if let Some(range) = self.range(map, selection.clone(), around) { selection.start = range.start; selection.end = range.end; - if !around && self.is_multiline() { - preserve_indented_newline(map, selection); - } true } else { false @@ -567,50 +567,6 @@ impl Object { } } -/// Returns a range without the final newline char. -/// -/// If the selection spans multiple lines and is preceded by an opening brace (`{`), -/// this function will trim the selection to exclude the final newline -/// in order to preserve a properly indented line. -pub fn preserve_indented_newline(map: &DisplaySnapshot, selection: &mut Selection) { - let (start_point, end_point) = (selection.start.to_point(map), selection.end.to_point(map)); - - if start_point.row == end_point.row { - return; - } - - let start_offset = selection.start.to_offset(map, Bias::Left); - let mut pos = start_offset; - - while pos > 0 { - pos -= 1; - let current_char = map.buffer_chars_at(pos).next().map(|(ch, _)| ch); - - match current_char { - Some(ch) if !ch.is_whitespace() => break, - Some('\n') if pos > 0 => { - let prev_char = map.buffer_chars_at(pos - 1).next().map(|(ch, _)| ch); - if prev_char == Some('{') { - let end_pos = selection.end.to_offset(map, Bias::Left); - for (ch, offset) in map.reverse_buffer_chars_at(end_pos) { - match ch { - '\n' => { - selection.end = offset.to_display_point(map); - selection.reversed = true; - break; - } - ch if !ch.is_whitespace() => break, - _ => continue, - } - } - } - break; - } - _ => continue, - } - } -} - /// Returns a range that surrounds the word `relative_to` is in. /// /// If `relative_to` is at the start of a word, return the word. 
@@ -1515,38 +1471,37 @@ fn surrounding_markers( } } - if !around && search_across_lines { - // Handle trailing newline after opening - if let Some((ch, range)) = movement::chars_after(map, opening.end).next() { - if ch == '\n' { - opening.end = range.end; - - // After newline, skip leading whitespace - let mut chars = movement::chars_after(map, opening.end).peekable(); - while let Some((ch, range)) = chars.peek() { - if !ch.is_whitespace() { - break; - } - opening.end = range.end; - chars.next(); + // Adjust selection to remove leading and trailing whitespace for multiline inner brackets + if !around && open_marker != close_marker { + let start_point = opening.end.to_display_point(map); + let end_point = closing.start.to_display_point(map); + let start_offset = start_point.to_offset(map, Bias::Left); + let end_offset = end_point.to_offset(map, Bias::Left); + + if start_point.row() != end_point.row() + && map + .buffer_chars_at(start_offset) + .take_while(|(_, offset)| offset < &end_offset) + .any(|(ch, _)| !ch.is_whitespace()) + { + let mut first_non_ws = None; + let mut last_non_ws = None; + for (ch, offset) in map.buffer_chars_at(start_offset) { + if !ch.is_whitespace() { + first_non_ws = Some(offset); + break; } } - } - - // Handle leading whitespace before closing - let mut last_newline_end = None; - for (ch, range) in movement::chars_before(map, closing.start) { - if !ch.is_whitespace() { - break; + for (ch, offset) in map.reverse_buffer_chars_at(end_offset) { + if !ch.is_whitespace() { + last_non_ws = Some(offset + ch.len_utf8()); + break; + } } - if ch == '\n' { - last_newline_end = Some(range.end); - break; + if let Some(start) = first_non_ws { + opening.end = start; } - } - // Adjust closing.start to exclude whitespace after a newline, if present - if let Some(end) = last_newline_end { - if end > opening.end { + if let Some(end) = last_non_ws { closing.start = end; } } @@ -1901,10 +1856,10 @@ mod test { cx.assert_state( indoc! { "func empty(a string) bool { - «ˇif a == \"\" { + «if a == \"\" { return true } - return false» + return falseˇ» }" }, Mode::Visual, @@ -1926,7 +1881,7 @@ mod test { indoc! { "func empty(a string) bool { if a == \"\" { - «ˇreturn true» + «return trueˇ» } return false }" @@ -1950,7 +1905,7 @@ mod test { indoc! { "func empty(a string) bool { if a == \"\" { - «ˇreturn true» + «return trueˇ» } return false }" @@ -1973,14 +1928,33 @@ mod test { cx.assert_state( indoc! { "func empty(a string) bool { - «ˇif a == \"\" { + «if a == \"\" { return true } - return false» + return falseˇ» }" }, Mode::Visual, ); + + cx.set_state( + indoc! { + "func empty(a string) bool { + if a == \"\" { + ˇ + + }" + }, + Mode::Normal, + ); + cx.simulate_keystrokes("c i {"); + cx.assert_state( + indoc! 
{ + "func empty(a string) bool { + if a == \"\" {ˇ}" + }, + Mode::Insert, + ); } #[gpui::test] @@ -2600,7 +2574,6 @@ mod test { #[gpui::test] async fn test_anybrackets_trailing_space(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; - cx.set_shared_state("(trailingˇ whitespace )") .await; cx.simulate_shared_keystrokes("v i b").await; diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 1a4cda5e2241b7..a9e4dd9767fc48 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -16,7 +16,7 @@ use workspace::searchable::Direction; use crate::{ motion::{first_non_whitespace, next_line_end, start_of_line, Motion}, - object::{self, Object}, + object::Object, state::{Mode, Operator}, Vim, }; @@ -375,9 +375,6 @@ impl Vim { } else { selection.end = range.end; } - if !around && object.is_multiline() { - object::preserve_indented_newline(map, selection); - } } // In the visual selection result of a paragraph object, the cursor is diff --git a/crates/vim/test_data/test_multiline_surrounding_character_objects.json b/crates/vim/test_data/test_multiline_surrounding_character_objects.json deleted file mode 100644 index c61b7b9145b94e..00000000000000 --- a/crates/vim/test_data/test_multiline_surrounding_character_objects.json +++ /dev/null @@ -1,20 +0,0 @@ -{"Put":{"state":"func empty(a string) bool {\n if a == \"\" {\n return true\n }\n ˇreturn false\n}"}} -{"Key":"v"} -{"Key":"i"} -{"Key":"{"} -{"Get":{"state":"func empty(a string) bool {\n «ˇif a == \"\" {\n return true\n }\n return false»\n}","mode":"Visual"}} -{"Put":{"state":"func empty(a string) bool {\n if a == \"\" {\n ˇreturn true\n }\n return false\n}"}} -{"Key":"v"} -{"Key":"i"} -{"Key":"{"} -{"Get":{"state":"func empty(a string) bool {\n if a == \"\" {\n «ˇreturn true»\n }\n return false\n}","mode":"Visual"}} -{"Put":{"state":"func empty(a string) bool {\n if a == \"\" ˇ{\n return true\n }\n return false\n}"}} -{"Key":"v"} -{"Key":"i"} -{"Key":"{"} -{"Get":{"state":"func empty(a string) bool {\n if a == \"\" {\n «ˇreturn true»\n }\n return false\n}","mode":"Visual"}} -{"Put":{"state":"func empty(a string) bool {\n if a == \"\" {\n return true\n }\n return false\nˇ}"}} -{"Key":"v"} -{"Key":"i"} -{"Key":"{"} -{"Get":{"state":"func empty(a string) bool {\n «ˇif a == \"\" {\n return true\n }\n return false»\n}","mode":"Visual"}} diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 402567cf8c6e80..8dcb4d43a9d90a 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -20,7 +20,7 @@ use futures::{ }; use fuzzy::CharBag; use git::{ - repository::{Branch, GitRepository, RepoPath}, + repository::{Branch, GitRepository, RepoPath, UpstreamTrackingStatus}, status::{ FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode, }, @@ -202,21 +202,25 @@ pub struct RepositoryEntry { pub(crate) statuses_by_path: SumTree, work_directory_id: ProjectEntryId, pub work_directory: WorkDirectory, - pub(crate) branch: Option, + pub(crate) current_branch: Option, pub current_merge_conflicts: TreeSet, } -impl Deref for RepositoryEntry { - type Target = WorkDirectory; +impl RepositoryEntry { + pub fn relativize(&self, path: &Path) -> Result { + self.work_directory.relativize(path) + } - fn deref(&self) -> &Self::Target { - &self.work_directory + pub fn unrelativize(&self, path: &RepoPath) -> Option> { + self.work_directory.unrelativize(path) + } + + pub fn directory_contains(&self, path: impl AsRef) -> bool { + 
self.work_directory.directory_contains(path) } -} -impl RepositoryEntry { pub fn branch(&self) -> Option<&Branch> { - self.branch.as_ref() + self.current_branch.as_ref() } pub fn work_directory_id(&self) -> ProjectEntryId { @@ -244,8 +248,11 @@ impl RepositoryEntry { pub fn initial_update(&self) -> proto::RepositoryEntry { proto::RepositoryEntry { work_directory_id: self.work_directory_id.to_proto(), - branch: self.branch.as_ref().map(|branch| branch.name.to_string()), - branch_summary: self.branch.as_ref().map(branch_to_proto), + branch: self + .current_branch + .as_ref() + .map(|branch| branch.name.to_string()), + branch_summary: self.current_branch.as_ref().map(branch_to_proto), updated_statuses: self .statuses_by_path .iter() @@ -304,8 +311,11 @@ impl RepositoryEntry { proto::RepositoryEntry { work_directory_id: self.work_directory_id.to_proto(), - branch: self.branch.as_ref().map(|branch| branch.name.to_string()), - branch_summary: self.branch.as_ref().map(branch_to_proto), + branch: self + .current_branch + .as_ref() + .map(|branch| branch.name.to_string()), + branch_summary: self.current_branch.as_ref().map(branch_to_proto), updated_statuses, removed_statuses, current_merge_conflicts: self @@ -329,7 +339,7 @@ pub fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch { ref_name: upstream.ref_name.to_string(), tracking: upstream .tracking - .as_ref() + .status() .map(|upstream| proto::UpstreamTracking { ahead: upstream.ahead as u64, behind: upstream.behind as u64, @@ -355,12 +365,16 @@ pub fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch { .as_ref() .map(|upstream| git::repository::Upstream { ref_name: upstream.ref_name.to_string().into(), - tracking: upstream.tracking.as_ref().map(|tracking| { - git::repository::UpstreamTracking { - ahead: tracking.ahead as u32, - behind: tracking.behind as u32, - } - }), + tracking: upstream + .tracking + .as_ref() + .map(|tracking| { + git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus { + ahead: tracking.ahead as u32, + behind: tracking.behind as u32, + }) + }) + .unwrap_or(git::repository::UpstreamTracking::Gone), }), most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| { git::repository::CommitSummary { @@ -2682,7 +2696,8 @@ impl Snapshot { self.repositories .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| { - repo.branch = repository.branch_summary.as_ref().map(proto_to_branch); + repo.current_branch = + repository.branch_summary.as_ref().map(proto_to_branch); repo.statuses_by_path.edit(edits, &()); repo.current_merge_conflicts = conflicted_paths }); @@ -2704,7 +2719,7 @@ impl Snapshot { work_directory: WorkDirectory::InProject { relative_path: work_dir_entry.path.clone(), }, - branch: repository.branch_summary.as_ref().map(proto_to_branch), + current_branch: repository.branch_summary.as_ref().map(proto_to_branch), statuses_by_path: statuses, current_merge_conflicts: conflicted_paths, }, @@ -2823,7 +2838,7 @@ impl Snapshot { pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> { self.repositories .iter() - .filter(|repo| repo.work_directory.directory_contains(path)) + .filter(|repo| repo.directory_contains(path)) .last() } @@ -3506,7 +3521,7 @@ impl BackgroundScannerState { RepositoryEntry { work_directory_id: work_dir_id, work_directory: work_directory.clone(), - branch: None, + current_branch: None, statuses_by_path: Default::default(), current_merge_conflicts: Default::default(), }, @@ -5472,6 +5487,9 @@ impl BackgroundScanner { }, &(), ); + if 
status.is_conflicted() { + repository.current_merge_conflicts.insert(repo_path.clone()); + } if let Some(path) = project_path { changed_paths.push(path); @@ -5577,7 +5595,7 @@ fn update_branches( let mut repository = snapshot .repository(repository.work_directory.path_key()) .context("Missing repository")?; - repository.branch = branches.into_iter().find(|branch| branch.is_head); + repository.current_branch = branches.into_iter().find(|branch| branch.is_head); let mut state = state.lock(); state @@ -6009,7 +6027,13 @@ impl<'a> GitTraversal<'a> { }; // Update our state if we changed repositories. - if reset || self.repo_location.as_ref().map(|(prev_repo, _)| prev_repo) != Some(&repo) { + if reset + || self + .repo_location + .as_ref() + .map(|(prev_repo, _)| &prev_repo.work_directory) + != Some(&repo.work_directory) + { self.repo_location = Some((repo, repo.statuses_by_path.cursor::(&()))); } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 405c1b752bb67f..cd2c2e051d5b69 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -26,7 +26,7 @@ use std::{ sync::Arc, time::Duration, }; -use util::{test::TempTree, ResultExt}; +use util::{path, test::TempTree, ResultExt}; #[gpui::test] async fn test_traversal(cx: &mut TestAppContext) { @@ -1650,7 +1650,7 @@ async fn test_random_worktree_operations_during_initial_scan( .map(|o| o.parse().unwrap()) .unwrap_or(20); - let root_dir = Path::new("/test"); + let root_dir = Path::new(path!("/test")); let fs = FakeFs::new(cx.background_executor.clone()) as Arc; fs.as_fake().insert_tree(root_dir, json!({})).await; for _ in 0..initial_entries { @@ -1741,7 +1741,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) .map(|o| o.parse().unwrap()) .unwrap_or(20); - let root_dir = Path::new("/test"); + let root_dir = Path::new(path!("/test")); let fs = FakeFs::new(cx.background_executor.clone()) as Arc; fs.as_fake().insert_tree(root_dir, json!({})).await; for _ in 0..initial_entries { diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 4ff92e6f8e4a46..671b3d0b14bb7a 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -438,7 +438,7 @@ fn main() { cx, ); supermaven::init(app_state.client.clone(), cx); - language_model::init(cx); + language_model::init(app_state.client.clone(), cx); language_models::init( app_state.user_store.clone(), app_state.client.clone(), diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index d8199e0076b243..6d2dfb6cabf050 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4232,6 +4232,7 @@ mod tests { app_state.languages.add(markdown_language()); + gpui_tokio::init(cx); vim_mode_setting::init(cx); theme::init(theme::LoadThemes::JustBase, cx); audio::init((), cx); @@ -4255,7 +4256,7 @@ mod tests { cx, ); image_viewer::init(cx); - language_model::init(cx); + language_model::init(app_state.client.clone(), cx); language_models::init( app_state.user_store.clone(), app_state.client.clone(), diff --git a/crates/zed/src/zed/inline_completion_registry.rs b/crates/zed/src/zed/inline_completion_registry.rs index 3e5107d1c373d9..edd1624033a540 100644 --- a/crates/zed/src/zed/inline_completion_registry.rs +++ b/crates/zed/src/zed/inline_completion_registry.rs @@ -9,7 +9,7 @@ use settings::SettingsStore; use std::{cell::RefCell, rc::Rc, sync::Arc}; use supermaven::{Supermaven, SupermavenCompletionProvider}; use ui::Window; -use zeta::ProviderDataCollection; +use 
zeta::{ProviderDataCollection, ZetaInlineCompletionProvider}; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let editors: Rc, AnyWindowHandle>>> = Rc::default(); @@ -225,7 +225,9 @@ fn assign_edit_prediction_provider( let singleton_buffer = editor.buffer().read(cx).as_singleton(); match provider { - EditPredictionProvider::None => {} + EditPredictionProvider::None => { + editor.set_edit_prediction_provider::(None, window, cx); + } EditPredictionProvider::Copilot => { if let Some(copilot) = Copilot::global(cx) { if let Some(buffer) = singleton_buffer { @@ -265,7 +267,7 @@ fn assign_edit_prediction_provider( } let zeta = zeta::Zeta::register( - Some(cx.entity()), + editor.workspace().map(|w| w.downgrade()), worktree, client.clone(), user_store, diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index e453fc4922e9ae..e5cdba5be28b9c 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -87,46 +87,21 @@ impl Render for QuickActionBar { return div().id("empty quick action bar"); }; - let ( - selection_menu_enabled, - inlay_hints_enabled, - supports_inlay_hints, - inline_diagnostics_enabled, - supports_inline_diagnostics, - git_blame_inline_enabled, - show_git_blame_gutter, - auto_signature_help_enabled, - show_inline_completions, - inline_completion_enabled, - ) = { - let supports_inlay_hints = - editor.update(cx, |editor, cx| editor.supports_inlay_hints(cx)); - let editor = editor.read(cx); - let selection_menu_enabled = editor.selection_menu_enabled(cx); - let inlay_hints_enabled = editor.inlay_hints_enabled(); - let show_inline_diagnostics = editor.show_inline_diagnostics(); - let supports_inline_diagnostics = editor.inline_diagnostics_enabled(); - let git_blame_inline_enabled = editor.git_blame_inline_enabled(); - let show_git_blame_gutter = editor.show_git_blame_gutter(); - let auto_signature_help_enabled = editor.auto_signature_help_enabled(cx); - let show_edit_predictions = editor.edit_predictions_enabled(); - let inline_completion_enabled = editor.inline_completions_enabled(cx); - - ( - selection_menu_enabled, - inlay_hints_enabled, - supports_inlay_hints, - show_inline_diagnostics, - supports_inline_diagnostics, - git_blame_inline_enabled, - show_git_blame_gutter, - auto_signature_help_enabled, - show_edit_predictions, - inline_completion_enabled, - ) - }; - - let focus_handle = editor.read(cx).focus_handle(cx); + let supports_inlay_hints = editor.update(cx, |editor, cx| editor.supports_inlay_hints(cx)); + let editor_value = editor.read(cx); + let selection_menu_enabled = editor_value.selection_menu_enabled(cx); + let inlay_hints_enabled = editor_value.inlay_hints_enabled(); + let inline_diagnostics_enabled = editor_value.show_inline_diagnostics(); + let supports_inline_diagnostics = editor_value.inline_diagnostics_enabled(); + let git_blame_inline_enabled = editor_value.git_blame_inline_enabled(); + let show_git_blame_gutter = editor_value.show_git_blame_gutter(); + let auto_signature_help_enabled = editor_value.auto_signature_help_enabled(cx); + let has_edit_prediction_provider = editor_value.edit_prediction_provider().is_some(); + let show_edit_predictions = editor_value.edit_predictions_enabled(); + let edit_predictions_enabled_at_cursor = + editor_value.edit_predictions_enabled_at_cursor(cx); + + let focus_handle = editor_value.focus_handle(cx); let search_button = editor.is_singleton(cx).then(|| { QuickActionBarButton::new( @@ -328,33 +303,35 @@ impl Render for QuickActionBar { }, 
); - let mut inline_completion_entry = ContextMenuEntry::new("Edit Predictions") - .toggleable(IconPosition::Start, inline_completion_enabled && show_inline_completions) - .disabled(!inline_completion_enabled) - .action(Some( - editor::actions::ToggleEditPrediction.boxed_clone(), - )).handler({ - let editor = editor.clone(); - move |window, cx| { - editor - .update(cx, |editor, cx| { - editor.toggle_inline_completions( - &editor::actions::ToggleEditPrediction, - window, - cx, - ); - }) - .ok(); - } - }); - if !inline_completion_enabled { - inline_completion_entry = inline_completion_entry.documentation_aside(|_| { - Label::new("You can't toggle edit predictions for this file as it is within the excluded files list.").into_any_element() - }); + if has_edit_prediction_provider { + let mut inline_completion_entry = ContextMenuEntry::new("Edit Predictions") + .toggleable(IconPosition::Start, edit_predictions_enabled_at_cursor && show_edit_predictions) + .disabled(!edit_predictions_enabled_at_cursor) + .action(Some( + editor::actions::ToggleEditPrediction.boxed_clone(), + )).handler({ + let editor = editor.clone(); + move |window, cx| { + editor + .update(cx, |editor, cx| { + editor.toggle_edit_predictions( + &editor::actions::ToggleEditPrediction, + window, + cx, + ); + }) + .ok(); + } + }); + if !edit_predictions_enabled_at_cursor { + inline_completion_entry = inline_completion_entry.documentation_aside(|_| { + Label::new("You can't toggle edit predictions for this file as it is within the excluded files list.").into_any_element() + }); + } + + menu = menu.item(inline_completion_entry); } - menu = menu.item(inline_completion_entry); - menu = menu.separator(); menu = menu.toggleable_entry( diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index 515624962a6b94..6621417b1885f6 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -33,7 +33,7 @@ http_client.workspace = true indoc.workspace = true inline_completion.workspace = true language.workspace = true -language_models.workspace = true +language_model.workspace = true log.workspace = true menu.workspace = true migrator.workspace = true diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 82fd0e9991d19f..7b91a5a09fe7de 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -9,7 +9,6 @@ mod rate_completion_modal; pub(crate) use completion_diff_element::*; use db::kvp::KEY_VALUE_STORE; -use editor::Editor; pub use init::*; use inline_completion::DataCollectionState; pub use license_detection::is_license_eligible_for_data_collection; @@ -24,14 +23,14 @@ use collections::{HashMap, HashSet, VecDeque}; use futures::AsyncReadExt; use gpui::{ actions, App, AppContext as _, AsyncApp, Context, Entity, EntityId, Global, SemanticVersion, - Subscription, Task, + Subscription, Task, WeakEntity, }; use http_client::{HttpClient, Method}; use input_excerpt::excerpt_for_cursor_position; use language::{ text_diff, Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, ToOffset, ToPoint, }; -use language_models::LlmApiToken; +use language_model::{LlmApiToken, RefreshLlmTokenListener}; use postage::watch; use project::Project; use release_channel::AppVersion; @@ -186,7 +185,7 @@ impl std::fmt::Debug for InlineCompletion { } pub struct Zeta { - editor: Option>, + workspace: Option>, client: Arc, events: VecDeque, registered_buffers: HashMap, @@ -209,14 +208,14 @@ impl Zeta { } pub fn register( - editor: Option>, + workspace: Option>, worktree: Option>, client: Arc, user_store: Entity, cx: &mut App, ) -> 
Entity { let this = Self::global(cx).unwrap_or_else(|| { - let entity = cx.new(|cx| Self::new(editor, client, user_store, cx)); + let entity = cx.new(|cx| Self::new(workspace, client, user_store, cx)); cx.set_global(ZetaGlobal(entity.clone())); entity }); @@ -239,18 +238,18 @@ impl Zeta { } fn new( - editor: Option>, + workspace: Option>, client: Arc, user_store: Entity, cx: &mut Context, ) -> Self { - let refresh_llm_token_listener = language_models::RefreshLlmTokenListener::global(cx); + let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); let data_collection_choice = Self::load_data_collection_choices(); let data_collection_choice = cx.new(|_| data_collection_choice); Self { - editor, + workspace, client, events: VecDeque::new(), shown_completions: VecDeque::new(), @@ -705,10 +704,7 @@ and then another can_collect_data: bool, cx: &mut Context, ) -> Task>> { - let workspace = self - .editor - .as_ref() - .and_then(|editor| editor.read(cx).workspace()); + let workspace = self.workspace.as_ref().and_then(|w| w.upgrade()); self.request_completion_impl( workspace, project, @@ -1649,7 +1645,6 @@ mod tests { use http_client::FakeHttpClient; use indoc::indoc; use language::Point; - use language_models::RefreshLlmTokenListener; use rpc::proto; use settings::SettingsStore; diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index ab252ba1a1d685..7e8de04fcb24e1 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -36,7 +36,7 @@ The following configuration can be used to change the inlay hint settings for `r } ``` -See [Inlay Hints](https://rust-analyzer.github.io/manual.html#inlay-hints) in the Rust Analyzer Manual for more information. +See [Inlay Hints](https://rust-analyzer.github.io/book/features.html#inlay-hints) in the Rust Analyzer Manual for more information. ## Target directory @@ -101,7 +101,7 @@ This `"path"` has to be an absolute path. TBD: Is it possible to specify RUSTFLAGS? https://github.com/zed-industries/zed/issues/14334 --> -Rust-analyzer [manual](https://rust-analyzer.github.io/manual.html) describes various features and configuration options for rust-analyzer language server. +Rust-analyzer [manual](https://rust-analyzer.github.io/book/) describes various features and configuration options for rust-analyzer language server. Rust-analyzer in Zed runs with the default parameters. ### Large projects and performance @@ -129,7 +129,7 @@ While that works fine on small projects, it does not scale well. The alternatives would be to use [tasks](../tasks.md), as Zed already provides a `cargo check --workspace --all-targets` task and the ability to cmd/ctrl-click on the terminal output to navigate to the error, and limit or turn off the check on save feature entirely. -Check on save feature is responsible for returning part of the diagnostics based on cargo check output, so turning it off will limit rust-analyzer with its own [diagnostics](https://rust-analyzer.github.io/manual.html#diagnostics). +Check on save feature is responsible for returning part of the diagnostics based on cargo check output, so turning it off will limit rust-analyzer with its own [diagnostics](https://rust-analyzer.github.io/book/diagnostics.html). Consider more `rust-analyzer.cargo.` and `rust-analyzer.check.` and `rust-analyzer.diagnostics.` settings from the manual for more fine-grained configuration. 
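
As a hedged illustration only (not part of this diff), such a check-on-save override in Zed's `settings.json` could look like the snippet below; the `lsp.rust-analyzer.initialization_options` nesting follows Zed's LSP settings convention and `check.workspace` is an option documented in the rust-analyzer manual, so verify both against the versions you run:

```json
{
  "lsp": {
    "rust-analyzer": {
      "initialization_options": {
        // Pass `-p <current package>` instead of `--workspace` to cargo check,
        // keeping check-on-save cheap on large workspaces.
        "check": {
          "workspace": false
        }
      }
    }
  }
}
```
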
Here's a snippet for Zed settings.json (the language server will restart automatically after the `lsp.rust-analyzer` section is edited and saved): diff --git a/script/bundle-mac b/script/bundle-mac index 0828d2552312bf..e943dfb8efaf59 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -118,7 +118,7 @@ mv Cargo.toml.backup Cargo.toml popd echo "Bundled ${app_path}" -if [[ -n "${MACOS_CERTIFICATE:-}" && -n "${MACOS_CERTIFICATE_PASSWORD:-}" && -n "${APPLE_NOTARIZATION_USERNAME:-}" && -n "${APPLE_NOTARIZATION_PASSWORD:-}" ]]; then +if [[ -n "${MACOS_CERTIFICATE:-}" && -n "${MACOS_CERTIFICATE_PASSWORD:-}" && -n "${APPLE_NOTARIZATION_KEY:-}" && -n "${APPLE_NOTARIZATION_KEY_ID:-}" && -n "${APPLE_NOTARIZATION_ISSUER_ID:-}" ]]; then can_code_sign=true echo "Setting up keychain for code signing..." @@ -247,7 +247,7 @@ function sign_app_binaries() { /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${app_path}/Contents/MacOS/zed" -v /usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${app_path}" -v else - echo "One or more of the following variables are missing: MACOS_CERTIFICATE, MACOS_CERTIFICATE_PASSWORD, APPLE_NOTARIZATION_USERNAME, APPLE_NOTARIZATION_PASSWORD" + echo "One or more of the following variables are missing: MACOS_CERTIFICATE, MACOS_CERTIFICATE_PASSWORD, APPLE_NOTARIZATION_KEY, APPLE_NOTARIZATION_KEY_ID, APPLE_NOTARIZATION_ISSUER_ID" if [[ "$local_only" = false ]]; then echo "To create a self-signed local build use ./scripts/build.sh -ldf" exit 1 @@ -311,23 +311,7 @@ function sign_app_binaries() { rm -rf ${dmg_source_directory} mkdir -p ${dmg_source_directory} mv "${app_path}" "${dmg_source_directory}" - - if [[ $can_code_sign = true ]]; then - echo "Creating temporary DMG at ${dmg_file_path} using ${dmg_source_directory} to notarize app bundle" - hdiutil create -volname Zed -srcfolder "${dmg_source_directory}" -ov -format UDZO "${dmg_file_path}" - - echo "Code-signing DMG" - /usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "$(pwd)/${dmg_file_path}" -v - - echo "Notarizing DMG with Apple" - "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTARIZATION_TEAM" "${dmg_file_path}" - - echo "Removing temporary DMG (used only for notarization)" - rm "${dmg_file_path}" - - echo "Stapling notarization ticket to ${dmg_source_directory}/${bundle_name}" - "${xcode_bin_dir_path}/stapler" staple "${dmg_source_directory}/${bundle_name}" - fi + notarization_key_file=$(mktemp) echo "Adding symlink to /Applications to ${dmg_source_directory}" ln -s /Applications ${dmg_source_directory} @@ -347,7 +331,9 @@ function sign_app_binaries() { if [[ $can_code_sign = true ]]; then echo "Notarizing DMG with Apple" /usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "$(pwd)/${dmg_file_path}" -v - "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTARIZATION_TEAM" "${dmg_file_path}" + echo "$APPLE_NOTARIZATION_KEY" > "$notarization_key_file" + "${xcode_bin_dir_path}/notarytool" submit --wait --key "$notarization_key_file" --key-id "$APPLE_NOTARIZATION_KEY_ID" --issuer "$APPLE_NOTARIZATION_ISSUER_ID" "${dmg_file_path}" + rm "$notarization_key_file" "${xcode_bin_dir_path}/stapler" 
staple "${dmg_file_path}" fi diff --git a/script/generate-licenses b/script/generate-licenses index 09126e8ad6e18c..368f63b7c03413 100755 --- a/script/generate-licenses +++ b/script/generate-licenses @@ -6,15 +6,17 @@ CARGO_ABOUT_VERSION="0.6.6" OUTPUT_FILE="${1:-$(pwd)/assets/licenses.md}" TEMPLATE_FILE="script/licenses/template.md.hbs" -> $OUTPUT_FILE +echo -n "" > "$OUTPUT_FILE" -echo -e "# ###### THEME LICENSES ######\n" >> $OUTPUT_FILE -cat assets/themes/LICENSES >> $OUTPUT_FILE +{ + echo -e "# ###### THEME LICENSES ######\n" + cat assets/themes/LICENSES -echo -e "\n# ###### ICON LICENSES ######\n" >> $OUTPUT_FILE -cat assets/icons/LICENSES >> $OUTPUT_FILE + echo -e "\n# ###### ICON LICENSES ######\n" + cat assets/icons/LICENSES -echo -e "\n# ###### CODE LICENSES ######\n" >> $OUTPUT_FILE + echo -e "\n# ###### CODE LICENSES ######\n" +} >> "$OUTPUT_FILE" if ! cargo install --list | grep "cargo-about v$CARGO_ABOUT_VERSION" > /dev/null; then echo "Installing cargo-about@$CARGO_ABOUT_VERSION..." @@ -28,14 +30,14 @@ echo "Generating cargo licenses" cargo about generate \ --fail \ -c script/licenses/zed-licenses.toml \ - "${TEMPLATE_FILE}" >> $OUTPUT_FILE - -sed -i.bak 's/"/"/g' $OUTPUT_FILE -sed -i.bak 's/'/'\''/g' $OUTPUT_FILE # The ` '\'' ` thing ends the string, appends a single quote, and re-opens the string -sed -i.bak 's/=/=/g' $OUTPUT_FILE -sed -i.bak 's/`/`/g' $OUTPUT_FILE -sed -i.bak 's/<//g' $OUTPUT_FILE + "$TEMPLATE_FILE" >> "$OUTPUT_FILE" + +sed -i.bak 's/"/"/g' "$OUTPUT_FILE" +sed -i.bak 's/'/'\''/g' "$OUTPUT_FILE" # The ` '\'' ` thing ends the string, appends a single quote, and re-opens the string +sed -i.bak 's/=/=/g' "$OUTPUT_FILE" +sed -i.bak 's/`/`/g' "$OUTPUT_FILE" +sed -i.bak 's/<//g' "$OUTPUT_FILE" rm -rf "${OUTPUT_FILE}.bak" diff --git a/script/generate-licenses-csv b/script/generate-licenses-csv index cce97a11a61625..75d988462c22c1 100755 --- a/script/generate-licenses-csv +++ b/script/generate-licenses-csv @@ -18,8 +18,8 @@ echo "Generating cargo licenses" cargo about generate \ --fail \ -c script/licenses/zed-licenses.toml \ - script/licenses/template.csv.hbs \ + "$TEMPLATE_FILE" \ | awk 'NR==1{print;next} NF{print | "sort"}' \ - > $OUTPUT_FILE + > "$OUTPUT_FILE" echo "generate-licenses-csv completed. See $OUTPUT_FILE"