diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yml b/.github/ISSUE_TEMPLATE/1_bug_report.yml
index 783a1ec05e0ca5..a2064a0157a4d1 100644
--- a/.github/ISSUE_TEMPLATE/1_bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/1_bug_report.yml
@@ -10,16 +10,39 @@ body:
value: |
-
+ SUMMARY_SENTENCE_HERE
+
+
+
+
Steps to trigger the problem:
1.
2.
3.
+ 4.
Actual Behavior:
Expected Behavior:
+
+
validations:
required: true
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 419127b6aea9fe..912f19e4978b50 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -109,8 +109,16 @@ jobs:
- name: cargo clippy
run: ./script/clippy
+ - name: Install cargo-machete
+ uses: clechasseur/rs-cargo@v2
+ with:
+ command: install
+ args: cargo-machete@0.7.0
+
- name: Check unused dependencies
- uses: bnjbvr/cargo-machete@main
+ uses: clechasseur/rs-cargo@v2
+ with:
+ command: machete
- name: Check licenses
run: |
@@ -298,8 +306,9 @@ jobs:
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
- APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
- APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
+ APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+ APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+ APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml
index 64c719e7de14cc..fa9ec83e7e1dc0 100644
--- a/.github/workflows/release_nightly.yml
+++ b/.github/workflows/release_nightly.yml
@@ -62,8 +62,9 @@ jobs:
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
- APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
- APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
+ APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
+ APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
+ APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
diff --git a/Cargo.lock b/Cargo.lock
index c2371d6efa4ecd..916237c84a41b8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -394,7 +394,6 @@ dependencies = [
"language",
"language_model",
"language_model_selector",
- "language_models",
"languages",
"log",
"lsp",
@@ -462,7 +461,6 @@ dependencies = [
"language",
"language_model",
"language_model_selector",
- "language_models",
"log",
"lsp",
"markdown",
@@ -517,7 +515,6 @@ dependencies = [
"language",
"language_model",
"language_model_selector",
- "language_models",
"languages",
"log",
"multi_buffer",
@@ -1268,6 +1265,30 @@ dependencies = [
"uuid",
]
+[[package]]
+name = "aws-sdk-bedrockruntime"
+version = "1.74.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6938541d1948a543bca23303fec4cff9c36bf0e63b8fa3ae1b337bcb9d5b81af"
+dependencies = [
+ "aws-credential-types",
+ "aws-runtime",
+ "aws-smithy-async",
+ "aws-smithy-eventstream",
+ "aws-smithy-http",
+ "aws-smithy-json",
+ "aws-smithy-runtime",
+ "aws-smithy-runtime-api",
+ "aws-smithy-types",
+ "aws-types",
+ "bytes 1.10.0",
+ "fastrand 2.3.0",
+ "http 0.2.12",
+ "once_cell",
+ "regex-lite",
+ "tracing",
+]
+
[[package]]
name = "aws-sdk-kinesis"
version = "1.61.0"
@@ -1597,6 +1618,17 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "aws_http_client"
+version = "0.1.0"
+dependencies = [
+ "aws-smithy-runtime-api",
+ "aws-smithy-types",
+ "futures 0.3.31",
+ "http_client",
+ "tokio",
+]
+
[[package]]
name = "axum"
version = "0.6.20"
@@ -1726,6 +1758,22 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
+[[package]]
+name = "bedrock"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "aws-sdk-bedrockruntime",
+ "aws-smithy-types",
+ "futures 0.3.31",
+ "schemars",
+ "serde",
+ "serde_json",
+ "strum",
+ "thiserror 1.0.69",
+ "tokio",
+]
+
[[package]]
name = "bigdecimal"
version = "0.4.7"
@@ -3114,7 +3162,9 @@ dependencies = [
"clock",
"collections",
"command_palette_hooks",
+ "ctor",
"editor",
+ "env_logger 0.11.6",
"fs",
"futures 0.3.31",
"gpui",
@@ -3122,6 +3172,7 @@ dependencies = [
"indoc",
"inline_completion",
"language",
+ "log",
"lsp",
"menu",
"node_runtime",
@@ -5412,6 +5463,7 @@ dependencies = [
"pretty_assertions",
"regex",
"rope",
+ "schemars",
"serde",
"serde_json",
"smol",
@@ -7076,17 +7128,14 @@ dependencies = [
"anthropic",
"anyhow",
"base64 0.22.1",
+ "client",
"collections",
- "deepseek",
"futures 0.3.31",
"google_ai",
"gpui",
"http_client",
"image",
- "lmstudio",
"log",
- "mistral",
- "ollama",
"open_ai",
"parking_lot",
"proto",
@@ -7095,6 +7144,7 @@ dependencies = [
"serde_json",
"smol",
"strum",
+ "telemetry_events",
"thiserror 1.0.69",
"ui",
"util",
@@ -7121,6 +7171,10 @@ version = "0.1.0"
dependencies = [
"anthropic",
"anyhow",
+ "aws-config",
+ "aws-credential-types",
+ "aws_http_client",
+ "bedrock",
"client",
"collections",
"copilot",
@@ -7132,6 +7186,7 @@ dependencies = [
"futures 0.3.31",
"google_ai",
"gpui",
+ "gpui_tokio",
"http_client",
"language_model",
"lmstudio",
@@ -7147,10 +7202,9 @@ dependencies = [
"settings",
"smol",
"strum",
- "telemetry_events",
"theme",
- "thiserror 1.0.69",
"tiktoken-rs",
+ "tokio",
"ui",
"util",
]
@@ -17263,7 +17317,7 @@ dependencies = [
"indoc",
"inline_completion",
"language",
- "language_models",
+ "language_model",
"log",
"menu",
"migrator",
diff --git a/Cargo.toml b/Cargo.toml
index 3949f1a08cc086..43e2f0532a6265 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -15,6 +15,8 @@ members = [
"crates/audio",
"crates/auto_update",
"crates/auto_update_ui",
+ "crates/aws_http_client",
+ "crates/bedrock",
"crates/breadcrumbs",
"crates/buffer_diff",
"crates/call",
@@ -222,6 +224,8 @@ assistant_tools = { path = "crates/assistant_tools" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
auto_update_ui = { path = "crates/auto_update_ui" }
+aws_http_client = { path = "crates/aws_http_client" }
+bedrock = { path = "crates/bedrock" }
breadcrumbs = { path = "crates/breadcrumbs" }
call = { path = "crates/call" }
channel = { path = "crates/channel" }
@@ -390,6 +394,11 @@ async-trait = "0.1"
async-tungstenite = "0.28"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
+aws-config = { version = "1.5.16", features = ["behavior-version-latest"] }
+aws-credential-types = { version = "1.2.1", features = ["hardcoded-credentials"] }
+aws-sdk-bedrockruntime = { version = "1.73.0", features = ["behavior-version-latest"] }
+aws-smithy-runtime-api = { version = "1.7.3", features = ["http-1x", "client"] }
+aws-smithy-types = { version = "1.2.13", features = ["http-body-1-x"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
diff --git a/assets/icons/ai_bedrock.svg b/assets/icons/ai_bedrock.svg
new file mode 100644
index 00000000000000..2b672c364ea42e
--- /dev/null
+++ b/assets/icons/ai_bedrock.svg
@@ -0,0 +1,4 @@
+
+
diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json
index 1eb646c311d538..1b4a3c9523747c 100644
--- a/assets/keymaps/default-linux.json
+++ b/assets/keymaps/default-linux.json
@@ -184,9 +184,9 @@
"ctrl-alt-/": "assistant::ToggleModelSelector",
"ctrl-k h": "assistant::DeployHistory",
"ctrl-k l": "assistant::DeployPromptLibrary",
- "new": "assistant::NewContext",
- "ctrl-t": "assistant::NewContext",
- "ctrl-n": "assistant::NewContext"
+ "new": "assistant::NewChat",
+ "ctrl-t": "assistant::NewChat",
+ "ctrl-n": "assistant::NewChat"
}
},
{
@@ -368,7 +368,12 @@
"ctrl-\\": "pane::SplitRight",
"ctrl-k v": "markdown::OpenPreviewToTheSide",
"ctrl-shift-v": "markdown::OpenPreview",
- "ctrl-alt-shift-c": "editor::DisplayCursorNames"
+ "ctrl-alt-shift-c": "editor::DisplayCursorNames",
+ "ctrl-alt-y": "git::ToggleStaged",
+ "alt-y": "git::StageAndNext",
+ "alt-shift-y": "git::UnstageAndNext",
+ "alt-.": "editor::GoToHunk",
+ "alt-,": "editor::GoToPrevHunk"
}
},
{
@@ -705,12 +710,6 @@
"space": "project_panel::Open"
}
},
- {
- "context": "GitPanel && !CommitEditor",
- "bindings": {
- "escape": "git_panel::Close"
- }
- },
{
"context": "GitPanel && ChangesList",
"bindings": {
@@ -722,19 +721,36 @@
"ctrl-shift-space": "git::UnstageAll",
"tab": "git_panel::FocusEditor",
"shift-tab": "git_panel::FocusEditor",
- "escape": "git_panel::ToggleFocus"
+ "escape": "git_panel::ToggleFocus",
+ "ctrl-enter": "git::Commit",
+ "alt-enter": "menu::SecondaryConfirm"
+ }
+ },
+ {
+ "context": "GitCommit > Editor",
+ "bindings": {
+ "enter": "editor::Newline",
+ "ctrl-enter": "git::Commit"
}
},
{
"context": "GitPanel > Editor",
"bindings": {
"escape": "git_panel::FocusChanges",
- "ctrl-enter": "git::Commit",
"tab": "git_panel::FocusChanges",
"shift-tab": "git_panel::FocusChanges",
+ "ctrl-enter": "git::Commit",
"alt-up": "git_panel::FocusChanges"
}
},
+ {
+ "context": "GitCommit > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "editor::Newline",
+ "ctrl-enter": "git::Commit"
+ }
+ },
{
"context": "CollabPanel && not_editing",
"bindings": {
@@ -813,6 +829,7 @@
"pagedown": ["terminal::SendKeystroke", "pagedown"],
"escape": ["terminal::SendKeystroke", "escape"],
"enter": ["terminal::SendKeystroke", "enter"],
+ "ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
"shift-pageup": "terminal::ScrollPageUp",
"shift-pagedown": "terminal::ScrollPageDown",
diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json
index 3c10d6fa003bc2..63cd8f348b5add 100644
--- a/assets/keymaps/default-macos.json
+++ b/assets/keymaps/default-macos.json
@@ -211,8 +211,8 @@
"cmd-alt-/": "assistant::ToggleModelSelector",
"cmd-k h": "assistant::DeployHistory",
"cmd-k l": "assistant::DeployPromptLibrary",
- "cmd-t": "assistant::NewContext",
- "cmd-n": "assistant::NewContext"
+ "cmd-t": "assistant::NewChat",
+ "cmd-n": "assistant::NewChat"
}
},
{
@@ -751,22 +751,22 @@
}
},
{
- "context": "GitCommit > Editor",
+ "context": "GitPanel > Editor",
"use_key_equivalents": true,
"bindings": {
"enter": "editor::Newline",
- "cmd-enter": "git::Commit"
+ "cmd-enter": "git::Commit",
+ "tab": "git_panel::FocusChanges",
+ "shift-tab": "git_panel::FocusChanges",
+ "alt-up": "git_panel::FocusChanges"
}
},
{
- "context": "GitPanel > Editor",
+ "context": "GitCommit > Editor",
"use_key_equivalents": true,
"bindings": {
"enter": "editor::Newline",
- "cmd-enter": "git::Commit",
- "tab": "git_panel::FocusChanges",
- "shift-tab": "git_panel::FocusChanges",
- "alt-up": "git_panel::FocusChanges"
+ "cmd-enter": "git::Commit"
}
},
{
diff --git a/assets/keymaps/linux/emacs.json b/assets/keymaps/linux/emacs.json
index 2c1128d8d66922..cf1afd309c8497 100755
--- a/assets/keymaps/linux/emacs.json
+++ b/assets/keymaps/linux/emacs.json
@@ -48,6 +48,8 @@
"ctrl-_": "editor::Undo", // undo
"ctrl-/": "editor::Undo", // undo
"ctrl-x u": "editor::Undo", // undo
+ "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph
+ "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph
"ctrl-v": "editor::MovePageDown", // scroll-up
"alt-v": "editor::MovePageUp", // scroll-down
"ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer
diff --git a/assets/keymaps/macos/emacs.json b/assets/keymaps/macos/emacs.json
index 2c1128d8d66922..cf1afd309c8497 100755
--- a/assets/keymaps/macos/emacs.json
+++ b/assets/keymaps/macos/emacs.json
@@ -48,6 +48,8 @@
"ctrl-_": "editor::Undo", // undo
"ctrl-/": "editor::Undo", // undo
"ctrl-x u": "editor::Undo", // undo
+ "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph
+ "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph
"ctrl-v": "editor::MovePageDown", // scroll-up
"alt-v": "editor::MovePageUp", // scroll-down
"ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer
diff --git a/assets/settings/default.json b/assets/settings/default.json
index b6f3c71a8e4964..1c103c026857b3 100644
--- a/assets/settings/default.json
+++ b/assets/settings/default.json
@@ -1093,6 +1093,7 @@
"tab_size": 2
},
"Diff": {
+ "show_edit_predictions": false,
"remove_trailing_whitespace_on_save": false,
"ensure_final_newline_on_save": false
},
diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json
index 958cf4797dc6f4..f1a6b5c519ba2f 100644
--- a/assets/themes/gruvbox/gruvbox.json
+++ b/assets/themes/gruvbox/gruvbox.json
@@ -379,7 +379,7 @@
"font_weight": null
},
"variable": {
- "color": "#83a598ff",
+ "color": "#ebdbb2ff",
"font_style": null,
"font_weight": null
},
@@ -767,7 +767,7 @@
"font_weight": null
},
"variable": {
- "color": "#83a598ff",
+ "color": "#ebdbb2ff",
"font_style": null,
"font_weight": null
},
@@ -1155,7 +1155,7 @@
"font_weight": null
},
"variable": {
- "color": "#83a598ff",
+ "color": "#ebdbb2ff",
"font_style": null,
"font_weight": null
},
@@ -1543,7 +1543,7 @@
"font_weight": null
},
"variable": {
- "color": "#066578ff",
+ "color": "#282828ff",
"font_style": null,
"font_weight": null
},
@@ -1931,7 +1931,7 @@
"font_weight": null
},
"variable": {
- "color": "#066578ff",
+ "color": "#282828ff",
"font_style": null,
"font_weight": null
},
@@ -2319,7 +2319,7 @@
"font_weight": null
},
"variable": {
- "color": "#066578ff",
+ "color": "#282828ff",
"font_style": null,
"font_weight": null
},
diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json
index 9d7a29cd957783..3c7421c04ec39c 100644
--- a/assets/themes/one/one.json
+++ b/assets/themes/one/one.json
@@ -365,7 +365,7 @@
"font_weight": null
},
"variable": {
- "color": "#dce0e5ff",
+ "color": "#acb2beff",
"font_style": null,
"font_weight": null
},
diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs
index 8100cbe5c86ceb..c64d621143c29d 100644
--- a/crates/anthropic/src/anthropic.rs
+++ b/crates/anthropic/src/anthropic.rs
@@ -30,6 +30,8 @@ pub enum Model {
#[default]
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
Claude3_5Sonnet,
+ #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
+ Claude3_7Sonnet,
#[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
Claude3_5Haiku,
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
@@ -59,6 +61,8 @@ impl Model {
pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-3-5-sonnet") {
Ok(Self::Claude3_5Sonnet)
+ } else if id.starts_with("claude-3-7-sonnet") {
+ Ok(Self::Claude3_7Sonnet)
} else if id.starts_with("claude-3-5-haiku") {
Ok(Self::Claude3_5Haiku)
} else if id.starts_with("claude-3-opus") {
@@ -75,6 +79,7 @@ impl Model {
pub fn id(&self) -> &str {
match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
+ Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
Model::Claude3Opus => "claude-3-opus-latest",
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
@@ -85,6 +90,7 @@ impl Model {
pub fn display_name(&self) -> &str {
match self {
+ Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3_5Haiku => "Claude 3.5 Haiku",
Self::Claude3Opus => "Claude 3 Opus",
@@ -98,13 +104,14 @@ impl Model {
pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
match self {
- Self::Claude3_5Sonnet | Self::Claude3_5Haiku | Self::Claude3Haiku => {
- Some(AnthropicModelCacheConfiguration {
- min_total_token: 2_048,
- should_speculate: true,
- max_cache_anchors: 4,
- })
- }
+ Self::Claude3_5Sonnet
+ | Self::Claude3_5Haiku
+ | Self::Claude3_7Sonnet
+ | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
+ min_total_token: 2_048,
+ should_speculate: true,
+ max_cache_anchors: 4,
+ }),
Self::Custom {
cache_configuration,
..
@@ -117,6 +124,7 @@ impl Model {
match self {
Self::Claude3_5Sonnet
| Self::Claude3_5Haiku
+ | Self::Claude3_7Sonnet
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => 200_000,
@@ -127,7 +135,7 @@ impl Model {
pub fn max_output_tokens(&self) -> u32 {
match self {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
- Self::Claude3_5Sonnet | Self::Claude3_5Haiku => 8_192,
+ Self::Claude3_5Sonnet | Self::Claude3_7Sonnet | Self::Claude3_5Haiku => 8_192,
Self::Custom {
max_output_tokens, ..
} => max_output_tokens.unwrap_or(4_096),
@@ -137,6 +145,7 @@ impl Model {
pub fn default_temperature(&self) -> f32 {
match self {
Self::Claude3_5Sonnet
+ | Self::Claude3_7Sonnet
| Self::Claude3_5Haiku
| Self::Claude3Opus
| Self::Claude3Sonnet
diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml
index 1bb7cbc1ae1163..7df6ff10ac4295 100644
--- a/crates/assistant/Cargo.toml
+++ b/crates/assistant/Cargo.toml
@@ -43,7 +43,6 @@ indoc.workspace = true
language.workspace = true
language_model.workspace = true
language_model_selector.workspace = true
-language_models.workspace = true
log.workspace = true
lsp.workspace = true
menu.workspace = true
diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs
index 7817c958b395c8..3c2c1db9974541 100644
--- a/crates/assistant/src/assistant.rs
+++ b/crates/assistant/src/assistant.rs
@@ -33,7 +33,7 @@ actions!(
[
InsertActivePrompt,
DeployHistory,
- NewContext,
+ NewChat,
CycleNextInlineAssist,
CyclePreviousInlineAssist
]
diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs
index e0791e003937ac..d40640246d6c23 100644
--- a/crates/assistant/src/assistant_panel.rs
+++ b/crates/assistant/src/assistant_panel.rs
@@ -1,6 +1,6 @@
use crate::assistant_configuration::{ConfigurationView, ConfigurationViewEvent};
use crate::{
- terminal_inline_assistant::TerminalInlineAssistant, DeployHistory, InlineAssistant, NewContext,
+ terminal_inline_assistant::TerminalInlineAssistant, DeployHistory, InlineAssistant, NewChat,
};
use anyhow::{anyhow, Result};
use assistant_context_editor::{
@@ -129,7 +129,7 @@ impl AssistantPanel {
workspace.project().clone(),
Default::default(),
None,
- NewContext.boxed_clone(),
+ NewChat.boxed_clone(),
window,
cx,
);
@@ -228,12 +228,12 @@ impl AssistantPanel {
IconButton::new("new-chat", IconName::Plus)
.icon_size(IconSize::Small)
.on_click(cx.listener(|_, _, window, cx| {
- window.dispatch_action(NewContext.boxed_clone(), cx)
+ window.dispatch_action(NewChat.boxed_clone(), cx)
}))
.tooltip(move |window, cx| {
Tooltip::for_action_in(
"New Chat",
- &NewContext,
+ &NewChat,
&focus_handle,
window,
cx,
@@ -256,7 +256,7 @@ impl AssistantPanel {
let focus_handle = _pane.focus_handle(cx);
Some(ContextMenu::build(window, cx, move |menu, _, _| {
menu.context(focus_handle.clone())
- .action("New Chat", Box::new(NewContext))
+ .action("New Chat", Box::new(NewChat))
.action("History", Box::new(DeployHistory))
.action("Prompt Library", Box::new(DeployPromptLibrary))
.action("Configure", Box::new(ShowConfiguration))
@@ -760,7 +760,7 @@ impl AssistantPanel {
pub fn create_new_context(
workspace: &mut Workspace,
- _: &NewContext,
+ _: &NewChat,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
@@ -1206,7 +1206,7 @@ impl Render for AssistantPanel {
v_flex()
.key_context("AssistantPanel")
.size_full()
- .on_action(cx.listener(|this, _: &NewContext, window, cx| {
+ .on_action(cx.listener(|this, _: &NewChat, window, cx| {
this.new_context(window, cx);
}))
.on_action(cx.listener(|this, _: &ShowConfiguration, window, cx| {
diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs
index eb154ea0209a9c..02ae9a2333d502 100644
--- a/crates/assistant/src/inline_assistant.rs
+++ b/crates/assistant/src/inline_assistant.rs
@@ -32,11 +32,10 @@ use gpui::{
};
use language::{line_diff, Buffer, IndentKind, Point, Selection, TransactionId};
use language_model::{
- LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
- LanguageModelTextStream, Role,
+ report_assistant_event, LanguageModel, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, LanguageModelTextStream, Role,
};
use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu};
-use language_models::report_assistant_event;
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::{CodeAction, ProjectTransaction};
diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs
index e8b049371b2c1f..87b5f4bbda654e 100644
--- a/crates/assistant/src/terminal_inline_assistant.rs
+++ b/crates/assistant/src/terminal_inline_assistant.rs
@@ -16,10 +16,10 @@ use gpui::{
};
use language::Buffer;
use language_model::{
- LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
+ report_assistant_event, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, Role,
};
use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu};
-use language_models::report_assistant_event;
use prompt_library::PromptBuilder;
use settings::{update_settings_file, Settings};
use std::{
diff --git a/crates/assistant2/Cargo.toml b/crates/assistant2/Cargo.toml
index 9a74a5e2fec07b..13116c2ab5f022 100644
--- a/crates/assistant2/Cargo.toml
+++ b/crates/assistant2/Cargo.toml
@@ -46,7 +46,6 @@ itertools.workspace = true
language.workspace = true
language_model.workspace = true
language_model_selector.workspace = true
-language_models.workspace = true
log.workspace = true
lsp.workspace = true
markdown.workspace = true
diff --git a/crates/assistant2/src/buffer_codegen.rs b/crates/assistant2/src/buffer_codegen.rs
index 4e62f9549dc820..a07b288f71dd26 100644
--- a/crates/assistant2/src/buffer_codegen.rs
+++ b/crates/assistant2/src/buffer_codegen.rs
@@ -9,10 +9,9 @@ use futures::{channel::mpsc, future::LocalBoxFuture, join, SinkExt, Stream, Stre
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription, Task};
use language::{line_diff, Buffer, IndentKind, Point, TransactionId};
use language_model::{
- LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
- LanguageModelTextStream, Role,
+ report_assistant_event, LanguageModel, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, LanguageModelTextStream, Role,
};
-use language_models::report_assistant_event;
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use prompt_library::PromptBuilder;
diff --git a/crates/assistant2/src/inline_assistant.rs b/crates/assistant2/src/inline_assistant.rs
index c307b47cb14294..a7f7b1601de030 100644
--- a/crates/assistant2/src/inline_assistant.rs
+++ b/crates/assistant2/src/inline_assistant.rs
@@ -24,8 +24,7 @@ use gpui::{
UpdateGlobal, WeakEntity, Window,
};
use language::{Buffer, Point, Selection, TransactionId};
-use language_model::LanguageModelRegistry;
-use language_models::report_assistant_event;
+use language_model::{report_assistant_event, LanguageModelRegistry};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::{CodeAction, ProjectTransaction};
diff --git a/crates/assistant2/src/terminal_codegen.rs b/crates/assistant2/src/terminal_codegen.rs
index c9b6a541080449..5a1873fe556dfa 100644
--- a/crates/assistant2/src/terminal_codegen.rs
+++ b/crates/assistant2/src/terminal_codegen.rs
@@ -2,8 +2,7 @@ use crate::inline_prompt_editor::CodegenStatus;
use client::telemetry::Telemetry;
use futures::{channel::mpsc, SinkExt, StreamExt};
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task};
-use language_model::{LanguageModelRegistry, LanguageModelRequest};
-use language_models::report_assistant_event;
+use language_model::{report_assistant_event, LanguageModelRegistry, LanguageModelRequest};
use std::{sync::Arc, time::Instant};
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal::Terminal;
diff --git a/crates/assistant2/src/terminal_inline_assistant.rs b/crates/assistant2/src/terminal_inline_assistant.rs
index 9abe2cbadb3338..788ccc9ecacc8e 100644
--- a/crates/assistant2/src/terminal_inline_assistant.rs
+++ b/crates/assistant2/src/terminal_inline_assistant.rs
@@ -13,9 +13,9 @@ use fs::Fs;
use gpui::{App, Entity, Focusable, Global, Subscription, UpdateGlobal, WeakEntity};
use language::Buffer;
use language_model::{
- LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
+ report_assistant_event, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, Role,
};
-use language_models::report_assistant_event;
use prompt_library::PromptBuilder;
use std::sync::Arc;
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
diff --git a/crates/assistant2/src/thread.rs b/crates/assistant2/src/thread.rs
index 9ccb0664807bf5..7eeb13f8726e42 100644
--- a/crates/assistant2/src/thread.rs
+++ b/crates/assistant2/src/thread.rs
@@ -10,9 +10,9 @@ use gpui::{App, Context, EventEmitter, SharedString, Task};
use language_model::{
LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolUse,
- LanguageModelToolUseId, MessageContent, Role, StopReason,
+ LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
+ Role, StopReason,
};
-use language_models::provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError};
use serde::{Deserialize, Serialize};
use util::{post_inc, TryFutureExt as _};
use uuid::Uuid;
diff --git a/crates/assistant_context_editor/Cargo.toml b/crates/assistant_context_editor/Cargo.toml
index aebadc4ca9d029..0feb0543bf85f0 100644
--- a/crates/assistant_context_editor/Cargo.toml
+++ b/crates/assistant_context_editor/Cargo.toml
@@ -30,7 +30,6 @@ indexed_docs.workspace = true
language.workspace = true
language_model.workspace = true
language_model_selector.workspace = true
-language_models.workspace = true
log.workspace = true
multi_buffer.workspace = true
open_ai.workspace = true
diff --git a/crates/assistant_context_editor/src/context.rs b/crates/assistant_context_editor/src/context.rs
index 7b72c4c04de8c4..d6447572bc7e03 100644
--- a/crates/assistant_context_editor/src/context.rs
+++ b/crates/assistant_context_editor/src/context.rs
@@ -19,13 +19,10 @@ use gpui::{
};
use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
use language_model::{
- LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
- LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
- LanguageModelToolUseId, MessageContent, Role, StopReason,
-};
-use language_models::{
- provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
- report_assistant_event,
+ report_assistant_event, LanguageModel, LanguageModelCacheConfiguration,
+ LanguageModelCompletionEvent, LanguageModelImage, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, LanguageModelToolUseId, MaxMonthlySpendReachedError,
+ MessageContent, PaymentRequiredError, Role, StopReason,
};
use open_ai::Model as OpenAiModel;
use paths::contexts_dir;
diff --git a/crates/assistant_context_editor/src/context_editor.rs b/crates/assistant_context_editor/src/context_editor.rs
index e3be2eec77f98d..12a522f56a36a1 100644
--- a/crates/assistant_context_editor/src/context_editor.rs
+++ b/crates/assistant_context_editor/src/context_editor.rs
@@ -1234,8 +1234,8 @@ impl ContextEditor {
.px_1()
.mr_0p5()
.border_1()
- .border_color(theme::color_alpha(colors.border_variant, 0.6))
- .bg(theme::color_alpha(colors.element_background, 0.6))
+ .border_color(colors.border_variant.alpha(0.6))
+ .bg(colors.element_background.alpha(0.6))
.child("esc"),
)
.child("to cancel")
@@ -1514,15 +1514,11 @@ impl ContextEditor {
(!text.is_empty()).then_some((text, true))
} else {
- let anchor = context_editor.selections.newest_anchor();
- let text = context_editor
- .buffer()
- .read(cx)
- .read(cx)
- .text_for_range(anchor.range())
- .collect::<String>();
+ let selection = context_editor.selections.newest_adjusted(cx);
+ let buffer = context_editor.buffer().read(cx).snapshot(cx);
+ let selected_text = buffer.text_for_range(selection.range()).collect::<String>();
- (!text.is_empty()).then_some((text, false))
+ (!selected_text.is_empty()).then_some((selected_text, false))
}
})
}
@@ -1777,23 +1773,16 @@ impl ContextEditor {
&mut self,
cx: &mut Context<Self>,
) -> (String, CopyMetadata, Vec>) {
- let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| {
- let mut selection = editor.selections.newest::<Point>(cx);
+ let (selection, creases) = self.editor.update(cx, |editor, cx| {
+ let mut selection = editor.selections.newest_adjusted(cx);
let snapshot = editor.buffer().read(cx).snapshot(cx);
- let is_entire_line = selection.is_empty() || editor.selections.line_mode;
- if is_entire_line {
- selection.start = Point::new(selection.start.row, 0);
- selection.end =
- cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0));
- selection.goal = SelectionGoal::None;
- }
+ selection.goal = SelectionGoal::None;
let selection_start = snapshot.point_to_offset(selection.start);
(
- snapshot.clone(),
- selection.clone(),
+ selection.map(|point| snapshot.point_to_offset(point)),
editor.display_map.update(cx, |display_map, cx| {
display_map
.snapshot(cx)
@@ -1833,7 +1822,6 @@ impl ContextEditor {
)
});
- let selection = selection.map(|point| snapshot.point_to_offset(point));
let context = self.context.read(cx);
let mut text = String::new();
diff --git a/crates/assistant_settings/src/assistant_settings.rs b/crates/assistant_settings/src/assistant_settings.rs
index 5e044282b07b49..d12f4a23f05fe3 100644
--- a/crates/assistant_settings/src/assistant_settings.rs
+++ b/crates/assistant_settings/src/assistant_settings.rs
@@ -359,6 +359,7 @@ fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema:
schemars::schema::SchemaObject {
enum_values: Some(vec![
"anthropic".into(),
+ "bedrock".into(),
"google".into(),
"lmstudio".into(),
"ollama".into(),
diff --git a/crates/assistant_tools/src/now_tool.rs b/crates/assistant_tools/src/now_tool.rs
index b9d22b66b48c95..4e4e7228922e0e 100644
--- a/crates/assistant_tools/src/now_tool.rs
+++ b/crates/assistant_tools/src/now_tool.rs
@@ -17,7 +17,7 @@ pub enum Timezone {
}
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
-pub struct FileToolInput {
+pub struct NowToolInput {
/// The timezone to use for the datetime.
timezone: Timezone,
}
@@ -34,7 +34,7 @@ impl Tool for NowTool {
}
fn input_schema(&self) -> serde_json::Value {
- let schema = schemars::schema_for!(FileToolInput);
+ let schema = schemars::schema_for!(NowToolInput);
serde_json::to_value(&schema).unwrap()
}
@@ -45,7 +45,7 @@ impl Tool for NowTool {
_window: &mut Window,
_cx: &mut App,
) -> Task> {
- let input: FileToolInput = match serde_json::from_value(input) {
+ let input: NowToolInput = match serde_json::from_value(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
diff --git a/crates/aws_http_client/Cargo.toml b/crates/aws_http_client/Cargo.toml
new file mode 100644
index 00000000000000..8715fe1b56de18
--- /dev/null
+++ b/crates/aws_http_client/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "aws_http_client"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/aws_http_client.rs"
+
+[features]
+default = []
+
+[dependencies]
+aws-smithy-runtime-api.workspace = true
+aws-smithy-types.workspace = true
+futures.workspace = true
+http_client.workspace = true
+tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
diff --git a/crates/aws_http_client/LICENSE-GPL b/crates/aws_http_client/LICENSE-GPL
new file mode 120000
index 00000000000000..89e542f750cd38
--- /dev/null
+++ b/crates/aws_http_client/LICENSE-GPL
@@ -0,0 +1 @@
+../../LICENSE-GPL
\ No newline at end of file
diff --git a/crates/aws_http_client/src/aws_http_client.rs b/crates/aws_http_client/src/aws_http_client.rs
new file mode 100644
index 00000000000000..f99280658131e2
--- /dev/null
+++ b/crates/aws_http_client/src/aws_http_client.rs
@@ -0,0 +1,118 @@
+use std::fmt;
+use std::sync::Arc;
+
+use aws_smithy_runtime_api::client::http::{
+ HttpClient as AwsClient, HttpConnector as AwsConnector,
+ HttpConnectorFuture as AwsConnectorFuture, HttpConnectorFuture, HttpConnectorSettings,
+ SharedHttpConnector,
+};
+use aws_smithy_runtime_api::client::orchestrator::{HttpRequest as AwsHttpRequest, HttpResponse};
+use aws_smithy_runtime_api::client::result::ConnectorError;
+use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents;
+use aws_smithy_runtime_api::http::StatusCode;
+use aws_smithy_types::body::SdkBody;
+use futures::AsyncReadExt;
+use http_client::{AsyncBody, Inner};
+use http_client::{HttpClient, Request};
+use tokio::runtime::Handle;
+
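+/// Adapter that forwards requests from the AWS SDK's connector interface through Zed's `HttpClient`.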
+struct AwsHttpConnector {
+ client: Arc<dyn HttpClient>,
+ handle: Handle,
+}
+
+impl std::fmt::Debug for AwsHttpConnector {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("AwsHttpConnector").finish()
+ }
+}
+
+impl AwsConnector for AwsHttpConnector {
+ fn call(&self, request: AwsHttpRequest) -> AwsConnectorFuture {
+ let req = match request.try_into_http1x() {
+ Ok(req) => req,
+ Err(err) => {
+ return HttpConnectorFuture::ready(Err(ConnectorError::other(err.into(), None)))
+ }
+ };
+
+ let (parts, body) = req.into_parts();
+
+ let response = self
+ .client
+ .send(Request::from_parts(parts, convert_to_async_body(body)));
+
+ let handle = self.handle.clone();
+
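+ // Await the response from Zed's client, then buffer its body into an `SdkBody` on the Tokio handle.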
+ HttpConnectorFuture::new(async move {
+ let response = match response.await {
+ Ok(response) => response,
+ Err(err) => return Err(ConnectorError::other(err.into(), None)),
+ };
+ let (parts, body) = response.into_parts();
+ let body = convert_to_sdk_body(body, handle).await;
+
+ Ok(HttpResponse::new(
+ StatusCode::try_from(parts.status.as_u16()).unwrap(),
+ body,
+ ))
+ })
+ }
+}
+
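+/// AWS SDK `HttpClient` implementation backed by Zed's HTTP client; the stored Tokio `Handle` is used when draining response bodies.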
+#[derive(Clone)]
+pub struct AwsHttpClient {
+ client: Arc<dyn HttpClient>,
+ handler: Handle,
+}
+
+impl std::fmt::Debug for AwsHttpClient {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("AwsHttpClient").finish()
+ }
+}
+
+impl AwsHttpClient {
+ pub fn new(client: Arc<dyn HttpClient>, handle: Handle) -> Self {
+ Self {
+ client,
+ handler: handle,
+ }
+ }
+}
+
+impl AwsClient for AwsHttpClient {
+ fn http_connector(
+ &self,
+ _settings: &HttpConnectorSettings,
+ _components: &RuntimeComponents,
+ ) -> SharedHttpConnector {
+ SharedHttpConnector::new(AwsHttpConnector {
+ client: self.client.clone(),
+ handle: self.handler.clone(),
+ })
+ }
+}
+
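+/// Buffers an `AsyncBody` fully into memory and wraps it as an `SdkBody`; async readers are read to the end on the given Tokio handle.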
+pub async fn convert_to_sdk_body(body: AsyncBody, handle: Handle) -> SdkBody {
+ match body.0 {
+ Inner::Empty => SdkBody::empty(),
+ Inner::Bytes(bytes) => SdkBody::from(bytes.into_inner()),
+ Inner::AsyncReader(mut reader) => {
+ let buffer = handle.spawn(async move {
+ let mut buffer = Vec::new();
+ let _ = reader.read_to_end(&mut buffer).await;
+ buffer
+ });
+
+ SdkBody::from(buffer.await.unwrap_or_default())
+ }
+ }
+}
+
+pub fn convert_to_async_body(body: SdkBody) -> AsyncBody {
+ match body.bytes() {
+ Some(bytes) => AsyncBody::from((*bytes).to_vec()),
+ None => AsyncBody::empty(),
+ }
+}
diff --git a/crates/bedrock/Cargo.toml b/crates/bedrock/Cargo.toml
new file mode 100644
index 00000000000000..e99f7e2cf08327
--- /dev/null
+++ b/crates/bedrock/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "bedrock"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/bedrock.rs"
+
+[features]
+default = []
+schemars = ["dep:schemars"]
+
+[dependencies]
+anyhow.workspace = true
+aws-sdk-bedrockruntime = { workspace = true, features = ["behavior-version-latest"] }
+aws-smithy-types = {workspace = true}
+futures.workspace = true
+schemars = { workspace = true, optional = true }
+serde.workspace = true
+serde_json.workspace = true
+strum.workspace = true
+thiserror.workspace = true
+tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
diff --git a/crates/bedrock/LICENSE-GPL b/crates/bedrock/LICENSE-GPL
new file mode 120000
index 00000000000000..89e542f750cd38
--- /dev/null
+++ b/crates/bedrock/LICENSE-GPL
@@ -0,0 +1 @@
+../../LICENSE-GPL
\ No newline at end of file
diff --git a/crates/bedrock/src/bedrock.rs b/crates/bedrock/src/bedrock.rs
new file mode 100644
index 00000000000000..fa17bc03834941
--- /dev/null
+++ b/crates/bedrock/src/bedrock.rs
@@ -0,0 +1,166 @@
+mod models;
+
+use std::pin::Pin;
+
+use anyhow::{anyhow, Context, Error, Result};
+use aws_sdk_bedrockruntime as bedrock;
+pub use aws_sdk_bedrockruntime as bedrock_client;
+pub use aws_sdk_bedrockruntime::types::{
+ ContentBlock as BedrockInnerContent, SpecificToolChoice as BedrockSpecificTool,
+ ToolChoice as BedrockToolChoice, ToolInputSchema as BedrockToolInputSchema,
+ ToolSpecification as BedrockTool,
+};
+use aws_smithy_types::{Document, Number as AwsNumber};
+pub use bedrock::operation::converse_stream::ConverseStreamInput as BedrockStreamingRequest;
+pub use bedrock::types::{
+ ContentBlock as BedrockRequestContent, ConversationRole as BedrockRole,
+ ConverseOutput as BedrockResponse, ConverseStreamOutput as BedrockStreamingResponse,
+ Message as BedrockMessage, ResponseStream as BedrockResponseStream,
+};
+use futures::stream::{self, BoxStream, Stream};
+use serde::{Deserialize, Serialize};
+use serde_json::{Number, Value};
+use thiserror::Error;
+
+pub use crate::models::*;
+
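+/// Performs a non-streaming Converse call and returns the model's output.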
+pub async fn complete(
+ client: &bedrock::Client,
+ request: Request,
+) -> Result<BedrockResponse, BedrockError> {
+ let response = bedrock::Client::converse(client)
+ .model_id(request.model.clone())
+ .set_messages(request.messages.into())
+ .send()
+ .await
+ .context("failed to send request to Bedrock");
+
+ match response {
+ Ok(output) => output
+ .output
+ .ok_or_else(|| BedrockError::Other(anyhow!("no output"))),
+ Err(err) => Err(BedrockError::Other(err)),
+ }
+}
+
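+/// Starts a ConverseStream call on the Tokio handle and returns the stream of output events.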
+pub async fn stream_completion(
+ client: bedrock::Client,
+ request: Request,
+ handle: tokio::runtime::Handle,
+) -> Result<BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>, Error> {
+ handle
+ .spawn(async move {
+ let response = bedrock::Client::converse_stream(&client)
+ .model_id(request.model.clone())
+ .set_messages(request.messages.into())
+ .send()
+ .await;
+
+ match response {
+ Ok(output) => {
+ let stream: Pin<
+ Box<
+ dyn Stream<Item = Result<BedrockStreamingResponse, BedrockError>>
+ + Send,
+ >,
+ > = Box::pin(stream::unfold(output.stream, |mut stream| async move {
+ match stream.recv().await {
+ Ok(Some(output)) => Some((Ok(output), stream)),
+ Ok(None) => None,
+ Err(err) => {
+ Some((
+ // TODO: Figure out how we can capture Throttling Exceptions
+ Err(BedrockError::ClientError(anyhow!(
+ "{:?}",
+ aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
+ ))),
+ stream,
+ ))
+ }
+ }
+ }));
+ Ok(stream)
+ }
+ Err(err) => Err(anyhow!(
+ "{:?}",
+ aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
+ )),
+ }
+ })
+ .await
+ .map_err(|err| anyhow!("failed to spawn task: {err:?}"))?
+}
+
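+/// Converts an `aws_smithy_types::Document` into a `serde_json::Value`.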
+pub fn aws_document_to_value(document: &Document) -> Value {
+ match document {
+ Document::Null => Value::Null,
+ Document::Bool(value) => Value::Bool(*value),
+ Document::Number(value) => match *value {
+ AwsNumber::PosInt(value) => Value::Number(Number::from(value)),
+ AwsNumber::NegInt(value) => Value::Number(Number::from(value)),
+ AwsNumber::Float(value) => Value::Number(Number::from_f64(value).unwrap()),
+ },
+ Document::String(value) => Value::String(value.clone()),
+ Document::Array(array) => Value::Array(array.iter().map(aws_document_to_value).collect()),
+ Document::Object(map) => Value::Object(
+ map.iter()
+ .map(|(key, value)| (key.clone(), aws_document_to_value(value)))
+ .collect(),
+ ),
+ }
+}
+
+pub fn value_to_aws_document(value: &Value) -> Document {
+ match value {
+ Value::Null => Document::Null,
+ Value::Bool(value) => Document::Bool(*value),
+ Value::Number(value) => {
+ if let Some(value) = value.as_u64() {
+ Document::Number(AwsNumber::PosInt(value))
+ } else if let Some(value) = value.as_i64() {
+ Document::Number(AwsNumber::NegInt(value))
+ } else if let Some(value) = value.as_f64() {
+ Document::Number(AwsNumber::Float(value))
+ } else {
+ Document::Null
+ }
+ }
+ Value::String(value) => Document::String(value.clone()),
+ Value::Array(array) => Document::Array(array.iter().map(value_to_aws_document).collect()),
+ Value::Object(map) => Document::Object(
+ map.iter()
+ .map(|(key, value)| (key.clone(), value_to_aws_document(value)))
+ .collect(),
+ ),
+ }
+}
+
+#[derive(Debug)]
+pub struct Request {
+ pub model: String,
+ pub max_tokens: u32,
+ pub messages: Vec<BedrockMessage>,
+ pub tools: Vec<BedrockTool>,
+ pub tool_choice: Option<BedrockToolChoice>,
+ pub system: Option<String>,
+ pub metadata: Option<Metadata>,
+ pub stop_sequences: Vec<String>,
+ pub temperature: Option<f32>,
+ pub top_k: Option<u32>,
+ pub top_p: Option<f32>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Metadata {
+ pub user_id: Option<String>,
+}
+
+#[derive(Error, Debug)]
+pub enum BedrockError {
+ #[error("client error: {0}")]
+ ClientError(anyhow::Error),
+ #[error("extension error: {0}")]
+ ExtensionError(anyhow::Error),
+ #[error(transparent)]
+ Other(#[from] anyhow::Error),
+}
diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs
new file mode 100644
index 00000000000000..a8d0614e5d21b2
--- /dev/null
+++ b/crates/bedrock/src/models.rs
@@ -0,0 +1,199 @@
+use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
+use strum::EnumIter;
+
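+// Bedrock model identifiers; `id()` returns the Bedrock model ID, with the Anthropic and Amazon Nova variants using `us.`-prefixed cross-region inference profile IDs.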
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
+pub enum Model {
+ // Anthropic models (already included)
+ #[default]
+ #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
+ Claude3_5Sonnet,
+ #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
+ Claude3Opus,
+ #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
+ Claude3Sonnet,
+ #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
+ Claude3_5Haiku,
+ // Amazon Nova Models
+ AmazonNovaLite,
+ AmazonNovaMicro,
+ AmazonNovaPro,
+ // AI21 models
+ AI21J2GrandeInstruct,
+ AI21J2JumboInstruct,
+ AI21J2Mid,
+ AI21J2MidV1,
+ AI21J2Ultra,
+ AI21J2UltraV1_8k,
+ AI21J2UltraV1,
+ AI21JambaInstructV1,
+ AI21Jamba15LargeV1,
+ AI21Jamba15MiniV1,
+ // Cohere models
+ CohereCommandTextV14_4k,
+ CohereCommandRV1,
+ CohereCommandRPlusV1,
+ CohereCommandLightTextV14_4k,
+ // Meta models
+ MetaLlama38BInstructV1,
+ MetaLlama370BInstructV1,
+ MetaLlama318BInstructV1_128k,
+ MetaLlama318BInstructV1,
+ MetaLlama3170BInstructV1_128k,
+ MetaLlama3170BInstructV1,
+ MetaLlama3211BInstructV1,
+ MetaLlama3290BInstructV1,
+ MetaLlama321BInstructV1,
+ MetaLlama323BInstructV1,
+ // Mistral models
+ MistralMistral7BInstructV0,
+ MistralMixtral8x7BInstructV0,
+ MistralMistralLarge2402V1,
+ MistralMistralSmall2402V1,
+ #[serde(rename = "custom")]
+ Custom {
+ name: String,
+ max_tokens: usize,
+ /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
+ display_name: Option<String>,
+ max_output_tokens: Option<u32>,
+ default_temperature: Option<f32>,
+ },
+}
+
+impl Model {
+ pub fn from_id(id: &str) -> anyhow::Result<Self> {
+ if id.starts_with("claude-3-5-sonnet") {
+ Ok(Self::Claude3_5Sonnet)
+ } else if id.starts_with("claude-3-opus") {
+ Ok(Self::Claude3Opus)
+ } else if id.starts_with("claude-3-sonnet") {
+ Ok(Self::Claude3Sonnet)
+ } else if id.starts_with("claude-3-5-haiku") {
+ Ok(Self::Claude3_5Haiku)
+ } else {
+ Err(anyhow!("invalid model id"))
+ }
+ }
+
+ pub fn id(&self) -> &str {
+ match self {
+ Model::Claude3_5Sonnet => "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
+ Model::Claude3Opus => "us.anthropic.claude-3-opus-20240229-v1:0",
+ Model::Claude3Sonnet => "us.anthropic.claude-3-sonnet-20240229-v1:0",
+ Model::Claude3_5Haiku => "us.anthropic.claude-3-5-haiku-20241022-v1:0",
+ Model::AmazonNovaLite => "us.amazon.nova-lite-v1:0",
+ Model::AmazonNovaMicro => "us.amazon.nova-micro-v1:0",
+ Model::AmazonNovaPro => "us.amazon.nova-pro-v1:0",
+ Model::AI21J2GrandeInstruct => "ai21.j2-grande-instruct",
+ Model::AI21J2JumboInstruct => "ai21.j2-jumbo-instruct",
+ Model::AI21J2Mid => "ai21.j2-mid",
+ Model::AI21J2MidV1 => "ai21.j2-mid-v1",
+ Model::AI21J2Ultra => "ai21.j2-ultra",
+ Model::AI21J2UltraV1_8k => "ai21.j2-ultra-v1:0:8k",
+ Model::AI21J2UltraV1 => "ai21.j2-ultra-v1",
+ Model::AI21JambaInstructV1 => "ai21.jamba-instruct-v1:0",
+ Model::AI21Jamba15LargeV1 => "ai21.jamba-1-5-large-v1:0",
+ Model::AI21Jamba15MiniV1 => "ai21.jamba-1-5-mini-v1:0",
+ Model::CohereCommandTextV14_4k => "cohere.command-text-v14:7:4k",
+ Model::CohereCommandRV1 => "cohere.command-r-v1:0",
+ Model::CohereCommandRPlusV1 => "cohere.command-r-plus-v1:0",
+ Model::CohereCommandLightTextV14_4k => "cohere.command-light-text-v14:7:4k",
+ Model::MetaLlama38BInstructV1 => "meta.llama3-8b-instruct-v1:0",
+ Model::MetaLlama370BInstructV1 => "meta.llama3-70b-instruct-v1:0",
+ Model::MetaLlama318BInstructV1_128k => "meta.llama3-1-8b-instruct-v1:0:128k",
+ Model::MetaLlama318BInstructV1 => "meta.llama3-1-8b-instruct-v1:0",
+ Model::MetaLlama3170BInstructV1_128k => "meta.llama3-1-70b-instruct-v1:0:128k",
+ Model::MetaLlama3170BInstructV1 => "meta.llama3-1-70b-instruct-v1:0",
+ Model::MetaLlama3211BInstructV1 => "meta.llama3-2-11b-instruct-v1:0",
+ Model::MetaLlama3290BInstructV1 => "meta.llama3-2-90b-instruct-v1:0",
+ Model::MetaLlama321BInstructV1 => "meta.llama3-2-1b-instruct-v1:0",
+ Model::MetaLlama323BInstructV1 => "meta.llama3-2-3b-instruct-v1:0",
+ Model::MistralMistral7BInstructV0 => "mistral.mistral-7b-instruct-v0:2",
+ Model::MistralMixtral8x7BInstructV0 => "mistral.mixtral-8x7b-instruct-v0:1",
+ Model::MistralMistralLarge2402V1 => "mistral.mistral-large-2402-v1:0",
+ Model::MistralMistralSmall2402V1 => "mistral.mistral-small-2402-v1:0",
+ Self::Custom { name, .. } => name,
+ }
+ }
+
+ pub fn display_name(&self) -> &str {
+ match self {
+ Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
+ Self::Claude3Opus => "Claude 3 Opus",
+ Self::Claude3Sonnet => "Claude 3 Sonnet",
+ Self::Claude3_5Haiku => "Claude 3.5 Haiku",
+ Self::AmazonNovaLite => "Amazon Nova Lite",
+ Self::AmazonNovaMicro => "Amazon Nova Micro",
+ Self::AmazonNovaPro => "Amazon Nova Pro",
+ Self::AI21J2GrandeInstruct => "AI21 Jurassic2 Grande Instruct",
+ Self::AI21J2JumboInstruct => "AI21 Jurassic2 Jumbo Instruct",
+ Self::AI21J2Mid => "AI21 Jurassic2 Mid",
+ Self::AI21J2MidV1 => "AI21 Jurassic2 Mid V1",
+ Self::AI21J2Ultra => "AI21 Jurassic2 Ultra",
+ Self::AI21J2UltraV1_8k => "AI21 Jurassic2 Ultra V1 8K",
+ Self::AI21J2UltraV1 => "AI21 Jurassic2 Ultra V1",
+ Self::AI21JambaInstructV1 => "AI21 Jamba Instruct",
+ Self::AI21Jamba15LargeV1 => "AI21 Jamba 1.5 Large",
+ Self::AI21Jamba15MiniV1 => "AI21 Jamba 1.5 Mini",
+ Self::CohereCommandTextV14_4k => "Cohere Command Text V14 4K",
+ Self::CohereCommandRV1 => "Cohere Command R V1",
+ Self::CohereCommandRPlusV1 => "Cohere Command R Plus V1",
+ Self::CohereCommandLightTextV14_4k => "Cohere Command Light Text V14 4K",
+ Self::MetaLlama38BInstructV1 => "Meta Llama 3 8B Instruct V1",
+ Self::MetaLlama370BInstructV1 => "Meta Llama 3 70B Instruct V1",
+ Self::MetaLlama318BInstructV1_128k => "Meta Llama 3 1.8B Instruct V1 128K",
+ Self::MetaLlama318BInstructV1 => "Meta Llama 3 1.8B Instruct V1",
+ Self::MetaLlama3170BInstructV1_128k => "Meta Llama 3 1 70B Instruct V1 128K",
+ Self::MetaLlama3170BInstructV1 => "Meta Llama 3 1 70B Instruct V1",
+ Self::MetaLlama3211BInstructV1 => "Meta Llama 3 2 11B Instruct V1",
+ Self::MetaLlama3290BInstructV1 => "Meta Llama 3 2 90B Instruct V1",
+ Self::MetaLlama321BInstructV1 => "Meta Llama 3 2 1B Instruct V1",
+ Self::MetaLlama323BInstructV1 => "Meta Llama 3 2 3B Instruct V1",
+ Self::MistralMistral7BInstructV0 => "Mistral 7B Instruct V0",
+ Self::MistralMixtral8x7BInstructV0 => "Mistral Mixtral 8x7B Instruct V0",
+ Self::MistralMistralLarge2402V1 => "Mistral Large 2402 V1",
+ Self::MistralMistralSmall2402V1 => "Mistral Small 2402 V1",
+ Self::Custom {
+ display_name, name, ..
+ } => display_name.as_deref().unwrap_or(name),
+ }
+ }
+
+ pub fn max_token_count(&self) -> usize {
+ match self {
+ Self::Claude3_5Sonnet
+ | Self::Claude3Opus
+ | Self::Claude3Sonnet
+ | Self::Claude3_5Haiku => 200_000,
+ Self::Custom { max_tokens, .. } => *max_tokens,
+ _ => 200_000,
+ }
+ }
+
+ pub fn max_output_tokens(&self) -> u32 {
+ match self {
+ Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
+ Self::Claude3_5Sonnet => 8_192,
+ Self::Custom {
+ max_output_tokens, ..
+ } => max_output_tokens.unwrap_or(4_096),
+ _ => 4_096,
+ }
+ }
+
+ pub fn default_temperature(&self) -> f32 {
+ match self {
+ Self::Claude3_5Sonnet
+ | Self::Claude3Opus
+ | Self::Claude3Sonnet
+ | Self::Claude3_5Haiku => 1.0,
+ Self::Custom {
+ default_temperature,
+ ..
+ } => default_temperature.unwrap_or(1.0),
+ _ => 1.0,
+ }
+ }
+}
diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs
index 7223bb7086bbf3..cc1767b4cbb553 100644
--- a/crates/buffer_diff/src/buffer_diff.rs
+++ b/crates/buffer_diff/src/buffer_diff.rs
@@ -3,7 +3,8 @@ use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter};
use language::{Language, LanguageRegistry};
use rope::Rope;
-use std::{cmp, future::Future, iter, ops::Range, sync::Arc};
+use std::cmp::Ordering;
+use std::{future::Future, iter, ops::Range, sync::Arc};
use sum_tree::SumTree;
use text::ToOffset as _;
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
@@ -68,7 +69,6 @@ pub struct DiffHunk {
/// The range in the buffer's diff base text to which this hunk corresponds.
pub diff_base_byte_range: Range<usize>,
pub secondary_status: DiffHunkSecondaryStatus,
- pub secondary_diff_base_byte_range: Option>,
}
/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range.
@@ -110,12 +110,17 @@ impl sum_tree::Summary for DiffHunkSummary {
}
impl<'a> sum_tree::SeekTarget<'a, DiffHunkSummary, DiffHunkSummary> for Anchor {
- fn cmp(
- &self,
- cursor_location: &DiffHunkSummary,
- buffer: &text::BufferSnapshot,
- ) -> cmp::Ordering {
- self.cmp(&cursor_location.buffer_range.end, buffer)
+ fn cmp(&self, cursor_location: &DiffHunkSummary, buffer: &text::BufferSnapshot) -> Ordering {
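+ // Anchors that fall anywhere within the hunk's buffer range compare as Equal, so seeks resolve to the containing hunk.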
+ if self
+ .cmp(&cursor_location.buffer_range.start, buffer)
+ .is_lt()
+ {
+ Ordering::Less
+ } else if self.cmp(&cursor_location.buffer_range.end, buffer).is_gt() {
+ Ordering::Greater
+ } else {
+ Ordering::Equal
+ }
}
}
@@ -171,97 +176,96 @@ impl BufferDiffSnapshot {
}
}
- fn buffer_range_to_unchanged_diff_base_range(
- &self,
- buffer_range: Range<Anchor>,
- buffer: &text::BufferSnapshot,
- ) -> Option<Range<usize>> {
- let mut hunks = self.inner.hunks.iter();
- let mut start = 0;
- let mut pos = buffer.anchor_before(0);
- while let Some(hunk) = hunks.next() {
- assert!(buffer_range.start.cmp(&pos, buffer).is_ge());
- assert!(hunk.buffer_range.start.cmp(&pos, buffer).is_ge());
- if hunk
- .buffer_range
- .start
- .cmp(&buffer_range.end, buffer)
- .is_ge()
- {
- // target buffer range is contained in the unchanged stretch leading up to this next hunk,
- // so do a final adjustment based on that
- break;
- }
-
- // if the target buffer range intersects this hunk at all, no dice
- if buffer_range
- .start
- .cmp(&hunk.buffer_range.end, buffer)
- .is_lt()
- {
- return None;
- }
-
- start += hunk.buffer_range.start.to_offset(buffer) - pos.to_offset(buffer);
- start += hunk.diff_base_byte_range.end - hunk.diff_base_byte_range.start;
- pos = hunk.buffer_range.end;
- }
- start += buffer_range.start.to_offset(buffer) - pos.to_offset(buffer);
- let end = start + buffer_range.end.to_offset(buffer) - buffer_range.start.to_offset(buffer);
- Some(start..end)
- }
-
- pub fn secondary_edits_for_stage_or_unstage(
+ pub fn new_secondary_text_for_stage_or_unstage(
&self,
stage: bool,
- hunks: impl Iterator<Item = (Range<usize>, Option<Range<usize>>, Range<Anchor>)>,
+ hunks: impl Iterator<Item = (Range<Anchor>, Range<usize>)>,
buffer: &text::BufferSnapshot,
- ) -> Vec<(Range<usize>, String)> {
- let Some(secondary_diff) = self.secondary_diff() else {
- log::debug!("no secondary diff");
- return Vec::new();
+ cx: &mut App,
+ ) -> Option<Rope> {
+ let secondary_diff = self.secondary_diff()?;
+ let index_base = if let Some(index_base) = secondary_diff.base_text() {
+ index_base.text.as_rope().clone()
+ } else if stage {
+ Rope::from("")
+ } else {
+ return None;
};
- let index_base = secondary_diff.base_text().map_or_else(
- || Rope::from(""),
- |snapshot| snapshot.text.as_rope().clone(),
- );
let head_base = self.base_text().map_or_else(
|| Rope::from(""),
|snapshot| snapshot.text.as_rope().clone(),
);
- log::debug!("original: {:?}", index_base.to_string());
+
+ let mut secondary_cursor = secondary_diff.inner.hunks.cursor::<DiffHunkSummary>(buffer);
+ secondary_cursor.next(buffer);
let mut edits = Vec::new();
- for (diff_base_byte_range, secondary_diff_base_byte_range, buffer_range) in hunks {
- let (index_byte_range, replacement_text) = if stage {
+ let mut prev_secondary_hunk_buffer_offset = 0;
+ let mut prev_secondary_hunk_base_text_offset = 0;
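+ // For each hunk, map its buffer range onto the corresponding byte range in the index (secondary diff base) text, widening both ranges to cover any overlapping unstaged hunks.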
+ for (buffer_range, diff_base_byte_range) in hunks {
+ let skipped_hunks = secondary_cursor.slice(&buffer_range.start, Bias::Left, buffer);
+
+ if let Some(secondary_hunk) = skipped_hunks.last() {
+ prev_secondary_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end;
+ prev_secondary_hunk_buffer_offset =
+ secondary_hunk.buffer_range.end.to_offset(buffer);
+ }
+
+ let mut buffer_offset_range = buffer_range.to_offset(buffer);
+ let start_overshoot = buffer_offset_range.start - prev_secondary_hunk_buffer_offset;
+ let mut secondary_base_text_start =
+ prev_secondary_hunk_base_text_offset + start_overshoot;
+
+ while let Some(secondary_hunk) = secondary_cursor.item().filter(|item| {
+ item.buffer_range
+ .start
+ .cmp(&buffer_range.end, buffer)
+ .is_le()
+ }) {
+ let secondary_hunk_offset_range = secondary_hunk.buffer_range.to_offset(buffer);
+ prev_secondary_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end;
+ prev_secondary_hunk_buffer_offset = secondary_hunk_offset_range.end;
+
+ secondary_base_text_start =
+ secondary_base_text_start.min(secondary_hunk.diff_base_byte_range.start);
+ buffer_offset_range.start = buffer_offset_range
+ .start
+ .min(secondary_hunk_offset_range.start);
+
+ secondary_cursor.next(buffer);
+ }
+
+ let end_overshoot = buffer_offset_range
+ .end
+ .saturating_sub(prev_secondary_hunk_buffer_offset);
+ let secondary_base_text_end = prev_secondary_hunk_base_text_offset + end_overshoot;
+
+ let secondary_base_text_range = secondary_base_text_start..secondary_base_text_end;
+ buffer_offset_range.end = buffer_offset_range
+ .end
+ .max(prev_secondary_hunk_buffer_offset);
+
+ let replacement_text = if stage {
log::debug!("staging");
- let mut replacement_text = String::new();
- let Some(index_byte_range) = secondary_diff_base_byte_range.clone() else {
- log::debug!("not a stageable hunk");
- continue;
- };
- log::debug!("using {:?}", index_byte_range);
- for chunk in buffer.text_for_range(buffer_range.clone()) {
- replacement_text.push_str(chunk);
- }
- (index_byte_range, replacement_text)
+ buffer
+ .text_for_range(buffer_offset_range)
+ .collect::<String>()
} else {
log::debug!("unstaging");
- let mut replacement_text = String::new();
- let Some(index_byte_range) = secondary_diff
- .buffer_range_to_unchanged_diff_base_range(buffer_range.clone(), &buffer)
- else {
- log::debug!("not an unstageable hunk");
- continue;
- };
- for chunk in head_base.chunks_in_range(diff_base_byte_range.clone()) {
- replacement_text.push_str(chunk);
- }
- (index_byte_range, replacement_text)
+ head_base
+ .chunks_in_range(diff_base_byte_range.clone())
+ .collect::<String>()
};
- edits.push((index_byte_range, replacement_text));
+ edits.push((secondary_base_text_range, replacement_text));
}
- log::debug!("edits: {edits:?}");
- edits
+
+ let buffer = cx.new(|cx| {
+ language::Buffer::local_normalized(index_base, text::LineEnding::default(), cx)
+ });
+ let new_text = buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, None, cx);
+ buffer.as_rope().clone()
+ });
+ Some(new_text)
}
}
@@ -322,13 +326,12 @@ impl BufferDiffInner {
}
let mut secondary_status = DiffHunkSecondaryStatus::None;
- let mut secondary_diff_base_byte_range = None;
if let Some(secondary_cursor) = secondary_cursor.as_mut() {
if start_anchor
.cmp(&secondary_cursor.start().buffer_range.start, buffer)
.is_gt()
{
- secondary_cursor.seek_forward(&end_anchor, Bias::Left, buffer);
+ secondary_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
}
if let Some(secondary_hunk) = secondary_cursor.item() {
@@ -339,12 +342,12 @@ impl BufferDiffInner {
}
if secondary_range == (start_point..end_point) {
secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
- secondary_diff_base_byte_range =
- Some(secondary_hunk.diff_base_byte_range.clone());
} else if secondary_range.start <= end_point {
secondary_status = DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk;
}
}
+ } else {
+ log::debug!("no secondary cursor!!");
}
return Some(DiffHunk {
@@ -352,7 +355,6 @@ impl BufferDiffInner {
diff_base_byte_range: start_base..end_base,
buffer_range: start_anchor..end_anchor,
secondary_status,
- secondary_diff_base_byte_range,
});
})
}
@@ -387,7 +389,6 @@ impl BufferDiffInner {
buffer_range: hunk.buffer_range.clone(),
// The secondary status is not used by callers of this method.
secondary_status: DiffHunkSecondaryStatus::None,
- secondary_diff_base_byte_range: None,
})
})
}
@@ -408,12 +409,12 @@ impl BufferDiffInner {
.start
.cmp(&old_hunk.buffer_range.start, new_snapshot)
{
- cmp::Ordering::Less => {
+ Ordering::Less => {
start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end);
new_cursor.next(new_snapshot);
}
- cmp::Ordering::Equal => {
+ Ordering::Equal => {
if new_hunk != old_hunk {
start.get_or_insert(new_hunk.buffer_range.start);
if old_hunk
@@ -431,7 +432,7 @@ impl BufferDiffInner {
new_cursor.next(new_snapshot);
old_cursor.next(new_snapshot);
}
- cmp::Ordering::Greater => {
+ Ordering::Greater => {
start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end);
old_cursor.next(new_snapshot);
@@ -1059,6 +1060,7 @@ mod tests {
use rand::{rngs::StdRng, Rng as _};
use text::{Buffer, BufferId, Rope};
use unindent::Unindent as _;
+ use util::test::marked_text_ranges;
#[ctor::ctor]
fn init_logger() {
@@ -1257,6 +1259,208 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_stage_hunk(cx: &mut TestAppContext) {
+ struct Example {
+ name: &'static str,
+ head_text: String,
+ index_text: String,
+ buffer_marked_text: String,
+ final_index_text: String,
+ }
+
+ let table = [
+ Example {
+ name: "uncommitted hunk straddles end of unstaged hunk",
+ head_text: "
+ one
+ two
+ three
+ four
+ five
+ "
+ .unindent(),
+ index_text: "
+ one
+ TWO_HUNDRED
+ three
+ FOUR_HUNDRED
+ five
+ "
+ .unindent(),
+ buffer_marked_text: "
+ ZERO
+ one
+ two
+ «THREE_HUNDRED
+ FOUR_HUNDRED»
+ five
+ SIX
+ "
+ .unindent(),
+ final_index_text: "
+ one
+ two
+ THREE_HUNDRED
+ FOUR_HUNDRED
+ five
+ "
+ .unindent(),
+ },
+ Example {
+ name: "uncommitted hunk straddles start of unstaged hunk",
+ head_text: "
+ one
+ two
+ three
+ four
+ five
+ "
+ .unindent(),
+ index_text: "
+ one
+ TWO_HUNDRED
+ three
+ FOUR_HUNDRED
+ five
+ "
+ .unindent(),
+ buffer_marked_text: "
+ ZERO
+ one
+ «TWO_HUNDRED
+ THREE_HUNDRED»
+ four
+ five
+ SIX
+ "
+ .unindent(),
+ final_index_text: "
+ one
+ TWO_HUNDRED
+ THREE_HUNDRED
+ four
+ five
+ "
+ .unindent(),
+ },
+ Example {
+ name: "uncommitted hunk strictly contains unstaged hunks",
+ head_text: "
+ one
+ two
+ three
+ four
+ five
+ six
+ seven
+ "
+ .unindent(),
+ index_text: "
+ one
+ TWO
+ THREE
+ FOUR
+ FIVE
+ SIX
+ seven
+ "
+ .unindent(),
+ buffer_marked_text: "
+ one
+ TWO
+ «THREE_HUNDRED
+ FOUR
+ FIVE_HUNDRED»
+ SIX
+ seven
+ "
+ .unindent(),
+ final_index_text: "
+ one
+ TWO
+ THREE_HUNDRED
+ FOUR
+ FIVE_HUNDRED
+ SIX
+ seven
+ "
+ .unindent(),
+ },
+ Example {
+ name: "uncommitted deletion hunk",
+ head_text: "
+ one
+ two
+ three
+ four
+ five
+ "
+ .unindent(),
+ index_text: "
+ one
+ two
+ three
+ four
+ five
+ "
+ .unindent(),
+ buffer_marked_text: "
+ one
+ ˇfive
+ "
+ .unindent(),
+ final_index_text: "
+ one
+ five
+ "
+ .unindent(),
+ },
+ ];
+
+ for example in table {
+ let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false);
+ let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
+ let uncommitted_diff =
+ BufferDiff::build_sync(buffer.clone(), example.head_text.clone(), cx);
+ let unstaged_diff =
+ BufferDiff::build_sync(buffer.clone(), example.index_text.clone(), cx);
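+ // Wrap the unstaged (index vs. buffer) diff as the secondary diff of the uncommitted
+ // (HEAD vs. buffer) diff, so hunks report their staged status.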
+ let uncommitted_diff = BufferDiffSnapshot {
+ inner: uncommitted_diff,
+ secondary_diff: Some(Box::new(BufferDiffSnapshot {
+ inner: unstaged_diff,
+ is_single_insertion: false,
+ secondary_diff: None,
+ })),
+ is_single_insertion: false,
+ };
+
+ let range = buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end);
+
+ let new_index_text = cx
+ .update(|cx| {
+ uncommitted_diff.new_secondary_text_for_stage_or_unstage(
+ true,
+ uncommitted_diff
+ .hunks_intersecting_range(range, &buffer)
+ .map(|hunk| {
+ (hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone())
+ }),
+ &buffer,
+ cx,
+ )
+ })
+ .unwrap()
+ .to_string();
+ pretty_assertions::assert_eq!(
+ new_index_text,
+ example.final_index_text,
+ "example: {}",
+ example.name
+ );
+ }
+ }
+
#[gpui::test]
async fn test_buffer_diff_compare(cx: &mut TestAppContext) {
let base_text = "
@@ -1382,7 +1586,7 @@ mod tests {
}
#[gpui::test(iterations = 100)]
- async fn test_secondary_edits_for_stage_unstage(cx: &mut TestAppContext, mut rng: StdRng) {
+ async fn test_staging_and_unstaging_hunks(cx: &mut TestAppContext, mut rng: StdRng) {
fn gen_line(rng: &mut StdRng) -> String {
if rng.gen_bool(0.2) {
"\n".to_owned()
@@ -1447,7 +1651,7 @@ mod tests {
fn uncommitted_diff(
working_copy: &language::BufferSnapshot,
- index_text: &Entity<language::Buffer>,
+ index_text: &Rope,
head_text: String,
cx: &mut TestAppContext,
) -> BufferDiff {
@@ -1456,7 +1660,7 @@ mod tests {
buffer_id: working_copy.remote_id(),
inner: BufferDiff::build_sync(
working_copy.text.clone(),
- index_text.read_with(cx, |index_text, _| index_text.text()),
+ index_text.to_string(),
cx,
),
secondary_diff: None,
@@ -1487,17 +1691,11 @@ mod tests {
)
});
let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot());
- let index_text = cx.new(|cx| {
- language::Buffer::local_normalized(
- if rng.gen() {
- Rope::from(head_text.as_str())
- } else {
- working_copy.as_rope().clone()
- },
- text::LineEnding::default(),
- cx,
- )
- });
+ let mut index_text = if rng.gen() {
+ Rope::from(head_text.as_str())
+ } else {
+ working_copy.as_rope().clone()
+ };
let mut diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx);
let mut hunks = cx.update(|cx| {
@@ -1511,37 +1709,29 @@ mod tests {
for _ in 0..operations {
let i = rng.gen_range(0..hunks.len());
let hunk = &mut hunks[i];
- let hunk_fields = (
- hunk.diff_base_byte_range.clone(),
- hunk.secondary_diff_base_byte_range.clone(),
- hunk.buffer_range.clone(),
- );
- let stage = match (
- hunk.secondary_status,
- hunk.secondary_diff_base_byte_range.clone(),
- ) {
- (DiffHunkSecondaryStatus::HasSecondaryHunk, Some(_)) => {
+ let stage = match hunk.secondary_status {
+ DiffHunkSecondaryStatus::HasSecondaryHunk => {
hunk.secondary_status = DiffHunkSecondaryStatus::None;
- hunk.secondary_diff_base_byte_range = None;
true
}
- (DiffHunkSecondaryStatus::None, None) => {
+ DiffHunkSecondaryStatus::None => {
hunk.secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
- // We don't look at this, just notice whether it's Some or not.
- hunk.secondary_diff_base_byte_range = Some(17..17);
false
}
_ => unreachable!(),
};
let snapshot = cx.update(|cx| diff.snapshot(cx));
- let edits = snapshot.secondary_edits_for_stage_or_unstage(
- stage,
- [hunk_fields].into_iter(),
- &working_copy,
- );
- index_text.update(cx, |index_text, cx| {
- index_text.edit(edits, None, cx);
+ index_text = cx.update(|cx| {
+ snapshot
+ .new_secondary_text_for_stage_or_unstage(
+ stage,
+ [(hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone())]
+ .into_iter(),
+ &working_copy,
+ cx,
+ )
+ .unwrap()
});
diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx);
@@ -1550,6 +1740,7 @@ mod tests {
.collect::<Vec<_>>()
});
assert_eq!(hunks.len(), found_hunks.len());
+
for (expected_hunk, found_hunk) in hunks.iter().zip(&found_hunks) {
assert_eq!(
expected_hunk.buffer_range.to_point(&working_copy),
@@ -1560,10 +1751,6 @@ mod tests {
found_hunk.diff_base_byte_range
);
assert_eq!(expected_hunk.secondary_status, found_hunk.secondary_status);
- assert_eq!(
- expected_hunk.secondary_diff_base_byte_range.is_some(),
- found_hunk.secondary_diff_base_byte_range.is_some()
- )
}
hunks = found_hunks;
}
diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs
index b1ab7586613aa2..b79931818e2cb4 100644
--- a/crates/collab/src/llm.rs
+++ b/crates/collab/src/llm.rs
@@ -256,6 +256,7 @@ async fn perform_completion(
// so that users can use the new version, without having to update Zed.
request.model = match model.as_str() {
"claude-3-5-sonnet" => anthropic::Model::Claude3_5Sonnet.id().to_string(),
+ "claude-3-7-sonnet" => anthropic::Model::Claude3_7Sonnet.id().to_string(),
"claude-3-opus" => anthropic::Model::Claude3Opus.id().to_string(),
"claude-3-haiku" => anthropic::Model::Claude3Haiku.id().to_string(),
"claude-3-sonnet" => anthropic::Model::Claude3Sonnet.id().to_string(),
diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs
index 19ce05f1eb8e59..885e6b62cd6d86 100644
--- a/crates/collab/src/rpc.rs
+++ b/crates/collab/src/rpc.rs
@@ -392,9 +392,13 @@ impl Server {
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_mutating_project_request::)
+ .add_request_handler(forward_mutating_project_request::)
+ .add_request_handler(forward_mutating_project_request::)
+ .add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_mutating_project_request::)
+ .add_request_handler(forward_read_only_project_request::)
.add_request_handler(forward_read_only_project_request::)
.add_request_handler(forward_read_only_project_request::)
.add_request_handler(forward_read_only_project_request::)
diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml
index d3c21084002d89..867e8fd3bb68e1 100644
--- a/crates/copilot/Cargo.toml
+++ b/crates/copilot/Cargo.toml
@@ -38,6 +38,7 @@ gpui.workspace = true
http_client.workspace = true
inline_completion.workspace = true
language.workspace = true
+log.workspace = true
lsp.workspace = true
menu.workspace = true
node_runtime.workspace = true
@@ -62,7 +63,9 @@ async-std = { version = "1.12.0", features = ["unstable"] }
client = { workspace = true, features = ["test-support"] }
clock = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
+ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
+env_logger.workspace = true
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs
index 5edc0d5954329a..ff54ce1cef79f8 100644
--- a/crates/copilot/src/copilot.rs
+++ b/crates/copilot/src/copilot.rs
@@ -16,6 +16,7 @@ use gpui::{
};
use http_client::github::get_release_by_tag_name;
use http_client::HttpClient;
+use language::language_settings::CopilotSettings;
use language::{
language_settings::{all_language_settings, language_settings, EditPredictionProvider},
point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16,
@@ -367,13 +368,13 @@ impl Copilot {
let server_id = self.server_id;
let http = self.http.clone();
let node_runtime = self.node_runtime.clone();
- if all_language_settings(None, cx).edit_predictions.provider
- == EditPredictionProvider::Copilot
- {
+ let language_settings = all_language_settings(None, cx);
+ if language_settings.edit_predictions.provider == EditPredictionProvider::Copilot {
if matches!(self.server, CopilotServer::Disabled) {
+ let env = self.build_env(&language_settings.edit_predictions.copilot);
let start_task = cx
.spawn(move |this, cx| {
- Self::start_language_server(server_id, http, node_runtime, this, cx)
+ Self::start_language_server(server_id, http, node_runtime, env, this, cx)
})
.shared();
self.server = CopilotServer::Starting { task: start_task };
@@ -385,6 +386,30 @@ impl Copilot {
}
}
+ fn build_env(&self, copilot_settings: &CopilotSettings) -> Option<HashMap<String, String>> {
+ let proxy_url = copilot_settings.proxy.clone()?;
+ let no_verify = copilot_settings.proxy_no_verify;
+ let http_or_https_proxy = if proxy_url.starts_with("http:") {
+ "HTTP_PROXY"
+ } else if proxy_url.starts_with("https:") {
+ "HTTPS_PROXY"
+ } else {
+ log::error!(
+ "Unsupported protocol scheme for language server proxy (must be http or https)"
+ );
+ return None;
+ };
+
+ let mut env = HashMap::default();
+ env.insert(http_or_https_proxy.to_string(), proxy_url);
+
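+ // Optionally disable TLS certificate verification for the Node-based language server
+ // when the user has opted in via `proxy_no_verify`.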
+ if let Some(true) = no_verify {
+ env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
+ };
+
+ Some(env)
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn fake(cx: &mut gpui::TestAppContext) -> (Entity, lsp::FakeLanguageServer) {
use lsp::FakeLanguageServer;
@@ -422,6 +447,7 @@ impl Copilot {
new_server_id: LanguageServerId,
http: Arc<dyn HttpClient>,
node_runtime: NodeRuntime,
+ env: Option<HashMap<String, String>>,
this: WeakEntity,
mut cx: AsyncApp,
) {
@@ -432,8 +458,7 @@ impl Copilot {
let binary = LanguageServerBinary {
path: node_path,
arguments,
- // TODO: We could set HTTP_PROXY etc here and fix the copilot issue.
- env: None,
+ env,
};
let root_path = if cfg!(target_os = "windows") {
@@ -611,6 +636,8 @@ impl Copilot {
}
pub fn reinstall(&mut self, cx: &mut Context<Self>) -> Task<()> {
+ let language_settings = all_language_settings(None, cx);
+ let env = self.build_env(&language_settings.edit_predictions.copilot);
let start_task = cx
.spawn({
let http = self.http.clone();
@@ -618,7 +645,7 @@ impl Copilot {
let server_id = self.server_id;
move |this, cx| async move {
clear_copilot_dir().await;
- Self::start_language_server(server_id, http, node_runtime, this, cx).await
+ Self::start_language_server(server_id, http, node_runtime, env, this, cx).await
}
})
.shared();
@@ -1279,3 +1306,11 @@ mod tests {
}
}
}
+
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::init();
+ }
+}
diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs
index 70acf082bf81ab..b456052338db87 100644
--- a/crates/editor/src/editor.rs
+++ b/crates/editor/src/editor.rs
@@ -63,7 +63,7 @@ pub use editor_settings::{
CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar,
};
pub use editor_settings_controls::*;
-use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap};
+use element::{layout_line, AcceptEditPredictionBinding, LineWithInvisibles, PositionMap};
pub use element::{
CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition,
};
@@ -82,9 +82,9 @@ use git::blame::GitBlame;
use gpui::{
div, impl_actions, point, prelude::*, pulsating_between, px, relative, size, Action, Animation,
AnimationExt, AnyElement, App, AsyncWindowContext, AvailableSpace, Background, Bounds,
- ClickEvent, ClipboardEntry, ClipboardItem, Context, DispatchPhase, Entity, EntityInputHandler,
- EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight, Global,
- HighlightStyle, Hsla, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad,
+ ClickEvent, ClipboardEntry, ClipboardItem, Context, DispatchPhase, Edges, Entity,
+ EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight,
+ Global, HighlightStyle, Hsla, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad,
ParentElement, Pixels, Render, SharedString, Size, Styled, StyledText, Subscription, Task,
TextStyle, TextStyleRefinement, UTF16Selection, UnderlineStyle, UniformListScrollHandle,
WeakEntity, WeakFocusHandle, Window,
@@ -119,6 +119,7 @@ use project::{
pub use proposed_changes_editor::{
ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar,
};
+use smallvec::smallvec;
use std::iter::Peekable;
use task::{ResolvedTask, TaskTemplate, TaskVariables};
@@ -534,7 +535,7 @@ impl ScrollbarMarkerState {
#[derive(Clone, Debug)]
struct RunnableTasks {
templates: Vec<(TaskSourceKind, TaskTemplate)>,
- offset: MultiBufferOffset,
+ offset: multi_buffer::Anchor,
// We need the column at which the task context evaluation should take place (when we're spawning it via gutter).
column: u32,
// Values of all named captures, including those starting with '_'
@@ -562,8 +563,6 @@ struct ResolvedTasks {
position: Anchor,
}
-#[derive(Copy, Clone, Debug)]
-struct MultiBufferOffset(usize);
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
struct BufferOffset(usize);
@@ -688,8 +687,8 @@ pub struct Editor {
show_inline_completions_override: Option<bool>,
menu_inline_completions_policy: MenuInlineCompletionsPolicy,
edit_prediction_preview: EditPredictionPreview,
- edit_prediction_cursor_on_leading_whitespace: bool,
- edit_prediction_requires_modifier_in_leading_space: bool,
+ edit_prediction_indent_conflict: bool,
+ edit_prediction_requires_modifier_in_indent_conflict: bool,
inlay_hint_cache: InlayHintCache,
next_inlay_id: usize,
_subscriptions: Vec<Subscription>,
@@ -707,7 +706,6 @@ pub struct Editor {
show_git_blame_inline: bool,
show_git_blame_inline_delay_task: Option<Task<()>>,
git_blame_inline_tooltip: Option>,
- distinguish_unstaged_diff_hunks: bool,
git_blame_inline_enabled: bool,
serialize_dirty_buffers: bool,
show_selection_menu: Option,
@@ -1432,12 +1430,11 @@ impl Editor {
show_inline_completions_override: None,
menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider,
edit_prediction_settings: EditPredictionSettings::Disabled,
- edit_prediction_cursor_on_leading_whitespace: false,
- edit_prediction_requires_modifier_in_leading_space: true,
+ edit_prediction_indent_conflict: false,
+ edit_prediction_requires_modifier_in_indent_conflict: true,
custom_context_menu: None,
show_git_blame_gutter: false,
show_git_blame_inline: false,
- distinguish_unstaged_diff_hunks: false,
show_selection_menu: None,
show_git_blame_inline_delay_task: None,
git_blame_inline_tooltip: None,
@@ -1613,7 +1610,7 @@ impl Editor {
|| self.edit_prediction_requires_modifier()
// Require modifier key when the cursor is on leading whitespace, to allow `tab`
// bindings to insert tab characters.
- || (self.edit_prediction_requires_modifier_in_leading_space && self.edit_prediction_cursor_on_leading_whitespace)
+ || (self.edit_prediction_requires_modifier_in_indent_conflict && self.edit_prediction_indent_conflict)
}
pub fn accept_edit_prediction_keybind(
@@ -1861,6 +1858,7 @@ impl Editor {
}),
provider: Arc::new(provider),
});
+ self.update_edit_prediction_settings(cx);
self.refresh_inline_completion(false, false, window, cx);
}
@@ -1980,7 +1978,7 @@ impl Editor {
self.auto_replace_emoji_shortcode = auto_replace;
}
- pub fn toggle_inline_completions(
+ pub fn toggle_edit_predictions(
&mut self,
_: &ToggleEditPrediction,
window: &mut Window,
@@ -2001,6 +1999,7 @@ impl Editor {
cx: &mut Context<Self>,
) {
self.show_inline_completions_override = show_edit_predictions;
+ self.update_edit_prediction_settings(cx);
if let Some(false) = show_edit_predictions {
self.discard_inline_completion(false, cx);
@@ -2185,7 +2184,7 @@ impl Editor {
self.refresh_selected_text_highlights(window, cx);
refresh_matching_bracket_highlights(self, window, cx);
self.update_visible_inline_completion(window, cx);
- self.edit_prediction_requires_modifier_in_leading_space = true;
+ self.edit_prediction_requires_modifier_in_indent_conflict = true;
linked_editing_ranges::refresh_linked_ranges(self, window, cx);
if self.git_blame_inline_enabled {
self.start_inline_blame_timer(window, cx);
@@ -4859,7 +4858,7 @@ impl Editor {
let (buffer, cursor_buffer_position) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
- if !self.inline_completions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) {
+ if !self.edit_predictions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) {
self.discard_inline_completion(false, cx);
return None;
}
@@ -4908,6 +4907,22 @@ impl Editor {
}
}
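+ // Recompute the cached edit prediction settings for the newest cursor position; called
+ // when the provider or the relevant user settings change.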
+ pub fn update_edit_prediction_settings(&mut self, cx: &mut Context<Self>) {
+ if self.edit_prediction_provider.is_none() {
+ self.edit_prediction_settings = EditPredictionSettings::Disabled;
+ } else {
+ let selection = self.selections.newest_anchor();
+ let cursor = selection.head();
+
+ if let Some((buffer, cursor_buffer_position)) =
+ self.buffer.read(cx).text_anchor_for_position(cursor, cx)
+ {
+ self.edit_prediction_settings =
+ self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx);
+ }
+ }
+ }
+
fn edit_prediction_settings_at_position(
&self,
buffer: &Entity<Buffer>,
@@ -4962,18 +4977,18 @@ impl Editor {
)
}
- pub fn inline_completions_enabled(&self, cx: &App) -> bool {
+ pub fn edit_predictions_enabled_at_cursor(&self, cx: &App) -> bool {
let cursor = self.selections.newest_anchor().head();
if let Some((buffer, cursor_position)) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)
{
- self.inline_completions_enabled_in_buffer(&buffer, cursor_position, cx)
+ self.edit_predictions_enabled_in_buffer(&buffer, cursor_position, cx)
} else {
false
}
}
- fn inline_completions_enabled_in_buffer(
+ fn edit_predictions_enabled_in_buffer(
&self,
buffer: &Entity<Buffer>,
buffer_position: language::Anchor,
@@ -5171,7 +5186,7 @@ impl Editor {
}
}
- self.edit_prediction_requires_modifier_in_leading_space = false;
+ self.edit_prediction_requires_modifier_in_indent_conflict = false;
}
pub fn accept_partial_inline_completion(
@@ -5469,8 +5484,19 @@ impl Editor {
self.edit_prediction_settings =
self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx);
- self.edit_prediction_cursor_on_leading_whitespace =
- multibuffer.is_line_whitespace_upto(cursor);
+ self.edit_prediction_indent_conflict = multibuffer.is_line_whitespace_upto(cursor);
+
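+ // If the cursor already sits at the line's suggested indent level, `tab` has nothing
+ // further to indent, so there is no conflict with accepting the prediction.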
+ if self.edit_prediction_indent_conflict {
+ let cursor_point = cursor.to_point(&multibuffer);
+
+ let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx);
+
+ if let Some((_, indent)) = indents.iter().next() {
+ if indent.len == cursor_point.column {
+ self.edit_prediction_indent_conflict = false;
+ }
+ }
+ }
let inline_completion = provider.suggest(&buffer, cursor_buffer_position, cx)?;
let edits = inline_completion
@@ -6052,6 +6078,524 @@ impl Editor {
.map(|menu| menu.origin())
}
+ const EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = Pixels(24.);
+ const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = Pixels(2.);
+
+ #[allow(clippy::too_many_arguments)]
+ fn render_edit_prediction_popover(
+ &mut self,
+ text_bounds: &Bounds<Pixels>,
+ content_origin: gpui::Point<Pixels>,
+ editor_snapshot: &EditorSnapshot,
+ visible_row_range: Range<DisplayRow>,
+ scroll_top: f32,
+ scroll_bottom: f32,
+ line_layouts: &[LineWithInvisibles],
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<Pixels>,
+ newest_selection_head: Option<DisplayPoint>,
+ editor_width: Pixels,
+ style: &EditorStyle,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<(AnyElement, gpui::Point<Pixels>)> {
+ let active_inline_completion = self.active_inline_completion.as_ref()?;
+
+ if self.edit_prediction_visible_in_cursor_popover(true) {
+ return None;
+ }
+
+ match &active_inline_completion.completion {
+ InlineCompletion::Move { target, .. } => {
+ let target_display_point = target.to_display_point(editor_snapshot);
+
+ if self.edit_prediction_requires_modifier() {
+ if !self.edit_prediction_preview_is_active() {
+ return None;
+ }
+
+ self.render_edit_prediction_modifier_jump_popover(
+ text_bounds,
+ content_origin,
+ visible_row_range,
+ line_layouts,
+ line_height,
+ scroll_pixel_position,
+ newest_selection_head,
+ target_display_point,
+ window,
+ cx,
+ )
+ } else {
+ self.render_edit_prediction_eager_jump_popover(
+ text_bounds,
+ content_origin,
+ editor_snapshot,
+ visible_row_range,
+ scroll_top,
+ scroll_bottom,
+ line_height,
+ scroll_pixel_position,
+ target_display_point,
+ editor_width,
+ window,
+ cx,
+ )
+ }
+ }
+ InlineCompletion::Edit {
+ display_mode: EditDisplayMode::Inline,
+ ..
+ } => None,
+ InlineCompletion::Edit {
+ display_mode: EditDisplayMode::TabAccept,
+ edits,
+ ..
+ } => {
+ let range = &edits.first()?.0;
+ let target_display_point = range.end.to_display_point(editor_snapshot);
+
+ self.render_edit_prediction_end_of_line_popover(
+ "Accept",
+ editor_snapshot,
+ visible_row_range,
+ target_display_point,
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ editor_width,
+ window,
+ cx,
+ )
+ }
+ InlineCompletion::Edit {
+ edits,
+ edit_preview,
+ display_mode: EditDisplayMode::DiffPopover,
+ snapshot,
+ } => self.render_edit_prediction_diff_popover(
+ text_bounds,
+ content_origin,
+ editor_snapshot,
+ visible_row_range,
+ line_layouts,
+ line_height,
+ scroll_pixel_position,
+ newest_selection_head,
+ editor_width,
+ style,
+ edits,
+ edit_preview,
+ snapshot,
+ window,
+ cx,
+ ),
+ }
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn render_edit_prediction_modifier_jump_popover(
+ &mut self,
+ text_bounds: &Bounds<Pixels>,
+ content_origin: gpui::Point<Pixels>,
+ visible_row_range: Range<DisplayRow>,
+ line_layouts: &[LineWithInvisibles],
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<Pixels>,
+ newest_selection_head: Option<DisplayPoint>,
+ target_display_point: DisplayPoint,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<(AnyElement, gpui::Point<Pixels>)> {
+ let scrolled_content_origin =
+ content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0));
+
+ const SCROLL_PADDING_Y: Pixels = px(12.);
+
+ if target_display_point.row() < visible_row_range.start {
+ return self.render_edit_prediction_scroll_popover(
+ |_| SCROLL_PADDING_Y,
+ IconName::ArrowUp,
+ visible_row_range,
+ line_layouts,
+ newest_selection_head,
+ scrolled_content_origin,
+ window,
+ cx,
+ );
+ } else if target_display_point.row() >= visible_row_range.end {
+ return self.render_edit_prediction_scroll_popover(
+ |size| text_bounds.size.height - size.height - SCROLL_PADDING_Y,
+ IconName::ArrowDown,
+ visible_row_range,
+ line_layouts,
+ newest_selection_head,
+ scrolled_content_origin,
+ window,
+ cx,
+ );
+ }
+
+ const POLE_WIDTH: Pixels = px(2.);
+
+ let mut element = v_flex()
+ .items_end()
+ .child(
+ self.render_edit_prediction_line_popover("Jump", None, window, cx)?
+ .rounded_br(px(0.))
+ .rounded_tr(px(0.))
+ .border_r_2(),
+ )
+ .child(
+ div()
+ .w(POLE_WIDTH)
+ .bg(Editor::edit_prediction_callout_popover_border_color(cx))
+ .h(line_height),
+ )
+ .into_any();
+
+ let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+
+ let line_layout =
+ line_layouts.get(target_display_point.row().minus(visible_row_range.start) as usize)?;
+ let target_column = target_display_point.column() as usize;
+
+ let target_x = line_layout.x_for_index(target_column);
+ let target_y =
+ (target_display_point.row().as_f32() * line_height) - scroll_pixel_position.y;
+
+ let mut origin = scrolled_content_origin + point(target_x, target_y)
+ - point(size.width - POLE_WIDTH, size.height - line_height);
+
+ origin.x = origin.x.max(content_origin.x);
+
+ element.prepaint_at(origin, window, cx);
+
+ Some((element, origin))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn render_edit_prediction_scroll_popover(
+ &mut self,
+ to_y: impl Fn(Size<Pixels>) -> Pixels,
+ scroll_icon: IconName,
+ visible_row_range: Range<DisplayRow>,
+ line_layouts: &[LineWithInvisibles],
+ newest_selection_head: Option<DisplayPoint>,
+ scrolled_content_origin: gpui::Point<Pixels>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<(AnyElement, gpui::Point<Pixels>)> {
+ let mut element = self
+ .render_edit_prediction_line_popover("Scroll", Some(scroll_icon), window, cx)?
+ .into_any();
+
+ let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+
+ let cursor = newest_selection_head?;
+ let cursor_row_layout =
+ line_layouts.get(cursor.row().minus(visible_row_range.start) as usize)?;
+ let cursor_column = cursor.column() as usize;
+
+ let cursor_character_x = cursor_row_layout.x_for_index(cursor_column);
+
+ let origin = scrolled_content_origin + point(cursor_character_x, to_y(size));
+
+ element.prepaint_at(origin, window, cx);
+ Some((element, origin))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn render_edit_prediction_eager_jump_popover(
+ &mut self,
+ text_bounds: &Bounds<Pixels>,
+ content_origin: gpui::Point<Pixels>,
+ editor_snapshot: &EditorSnapshot,
+ visible_row_range: Range<DisplayRow>,
+ scroll_top: f32,
+ scroll_bottom: f32,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<Pixels>,
+ target_display_point: DisplayPoint,
+ editor_width: Pixels,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<(AnyElement, gpui::Point<Pixels>)> {
+ if target_display_point.row().as_f32() < scroll_top {
+ let mut element = self
+ .render_edit_prediction_line_popover(
+ "Jump to Edit",
+ Some(IconName::ArrowUp),
+ window,
+ cx,
+ )?
+ .into_any();
+
+ let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+ let offset = point(
+ (text_bounds.size.width - size.width) / 2.,
+ Self::EDIT_PREDICTION_POPOVER_PADDING_Y,
+ );
+
+ let origin = text_bounds.origin + offset;
+ element.prepaint_at(origin, window, cx);
+ Some((element, origin))
+ } else if (target_display_point.row().as_f32() + 1.) > scroll_bottom {
+ let mut element = self
+ .render_edit_prediction_line_popover(
+ "Jump to Edit",
+ Some(IconName::ArrowDown),
+ window,
+ cx,
+ )?
+ .into_any();
+
+ let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+ let offset = point(
+ (text_bounds.size.width - size.width) / 2.,
+ text_bounds.size.height - size.height - Self::EDIT_PREDICTION_POPOVER_PADDING_Y,
+ );
+
+ let origin = text_bounds.origin + offset;
+ element.prepaint_at(origin, window, cx);
+ Some((element, origin))
+ } else {
+ self.render_edit_prediction_end_of_line_popover(
+ "Jump to Edit",
+ editor_snapshot,
+ visible_row_range,
+ target_display_point,
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ editor_width,
+ window,
+ cx,
+ )
+ }
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn render_edit_prediction_end_of_line_popover(
+ self: &mut Editor,
+ label: &'static str,
+ editor_snapshot: &EditorSnapshot,
+ visible_row_range: Range<DisplayRow>,
+ target_display_point: DisplayPoint,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<Pixels>,
+ content_origin: gpui::Point<Pixels>,
+ editor_width: Pixels,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<(AnyElement, gpui::Point<Pixels>)> {
+ let target_line_end = DisplayPoint::new(
+ target_display_point.row(),
+ editor_snapshot.line_len(target_display_point.row()),
+ );
+
+ let mut element = self
+ .render_edit_prediction_line_popover(label, None, window, cx)?
+ .into_any();
+
+ let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+
+ let line_origin = self.display_to_pixel_point(target_line_end, editor_snapshot, window)?;
+
+ let start_point = content_origin - point(scroll_pixel_position.x, Pixels::ZERO);
+ let mut origin = start_point
+ + line_origin
+ + point(Self::EDIT_PREDICTION_POPOVER_PADDING_X, Pixels::ZERO);
+ origin.x = origin.x.max(content_origin.x);
+
+ let max_x = content_origin.x + editor_width - size.width;
+
+ if origin.x > max_x {
+ let offset = line_height + Self::EDIT_PREDICTION_POPOVER_PADDING_Y;
+
+ let icon = if visible_row_range.contains(&(target_display_point.row() + 2)) {
+ origin.y += offset;
+ IconName::ArrowUp
+ } else {
+ origin.y -= offset;
+ IconName::ArrowDown
+ };
+
+ element = self
+ .render_edit_prediction_line_popover(label, Some(icon), window, cx)?
+ .into_any();
+
+ let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+
+ origin.x = content_origin.x + editor_width - size.width - px(2.);
+ }
+
+ element.prepaint_at(origin, window, cx);
+ Some((element, origin))
+ }
+
+ #[allow(clippy::too_many_arguments)]
+ fn render_edit_prediction_diff_popover(
+ self: &Editor,
+ text_bounds: &Bounds<Pixels>,
+ content_origin: gpui::Point<Pixels>,
+ editor_snapshot: &EditorSnapshot,
+ visible_row_range: Range<DisplayRow>,
+ line_layouts: &[LineWithInvisibles],
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<Pixels>,
+ newest_selection_head: Option<DisplayPoint>,
+ editor_width: Pixels,
+ style: &EditorStyle,
+ edits: &Vec<(Range<Anchor>, String)>,
+ edit_preview: &Option,
+ snapshot: &language::BufferSnapshot,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<(AnyElement, gpui::Point<Pixels>)> {
+ let edit_start = edits
+ .first()
+ .unwrap()
+ .0
+ .start
+ .to_display_point(editor_snapshot);
+ let edit_end = edits
+ .last()
+ .unwrap()
+ .0
+ .end
+ .to_display_point(editor_snapshot);
+
+ let is_visible = visible_row_range.contains(&edit_start.row())
+ || visible_row_range.contains(&edit_end.row());
+ if !is_visible {
+ return None;
+ }
+
+ let highlighted_edits =
+ crate::inline_completion_edit_text(&snapshot, edits, edit_preview.as_ref()?, false, cx);
+
+ let styled_text = highlighted_edits.to_styled_text(&style.text);
+ let line_count = highlighted_edits.text.lines().count();
+
+ const BORDER_WIDTH: Pixels = px(1.);
+
+ let mut element = h_flex()
+ .items_start()
+ .child(
+ h_flex()
+ .bg(cx.theme().colors().editor_background)
+ .border(BORDER_WIDTH)
+ .shadow_sm()
+ .border_color(cx.theme().colors().border)
+ .rounded_l_lg()
+ .when(line_count > 1, |el| el.rounded_br_lg())
+ .pr_1()
+ .child(styled_text),
+ )
+ .child(
+ h_flex()
+ .h(line_height + BORDER_WIDTH * px(2.))
+ .px_1p5()
+ .gap_1()
+ // Workaround: For some reason, there's a gap if we don't do this
+ .ml(-BORDER_WIDTH)
+ .shadow(smallvec![gpui::BoxShadow {
+ color: gpui::black().opacity(0.05),
+ offset: point(px(1.), px(1.)),
+ blur_radius: px(2.),
+ spread_radius: px(0.),
+ }])
+ .bg(Editor::edit_prediction_line_popover_bg_color(cx))
+ .border(BORDER_WIDTH)
+ .border_color(cx.theme().colors().border)
+ .rounded_r_lg()
+ .children(self.render_edit_prediction_accept_keybind(window, cx)),
+ )
+ .into_any();
+
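+ // Measure the longest edited line so the popover can be placed just past it when there
+ // is room to the right; otherwise it falls back to a position above or below.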
+ let longest_row =
+ editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1);
+ let longest_line_width = if visible_row_range.contains(&longest_row) {
+ line_layouts[(longest_row.0 - visible_row_range.start.0) as usize].width
+ } else {
+ layout_line(
+ longest_row,
+ editor_snapshot,
+ style,
+ editor_width,
+ |_| false,
+ window,
+ cx,
+ )
+ .width
+ };
+
+ let viewport_bounds =
+ Bounds::new(Default::default(), window.viewport_size()).extend(Edges {
+ right: -EditorElement::SCROLLBAR_WIDTH,
+ ..Default::default()
+ });
+
+ let x_after_longest =
+ text_bounds.origin.x + longest_line_width + Self::EDIT_PREDICTION_POPOVER_PADDING_X
+ - scroll_pixel_position.x;
+
+ let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx);
+
+ // Fully visible if it can be displayed within the window (allow overlapping other
+ // panes). However, this is only allowed if the popover starts within text_bounds.
+ let can_position_to_the_right = x_after_longest < text_bounds.right()
+ && x_after_longest + element_bounds.width < viewport_bounds.right();
+
+ let mut origin = if can_position_to_the_right {
+ point(
+ x_after_longest,
+ text_bounds.origin.y + edit_start.row().as_f32() * line_height
+ - scroll_pixel_position.y,
+ )
+ } else {
+ let cursor_row = newest_selection_head.map(|head| head.row());
+ let above_edit = edit_start
+ .row()
+ .0
+ .checked_sub(line_count as u32)
+ .map(DisplayRow);
+ let below_edit = Some(edit_end.row() + 1);
+ let above_cursor =
+ cursor_row.and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow));
+ let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1);
+
+ // Place the edit popover adjacent to the edit if there is a location
+ // available that is onscreen and does not obscure the cursor. Otherwise,
+ // place it adjacent to the cursor.
+ let row_target = [above_edit, below_edit, above_cursor, below_cursor]
+ .into_iter()
+ .flatten()
+ .find(|&start_row| {
+ let end_row = start_row + line_count as u32;
+ visible_row_range.contains(&start_row)
+ && visible_row_range.contains(&end_row)
+ && cursor_row.map_or(true, |cursor_row| {
+ !((start_row..end_row).contains(&cursor_row))
+ })
+ })?;
+
+ content_origin
+ + point(
+ -scroll_pixel_position.x,
+ row_target.as_f32() * line_height - scroll_pixel_position.y,
+ )
+ };
+
+ origin.x -= BORDER_WIDTH;
+
+ window.defer_draw(element, origin, 1);
+
+ // Do not return an element, since it will already be drawn due to defer_draw.
+ None
+ }
+
fn edit_prediction_cursor_popover_height(&self) -> Pixels {
px(30.)
}
@@ -10821,7 +11365,9 @@ impl Editor {
(runnable.buffer_id, row),
RunnableTasks {
templates: tasks,
- offset: MultiBufferOffset(runnable.run_range.start),
+ offset: snapshot
+ .buffer_snapshot
+ .anchor_before(runnable.run_range.start),
context_range,
column: point.column,
extra_variables: runnable.extra_captures,
@@ -11599,7 +12145,9 @@ impl Editor {
let range = editor.range_for_match(&range);
let range = collapse_multiline_range(range);
- if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() {
+ if !split
+ && Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref()
+ {
editor.go_to_singleton_buffer_range(range.clone(), window, cx);
} else {
window.defer(cx, move |window, cx| {
@@ -13263,10 +13811,6 @@ impl Editor {
});
}
- pub fn set_distinguish_unstaged_diff_hunks(&mut self) {
- self.distinguish_unstaged_diff_hunks = true;
- }
-
pub fn expand_all_diff_hunks(
&mut self,
_: &ExpandAllDiffHunks,
@@ -13312,7 +13856,7 @@ impl Editor {
snapshot: &MultiBufferSnapshot,
) -> bool {
let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot);
- hunks.any(|hunk| hunk.secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk)
+ hunks.any(|hunk| hunk.secondary_status != DiffHunkSecondaryStatus::None)
}
pub fn toggle_staged_selected_diff_hunks(
@@ -13457,12 +14001,8 @@ impl Editor {
log::debug!("no diff for buffer id");
return;
};
- let Some(secondary_diff) = diff.secondary_diff() else {
- log::debug!("no secondary diff for buffer id");
- return;
- };
- let edits = diff.secondary_edits_for_stage_or_unstage(
+ let Some(new_index_text) = diff.new_secondary_text_for_stage_or_unstage(
stage,
hunks.filter_map(|hunk| {
if stage && hunk.secondary_status == DiffHunkSecondaryStatus::None {
@@ -13472,29 +14012,14 @@ impl Editor {
{
return None;
}
- Some((
- hunk.diff_base_byte_range.clone(),
- hunk.secondary_diff_base_byte_range.clone(),
- hunk.buffer_range.clone(),
- ))
+ Some((hunk.buffer_range.clone(), hunk.diff_base_byte_range.clone()))
}),
&buffer_snapshot,
- );
-
- let Some(index_base) = secondary_diff
- .base_text()
- .map(|snapshot| snapshot.text.as_rope().clone())
- else {
- log::debug!("no index base");
+ cx,
+ ) else {
+ log::debug!("missing secondary diff or index text");
return;
};
- let index_buffer = cx.new(|cx| {
- Buffer::local_normalized(index_base.clone(), text::LineEnding::default(), cx)
- });
- let new_index_text = index_buffer.update(cx, |index_buffer, cx| {
- index_buffer.edit(edits, None, cx);
- index_buffer.snapshot().as_rope().to_string()
- });
let new_index_text = if new_index_text.is_empty()
&& !stage
&& (diff.is_single_insertion
@@ -13512,7 +14037,12 @@ impl Editor {
.update(cx, |buffer_store, cx| buffer_store.save_buffer(buffer, cx))
.detach_and_log_err(cx);
- let _ = repo.read(cx).set_index_text(&path, new_index_text);
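+ // Setting the index text is now asynchronous: run it on the background executor
+ // and log any error.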
+ cx.background_spawn(
+ repo.read(cx)
+ .set_index_text(&path, new_index_text.map(|rope| rope.to_string()))
+ .log_err(),
+ )
+ .detach();
}
pub fn expand_selected_diff_hunks(&mut self, cx: &mut Context) {
@@ -15198,6 +15728,7 @@ impl Editor {
fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) {
self.tasks_update_task = Some(self.refresh_runnables(window, cx));
+ self.update_edit_prediction_settings(cx);
self.refresh_inline_completion(true, false, window, cx);
self.refresh_inlay_hints(
InlayHintRefreshReason::SettingsChange(inlay_hint_settings(
@@ -15386,27 +15917,39 @@ impl Editor {
let selections = self.selections.all::(cx);
let multi_buffer = self.buffer.read(cx);
for selection in selections {
- for (buffer, mut range, _) in multi_buffer
+ for (snapshot, range, _, anchor) in multi_buffer
.snapshot(cx)
- .range_to_buffer_ranges(selection.range())
+ .range_to_buffer_ranges_with_deleted_hunks(selection.range())
{
- // When editing branch buffers, jump to the corresponding location
- // in their base buffer.
- let mut buffer_handle = multi_buffer.buffer(buffer.remote_id()).unwrap();
- let buffer = buffer_handle.read(cx);
- if let Some(base_buffer) = buffer.base_buffer() {
- range = buffer.range_to_version(range, &base_buffer.read(cx).version());
- buffer_handle = base_buffer;
- }
-
- if selection.reversed {
- mem::swap(&mut range.start, &mut range.end);
+ if let Some(anchor) = anchor {
+ // selection is in a deleted hunk
+ let Some(buffer_id) = anchor.buffer_id else {
+ continue;
+ };
+ let Some(buffer_handle) = multi_buffer.buffer(buffer_id) else {
+ continue;
+ };
+ let offset = text::ToOffset::to_offset(
+ &anchor.text_anchor,
+ &buffer_handle.read(cx).snapshot(),
+ );
+ let range = offset..offset;
+ new_selections_by_buffer
+ .entry(buffer_handle)
+ .or_insert((Vec::new(), None))
+ .0
+ .push(range)
+ } else {
+ let Some(buffer_handle) = multi_buffer.buffer(snapshot.remote_id())
+ else {
+ continue;
+ };
+ new_selections_by_buffer
+ .entry(buffer_handle)
+ .or_insert((Vec::new(), None))
+ .0
+ .push(range)
}
- new_selections_by_buffer
- .entry(buffer_handle)
- .or_insert((Vec::new(), None))
- .0
- .push(range)
}
}
}
diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs
index da41b1db2a0236..95e272c76edf3a 100644
--- a/crates/editor/src/editor_tests.rs
+++ b/crates/editor/src/editor_tests.rs
@@ -7,7 +7,7 @@ use crate::{
},
JoinLines,
};
-use buffer_diff::{BufferDiff, DiffHunkStatus};
+use buffer_diff::{BufferDiff, DiffHunkStatus, DiffHunkStatusKind};
use futures::StreamExt;
use gpui::{
div, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext,
@@ -3392,7 +3392,7 @@ async fn test_join_lines_with_git_diff_base(executor: BackgroundExecutor, cx: &m
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
// Join lines
@@ -3432,7 +3432,7 @@ async fn test_custom_newlines_cause_no_false_positive_diffs(
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.set_state("Line 0\r\nLine 1\rˇ\nLine 2\r\nLine 3");
- cx.set_diff_base("Line 0\r\nLine 1\r\nLine 2\r\nLine 3");
+ cx.set_head_text("Line 0\r\nLine 1\r\nLine 2\r\nLine 3");
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -5814,7 +5814,7 @@ async fn test_fold_function_bodies(cx: &mut TestAppContext) {
let mut cx = EditorLspTestContext::new_rust(Default::default(), cx).await;
cx.set_state(&text);
- cx.set_diff_base(&base_text);
+ cx.set_head_text(&base_text);
cx.update_editor(|editor, window, cx| {
editor.expand_all_diff_hunks(&Default::default(), window, cx);
});
@@ -11042,7 +11042,7 @@ async fn test_go_to_hunk(executor: BackgroundExecutor, cx: &mut TestAppContext)
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -12535,7 +12535,7 @@ async fn test_deleting_over_diff_hunk(cx: &mut TestAppContext) {
three
"#};
- cx.set_diff_base(base_text);
+ cx.set_head_text(base_text);
cx.set_state("\nˇ\n");
cx.executor().run_until_parked();
cx.update_editor(|editor, _window, cx| {
@@ -13172,7 +13172,7 @@ async fn test_toggle_selected_diff_hunks(executor: BackgroundExecutor, cx: &mut
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -13306,7 +13306,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -13334,7 +13334,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
.unindent(),
);
- cx.set_diff_base("new diff base!");
+ cx.set_head_text("new diff base!");
executor.run_until_parked();
cx.assert_state_with_diff(
r#"
@@ -13634,7 +13634,7 @@ async fn test_edits_around_expanded_insertion_hunks(
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -13782,7 +13782,7 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
- cx.set_diff_base(indoc! { "
+ cx.set_head_text(indoc! { "
one
two
three
@@ -13905,7 +13905,7 @@ async fn test_edits_around_expanded_deletion_hunks(
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -14028,7 +14028,7 @@ async fn test_backspace_after_deletion_hunk(executor: BackgroundExecutor, cx: &m
.unindent(),
);
- cx.set_diff_base(&base_text);
+ cx.set_head_text(&base_text);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -14110,7 +14110,7 @@ async fn test_edit_after_expanded_modification_hunk(
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, window, cx| {
editor.expand_all_diff_hunks(&ExpandAllDiffHunks, window, cx);
@@ -14845,7 +14845,7 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp
"#
.unindent(),
);
- cx.set_diff_base(&diff_base);
+ cx.set_head_text(&diff_base);
cx.update_editor(|editor, window, cx| {
editor.expand_all_diff_hunks(&ExpandAllDiffHunks, window, cx);
});
@@ -14982,6 +14982,80 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp
);
}
+#[gpui::test]
+async fn test_partially_staged_hunk(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ cx.set_head_text(indoc! { "
+ one
+ two
+ three
+ four
+ five
+ "
+ });
+ cx.set_index_text(indoc! { "
+ one
+ two
+ three
+ four
+ five
+ "
+ });
+ cx.set_state(indoc! {"
+ one
+ TWO
+ ˇTHREE
+ FOUR
+ five
+ "});
+ cx.run_until_parked();
+ cx.update_editor(|editor, window, cx| {
+ editor.toggle_staged_selected_diff_hunks(&Default::default(), window, cx);
+ });
+ cx.run_until_parked();
+ cx.assert_index_text(Some(indoc! {"
+ one
+ TWO
+ THREE
+ FOUR
+ five
+ "}));
+ cx.set_state(indoc! { "
+ one
+ TWO
+ ˇTHREE-HUNDRED
+ FOUR
+ five
+ "});
+ cx.run_until_parked();
+ cx.update_editor(|editor, window, cx| {
+ let snapshot = editor.snapshot(window, cx);
+ let hunks = editor
+ .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot)
+ .collect::<Vec<_>>();
+ assert_eq!(hunks.len(), 1);
+ assert_eq!(
+ hunks[0].status(),
+ DiffHunkStatus {
+ kind: DiffHunkStatusKind::Modified,
+ secondary: DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk
+ }
+ );
+
+ editor.toggle_staged_selected_diff_hunks(&Default::default(), window, cx);
+ });
+ cx.run_until_parked();
+ cx.assert_index_text(Some(indoc! {"
+ one
+ TWO
+ THREE-HUNDRED
+ FOUR
+ five
+ "}));
+}
+
#[gpui::test]
fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -15341,11 +15415,12 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) {
});
editor.update_in(cx, |editor, window, cx| {
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
editor.tasks.insert(
(buffer.read(cx).remote_id(), 3),
RunnableTasks {
templates: vec![],
- offset: MultiBufferOffset(43),
+ offset: snapshot.anchor_before(43),
column: 0,
extra_variables: HashMap::default(),
context_range: BufferOffset(43)..BufferOffset(85),
@@ -15355,7 +15430,7 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) {
(buffer.read(cx).remote_id(), 8),
RunnableTasks {
templates: vec![],
- offset: MultiBufferOffset(86),
+ offset: snapshot.anchor_before(86),
column: 0,
extra_variables: HashMap::default(),
context_range: BufferOffset(86)..BufferOffset(191),
@@ -16686,7 +16761,7 @@ fn assert_hunk_revert(
cx: &mut EditorLspTestContext,
) {
cx.set_state(not_reverted_text_with_selections);
- cx.set_diff_base(base_text);
+ cx.set_head_text(base_text);
cx.executor().run_until_parked();
let actual_hunk_statuses_before = cx.update_editor(|editor, window, cx| {
diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs
index b04114c44c8ff3..1a46252d70680f 100644
--- a/crates/editor/src/element.rs
+++ b/crates/editor/src/element.rs
@@ -50,7 +50,7 @@ use language::{
use lsp::DiagnosticSeverity;
use multi_buffer::{
Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow,
- RowInfo, ToOffset,
+ RowInfo,
};
use project::{
debugger::breakpoint_store::{Breakpoint, BreakpointKind},
@@ -410,7 +410,7 @@ impl EditorElement {
register_action(editor, window, Editor::toggle_relative_line_numbers);
register_action(editor, window, Editor::toggle_indent_guides);
register_action(editor, window, Editor::toggle_inlay_hints);
- register_action(editor, window, Editor::toggle_inline_completions);
+ register_action(editor, window, Editor::toggle_edit_predictions);
register_action(editor, window, Editor::toggle_inline_diagnostics);
register_action(editor, window, hover_popover::hover);
register_action(editor, window, Editor::reveal_in_finder);
@@ -2141,21 +2141,22 @@ impl EditorElement {
None
};
- let offset_range_start = snapshot
- .display_point_to_anchor(DisplayPoint::new(range.start, 0), Bias::Left)
- .to_offset(&snapshot.buffer_snapshot);
- let offset_range_end = snapshot
- .display_point_to_anchor(DisplayPoint::new(range.end, 0), Bias::Right)
- .to_offset(&snapshot.buffer_snapshot);
+ let offset_range_start =
+ snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left);
+
+ let offset_range_end =
+ snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right);
editor
.tasks
.iter()
.filter_map(|(_, tasks)| {
- if tasks.offset.0 < offset_range_start || tasks.offset.0 >= offset_range_end {
+ let multibuffer_point = tasks.offset.to_point(&snapshot.buffer_snapshot);
+ if multibuffer_point < offset_range_start
+ || multibuffer_point > offset_range_end
+ {
return None;
}
- let multibuffer_point = tasks.offset.0.to_point(&snapshot.buffer_snapshot);
let multibuffer_row = MultiBufferRow(multibuffer_point.row);
let buffer_folded = snapshot
.buffer_snapshot
@@ -3805,391 +3806,6 @@ impl EditorElement {
}
}
- #[allow(clippy::too_many_arguments)]
- fn layout_edit_prediction_popover(
- &self,
- text_bounds: &Bounds,
- content_origin: gpui::Point,
- editor_snapshot: &EditorSnapshot,
- visible_row_range: Range,
- scroll_top: f32,
- scroll_bottom: f32,
- line_layouts: &[LineWithInvisibles],
- line_height: Pixels,
- scroll_pixel_position: gpui::Point,
- newest_selection_head: Option,
- editor_width: Pixels,
- style: &EditorStyle,
- window: &mut Window,
- cx: &mut App,
- ) -> Option<(AnyElement, gpui::Point)> {
- const PADDING_X: Pixels = Pixels(24.);
- const PADDING_Y: Pixels = Pixels(2.);
-
- let editor = self.editor.read(cx);
- let active_inline_completion = editor.active_inline_completion.as_ref()?;
-
- if editor.edit_prediction_visible_in_cursor_popover(true) {
- return None;
- }
-
- // Adjust text origin for horizontal scrolling (in some cases here)
- let start_point = content_origin - gpui::Point::new(scroll_pixel_position.x, Pixels(0.0));
-
- // Clamp left offset after extreme scrollings
- let clamp_start = |point: gpui::Point| gpui::Point {
- x: point.x.max(content_origin.x),
- y: point.y,
- };
-
- match &active_inline_completion.completion {
- InlineCompletion::Move { target, .. } => {
- let target_display_point = target.to_display_point(editor_snapshot);
-
- if editor.edit_prediction_requires_modifier() {
- if !editor.edit_prediction_preview_is_active() {
- return None;
- }
-
- if target_display_point.row() < visible_row_range.start {
- let mut element = editor
- .render_edit_prediction_line_popover(
- "Scroll",
- Some(IconName::ArrowUp),
- window,
- cx,
- )?
- .into_any();
-
- element.layout_as_root(AvailableSpace::min_size(), window, cx);
-
- let cursor = newest_selection_head?;
- let cursor_row_layout = line_layouts
- .get(cursor.row().minus(visible_row_range.start) as usize)?;
- let cursor_column = cursor.column() as usize;
-
- let cursor_character_x = cursor_row_layout.x_for_index(cursor_column);
-
- const PADDING_Y: Pixels = px(12.);
-
- let origin = start_point + point(cursor_character_x, PADDING_Y);
-
- element.prepaint_at(origin, window, cx);
- return Some((element, origin));
- } else if target_display_point.row() >= visible_row_range.end {
- let mut element = editor
- .render_edit_prediction_line_popover(
- "Scroll",
- Some(IconName::ArrowDown),
- window,
- cx,
- )?
- .into_any();
-
- let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
-
- let cursor = newest_selection_head?;
- let cursor_row_layout = line_layouts
- .get(cursor.row().minus(visible_row_range.start) as usize)?;
- let cursor_column = cursor.column() as usize;
-
- let cursor_character_x = cursor_row_layout.x_for_index(cursor_column);
- const PADDING_Y: Pixels = px(12.);
-
- let origin = start_point
- + point(
- cursor_character_x,
- text_bounds.size.height - size.height - PADDING_Y,
- );
-
- element.prepaint_at(origin, window, cx);
- return Some((element, origin));
- } else {
- const POLE_WIDTH: Pixels = px(2.);
-
- let mut element = v_flex()
- .items_end()
- .child(
- editor
- .render_edit_prediction_line_popover("Jump", None, window, cx)?
- .rounded_br(px(0.))
- .rounded_tr(px(0.))
- .border_r_2(),
- )
- .child(
- div()
- .w(POLE_WIDTH)
- .bg(Editor::edit_prediction_callout_popover_border_color(cx))
- .h(line_height),
- )
- .into_any();
-
- let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
-
- let line_layout =
- line_layouts
- .get(target_display_point.row().minus(visible_row_range.start)
- as usize)?;
- let target_column = target_display_point.column() as usize;
-
- let target_x = line_layout.x_for_index(target_column);
- let target_y = (target_display_point.row().as_f32() * line_height)
- - scroll_pixel_position.y;
-
- let origin = clamp_start(
- start_point + point(target_x, target_y)
- - point(size.width - POLE_WIDTH, size.height - line_height),
- );
-
- element.prepaint_at(origin, window, cx);
-
- return Some((element, origin));
- }
- }
-
- if target_display_point.row().as_f32() < scroll_top {
- let mut element = editor
- .render_edit_prediction_line_popover(
- "Jump to Edit",
- Some(IconName::ArrowUp),
- window,
- cx,
- )?
- .into_any();
-
- let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
- let offset = point((text_bounds.size.width - size.width) / 2., PADDING_Y);
-
- let origin = text_bounds.origin + offset;
- element.prepaint_at(origin, window, cx);
- Some((element, origin))
- } else if (target_display_point.row().as_f32() + 1.) > scroll_bottom {
- let mut element = editor
- .render_edit_prediction_line_popover(
- "Jump to Edit",
- Some(IconName::ArrowDown),
- window,
- cx,
- )?
- .into_any();
-
- let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
- let offset = point(
- (text_bounds.size.width - size.width) / 2.,
- text_bounds.size.height - size.height - PADDING_Y,
- );
-
- let origin = text_bounds.origin + offset;
- element.prepaint_at(origin, window, cx);
- Some((element, origin))
- } else {
- let mut element = editor
- .render_edit_prediction_line_popover("Jump to Edit", None, window, cx)?
- .into_any();
- let target_line_end = DisplayPoint::new(
- target_display_point.row(),
- editor_snapshot.line_len(target_display_point.row()),
- );
- let origin = self.editor.update(cx, |editor, _cx| {
- editor.display_to_pixel_point(target_line_end, editor_snapshot, window)
- })?;
-
- let origin = clamp_start(start_point + origin + point(PADDING_X, px(0.)));
- element.prepaint_as_root(origin, AvailableSpace::min_size(), window, cx);
- Some((element, origin))
- }
- }
- InlineCompletion::Edit {
- edits,
- edit_preview,
- display_mode,
- snapshot,
- } => {
- if self.editor.read(cx).has_visible_completions_menu() {
- return None;
- }
-
- let edit_start = edits
- .first()
- .unwrap()
- .0
- .start
- .to_display_point(editor_snapshot);
- let edit_end = edits
- .last()
- .unwrap()
- .0
- .end
- .to_display_point(editor_snapshot);
-
- let is_visible = visible_row_range.contains(&edit_start.row())
- || visible_row_range.contains(&edit_end.row());
- if !is_visible {
- return None;
- }
-
- match display_mode {
- EditDisplayMode::TabAccept => {
- let range = &edits.first()?.0;
- let target_display_point = range.end.to_display_point(editor_snapshot);
-
- let target_line_end = DisplayPoint::new(
- target_display_point.row(),
- editor_snapshot.line_len(target_display_point.row()),
- );
- let (mut element, origin) = self.editor.update(cx, |editor, cx| {
- Some((
- editor
- .render_edit_prediction_line_popover(
- "Accept", None, window, cx,
- )?
- .into_any(),
- editor.display_to_pixel_point(
- target_line_end,
- editor_snapshot,
- window,
- )?,
- ))
- })?;
-
- let origin = clamp_start(start_point + origin + point(PADDING_X, px(0.)));
- element.prepaint_as_root(origin, AvailableSpace::min_size(), window, cx);
- return Some((element, origin));
- }
- EditDisplayMode::Inline => return None,
- EditDisplayMode::DiffPopover => {}
- }
-
- let highlighted_edits = crate::inline_completion_edit_text(
- &snapshot,
- edits,
- edit_preview.as_ref()?,
- false,
- cx,
- );
-
- let styled_text = highlighted_edits.to_styled_text(&style.text);
- let line_count = highlighted_edits.text.lines().count();
-
- const BORDER_WIDTH: Pixels = px(1.);
-
- let mut element = h_flex()
- .items_start()
- .child(
- h_flex()
- .bg(cx.theme().colors().editor_background)
- .border(BORDER_WIDTH)
- .shadow_sm()
- .border_color(cx.theme().colors().border)
- .rounded_l_lg()
- .when(line_count > 1, |el| el.rounded_br_lg())
- .pr_1()
- .child(styled_text),
- )
- .child(
- h_flex()
- .h(line_height + BORDER_WIDTH * px(2.))
- .px_1p5()
- .gap_1()
- // Workaround: For some reason, there's a gap if we don't do this
- .ml(-BORDER_WIDTH)
- .shadow(smallvec![gpui::BoxShadow {
- color: gpui::black().opacity(0.05),
- offset: point(px(1.), px(1.)),
- blur_radius: px(2.),
- spread_radius: px(0.),
- }])
- .bg(Editor::edit_prediction_line_popover_bg_color(cx))
- .border(BORDER_WIDTH)
- .border_color(cx.theme().colors().border)
- .rounded_r_lg()
- .children(editor.render_edit_prediction_accept_keybind(window, cx)),
- )
- .into_any();
-
- let longest_row =
- editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1);
- let longest_line_width = if visible_row_range.contains(&longest_row) {
- line_layouts[(longest_row.0 - visible_row_range.start.0) as usize].width
- } else {
- layout_line(
- longest_row,
- editor_snapshot,
- style,
- editor_width,
- |_| false,
- window,
- cx,
- )
- .width
- };
-
- let viewport_bounds = Bounds::new(Default::default(), window.viewport_size())
- .extend(Edges {
- right: -Self::SCROLLBAR_WIDTH,
- ..Default::default()
- });
-
- let x_after_longest =
- text_bounds.origin.x + longest_line_width + PADDING_X - scroll_pixel_position.x;
-
- let element_bounds = element.layout_as_root(AvailableSpace::min_size(), window, cx);
-
- // Fully visible if it can be displayed within the window (allow overlapping other
- // panes). However, this is only allowed if the popover starts within text_bounds.
- let can_position_to_the_right = x_after_longest < text_bounds.right()
- && x_after_longest + element_bounds.width < viewport_bounds.right();
-
- let mut origin = if can_position_to_the_right {
- point(
- x_after_longest,
- text_bounds.origin.y + edit_start.row().as_f32() * line_height
- - scroll_pixel_position.y,
- )
- } else {
- let cursor_row = newest_selection_head.map(|head| head.row());
- let above_edit = edit_start
- .row()
- .0
- .checked_sub(line_count as u32)
- .map(DisplayRow);
- let below_edit = Some(edit_end.row() + 1);
- let above_cursor = cursor_row
- .and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow));
- let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1);
-
- // Place the edit popover adjacent to the edit if there is a location
- // available that is onscreen and does not obscure the cursor. Otherwise,
- // place it adjacent to the cursor.
- let row_target = [above_edit, below_edit, above_cursor, below_cursor]
- .into_iter()
- .flatten()
- .find(|&start_row| {
- let end_row = start_row + line_count as u32;
- visible_row_range.contains(&start_row)
- && visible_row_range.contains(&end_row)
- && cursor_row.map_or(true, |cursor_row| {
- !((start_row..end_row).contains(&cursor_row))
- })
- })?;
-
- content_origin
- + point(
- -scroll_pixel_position.x,
- row_target.as_f32() * line_height - scroll_pixel_position.y,
- )
- };
-
- origin.x -= BORDER_WIDTH;
-
- window.defer_draw(element, origin, 1);
-
- // Do not return an element, since it will already be drawn due to defer_draw.
- None
- }
- }
- }
-
fn layout_mouse_context_menu(
&self,
editor_snapshot: &EditorSnapshot,
@@ -6335,7 +5951,7 @@ pub(crate) struct LineWithInvisibles {
fragments: SmallVec<[LineFragment; 1]>,
invisibles: Vec<Invisible>,
len: usize,
- width: Pixels,
+ pub(crate) width: Pixels,
font_size: Pixels,
}
@@ -7519,22 +7135,25 @@ impl Element for EditorElement {
});
let (inline_completion_popover, inline_completion_popover_origin) = self
- .layout_edit_prediction_popover(
- &text_hitbox.bounds,
- content_origin,
- &snapshot,
- start_row..end_row,
- scroll_position.y,
- scroll_position.y + height_in_lines,
- &line_layouts,
- line_height,
- scroll_pixel_position,
- newest_selection_head,
- editor_width,
- &style,
- window,
- cx,
- )
+ .editor
+ .update(cx, |editor, cx| {
+ editor.render_edit_prediction_popover(
+ &text_hitbox.bounds,
+ content_origin,
+ &snapshot,
+ start_row..end_row,
+ scroll_position.y,
+ scroll_position.y + height_in_lines,
+ &line_layouts,
+ line_height,
+ scroll_pixel_position,
+ newest_selection_head,
+ editor_width,
+ &style,
+ window,
+ cx,
+ )
+ })
.unzip();
let mut inline_diagnostics = self.layout_inline_diagnostics(
@@ -8356,7 +7975,7 @@ struct BlockLayout {
style: BlockStyle,
}
-fn layout_line(
+pub fn layout_line(
row: DisplayRow,
snapshot: &EditorSnapshot,
style: &EditorStyle,
diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs
index fb63d21151d26c..6a08f6e283ca5e 100644
--- a/crates/editor/src/test/editor_test_context.rs
+++ b/crates/editor/src/test/editor_test_context.rs
@@ -285,7 +285,7 @@ impl EditorTestContext {
snapshot.anchor_before(ranges[0].start)..snapshot.anchor_after(ranges[0].end)
}
- pub fn set_diff_base(&mut self, diff_base: &str) {
+ pub fn set_head_text(&mut self, diff_base: &str) {
self.cx.run_until_parked();
let fs = self.update_editor(|editor, _, cx| {
editor.project.as_ref().unwrap().read(cx).fs().as_fake()
@@ -298,6 +298,20 @@ impl EditorTestContext {
self.cx.run_until_parked();
}
+ pub fn set_index_text(&mut self, diff_base: &str) {
+ self.cx.run_until_parked();
+ let fs = self.update_editor(|editor, _, cx| {
+ editor.project.as_ref().unwrap().read(cx).fs().as_fake()
+ });
+ let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
+ fs.set_index_for_repo(
+ &Self::root_path().join(".git"),
+ &[(path.into(), diff_base.to_string())],
+ );
+ self.cx.run_until_parked();
+ }
+
+ #[track_caller]
pub fn assert_index_text(&mut self, expected: Option<&str>) {
let fs = self.update_editor(|editor, _, cx| {
editor.project.as_ref().unwrap().read(cx).fs().as_fake()
diff --git a/crates/extensions_ui/src/components/extension_card.rs b/crates/extensions_ui/src/components/extension_card.rs
index 901e8db075a5f6..d9ae37801186c6 100644
--- a/crates/extensions_ui/src/components/extension_card.rs
+++ b/crates/extensions_ui/src/components/extension_card.rs
@@ -53,10 +53,7 @@ impl RenderOnce for ExtensionCard {
.size_full()
.items_center()
.justify_center()
- .bg(theme::color_alpha(
- cx.theme().colors().elevated_surface_background,
- 0.8,
- ))
+ .bg(cx.theme().colors().elevated_surface_background.alpha(0.8))
.child(Label::new("Overridden by dev extension.")),
)
}),
diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs
index 628c216ccd9209..35ffad95118566 100644
--- a/crates/fs/src/fs.rs
+++ b/crates/fs/src/fs.rs
@@ -1337,7 +1337,10 @@ impl FakeFs {
pub fn paths(&self, include_dot_git: bool) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
- queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
+ queue.push_back((
+ PathBuf::from(util::path!("/")),
+ self.state.lock().root.clone(),
+ ));
while let Some((path, entry)) = queue.pop_front() {
if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() {
for (name, entry) in entries {
@@ -1358,7 +1361,10 @@ impl FakeFs {
pub fn directories(&self, include_dot_git: bool) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
- queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
+ queue.push_back((
+ PathBuf::from(util::path!("/")),
+ self.state.lock().root.clone(),
+ ));
while let Some((path, entry)) = queue.pop_front() {
if let FakeFsEntry::Dir { entries, .. } = &*entry.lock() {
for (name, entry) in entries {
@@ -2020,7 +2026,11 @@ pub async fn copy_recursive<'a>(
let Ok(item_relative_path) = item.strip_prefix(source) else {
continue;
};
- let target_item = target.join(item_relative_path);
+ let target_item = if item_relative_path == Path::new("") {
+ target.to_path_buf()
+ } else {
+ target.join(item_relative_path)
+ };
if is_dir {
if !options.overwrite && fs.metadata(&target_item).await.is_ok_and(|m| m.is_some()) {
if options.ignore_if_exists {
@@ -2174,6 +2184,142 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_copy_recursive_with_single_file(executor: BackgroundExecutor) {
+ let fs = FakeFs::new(executor.clone());
+ fs.insert_tree(
+ path!("/outer"),
+ json!({
+ "a": "A",
+ "b": "B",
+ "inner": {}
+ }),
+ )
+ .await;
+
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/outer/a")),
+ PathBuf::from(path!("/outer/b")),
+ ]
+ );
+
+ let source = Path::new(path!("/outer/a"));
+ let target = Path::new(path!("/outer/a copy"));
+ copy_recursive(fs.as_ref(), source, target, Default::default())
+ .await
+ .unwrap();
+
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/outer/a")),
+ PathBuf::from(path!("/outer/a copy")),
+ PathBuf::from(path!("/outer/b")),
+ ]
+ );
+
+ let source = Path::new(path!("/outer/a"));
+ let target = Path::new(path!("/outer/inner/a copy"));
+ copy_recursive(fs.as_ref(), source, target, Default::default())
+ .await
+ .unwrap();
+
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/outer/a")),
+ PathBuf::from(path!("/outer/a copy")),
+ PathBuf::from(path!("/outer/b")),
+ PathBuf::from(path!("/outer/inner/a copy")),
+ ]
+ );
+ }
+
+ #[gpui::test]
+ async fn test_copy_recursive_with_single_dir(executor: BackgroundExecutor) {
+ let fs = FakeFs::new(executor.clone());
+ fs.insert_tree(
+ path!("/outer"),
+ json!({
+ "a": "A",
+ "empty": {},
+ "non-empty": {
+ "b": "B",
+ }
+ }),
+ )
+ .await;
+
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/outer/a")),
+ PathBuf::from(path!("/outer/non-empty/b")),
+ ]
+ );
+ assert_eq!(
+ fs.directories(false),
+ vec![
+ PathBuf::from(path!("/")),
+ PathBuf::from(path!("/outer")),
+ PathBuf::from(path!("/outer/empty")),
+ PathBuf::from(path!("/outer/non-empty")),
+ ]
+ );
+
+ let source = Path::new(path!("/outer/empty"));
+ let target = Path::new(path!("/outer/empty copy"));
+ copy_recursive(fs.as_ref(), source, target, Default::default())
+ .await
+ .unwrap();
+
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/outer/a")),
+ PathBuf::from(path!("/outer/non-empty/b")),
+ ]
+ );
+ assert_eq!(
+ fs.directories(false),
+ vec![
+ PathBuf::from(path!("/")),
+ PathBuf::from(path!("/outer")),
+ PathBuf::from(path!("/outer/empty")),
+ PathBuf::from(path!("/outer/empty copy")),
+ PathBuf::from(path!("/outer/non-empty")),
+ ]
+ );
+
+ let source = Path::new(path!("/outer/non-empty"));
+ let target = Path::new(path!("/outer/non-empty copy"));
+ copy_recursive(fs.as_ref(), source, target, Default::default())
+ .await
+ .unwrap();
+
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/outer/a")),
+ PathBuf::from(path!("/outer/non-empty/b")),
+ PathBuf::from(path!("/outer/non-empty copy/b")),
+ ]
+ );
+ assert_eq!(
+ fs.directories(false),
+ vec![
+ PathBuf::from(path!("/")),
+ PathBuf::from(path!("/outer")),
+ PathBuf::from(path!("/outer/empty")),
+ PathBuf::from(path!("/outer/empty copy")),
+ PathBuf::from(path!("/outer/non-empty")),
+ PathBuf::from(path!("/outer/non-empty copy")),
+ ]
+ );
+ }
+
#[gpui::test]
async fn test_copy_recursive(executor: BackgroundExecutor) {
let fs = FakeFs::new(executor.clone());
@@ -2185,7 +2331,8 @@ mod tests {
"b": "B",
"inner3": {
"d": "D",
- }
+ },
+ "inner4": {}
},
"inner2": {
"c": "C",
@@ -2203,6 +2350,17 @@ mod tests {
PathBuf::from(path!("/outer/inner1/inner3/d")),
]
);
+ assert_eq!(
+ fs.directories(false),
+ vec![
+ PathBuf::from(path!("/")),
+ PathBuf::from(path!("/outer")),
+ PathBuf::from(path!("/outer/inner1")),
+ PathBuf::from(path!("/outer/inner2")),
+ PathBuf::from(path!("/outer/inner1/inner3")),
+ PathBuf::from(path!("/outer/inner1/inner4")),
+ ]
+ );
let source = Path::new(path!("/outer"));
let target = Path::new(path!("/outer/inner1/outer"));
@@ -2223,6 +2381,22 @@ mod tests {
PathBuf::from(path!("/outer/inner1/outer/inner1/inner3/d")),
]
);
+ assert_eq!(
+ fs.directories(false),
+ vec![
+ PathBuf::from(path!("/")),
+ PathBuf::from(path!("/outer")),
+ PathBuf::from(path!("/outer/inner1")),
+ PathBuf::from(path!("/outer/inner2")),
+ PathBuf::from(path!("/outer/inner1/inner3")),
+ PathBuf::from(path!("/outer/inner1/inner4")),
+ PathBuf::from(path!("/outer/inner1/outer")),
+ PathBuf::from(path!("/outer/inner1/outer/inner1")),
+ PathBuf::from(path!("/outer/inner1/outer/inner2")),
+ PathBuf::from(path!("/outer/inner1/outer/inner1/inner3")),
+ PathBuf::from(path!("/outer/inner1/outer/inner1/inner4")),
+ ]
+ );
}
#[gpui::test]
diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml
index 4eefe6c262fe59..0473b1dd57d269 100644
--- a/crates/git/Cargo.toml
+++ b/crates/git/Cargo.toml
@@ -26,6 +26,7 @@ log.workspace = true
parking_lot.workspace = true
regex.workspace = true
rope.workspace = true
+schemars.workspace = true
serde.workspace = true
smol.workspace = true
sum_tree.workspace = true
diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs
index 21cd982b09f0ae..d68d9f7b655e58 100644
--- a/crates/git/src/git.rs
+++ b/crates/git/src/git.rs
@@ -8,6 +8,9 @@ pub mod status;
use anyhow::{anyhow, Context as _, Result};
use gpui::action_with_deprecated_aliases;
use gpui::actions;
+use gpui::impl_actions;
+use repository::PushOptions;
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::ffi::OsStr;
use std::fmt;
@@ -27,6 +30,13 @@ pub static COMMIT_MESSAGE: LazyLock<&'static OsStr> =
LazyLock::new(|| OsStr::new("COMMIT_EDITMSG"));
pub static INDEX_LOCK: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("index.lock"));
+#[derive(Debug, Copy, Clone, PartialEq, Deserialize, JsonSchema)]
+pub struct Push {
+ pub options: Option<PushOptions>,
+}
+
+impl_actions!(git, [Push]);
+
actions!(
git,
[
@@ -43,6 +53,8 @@ actions!(
RestoreTrackedFiles,
TrashUntrackedFiles,
Uncommit,
+ Pull,
+ Fetch,
Commit,
]
);
diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs
index b29d4b226d059f..7b68507eca4135 100644
--- a/crates/git/src/repository.rs
+++ b/crates/git/src/repository.rs
@@ -7,6 +7,8 @@ use git2::BranchType;
use gpui::SharedString;
use parking_lot::Mutex;
use rope::Rope;
+use schemars::JsonSchema;
+use serde::Deserialize;
use std::borrow::Borrow;
use std::io::Write as _;
use std::process::Stdio;
@@ -29,6 +31,12 @@ pub struct Branch {
}
impl Branch {
+ pub fn tracking_status(&self) -> Option<UpstreamTrackingStatus> {
+ self.upstream
+ .as_ref()
+ .and_then(|upstream| upstream.tracking.status())
+ }
+
pub fn priority_key(&self) -> (bool, Option) {
(
self.is_head,
@@ -42,11 +50,32 @@ impl Branch {
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Upstream {
pub ref_name: SharedString,
- pub tracking: Option<UpstreamTracking>,
+ pub tracking: UpstreamTracking,
}
-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
-pub struct UpstreamTracking {
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
+pub enum UpstreamTracking {
+ /// Remote ref not present in local repository.
+ Gone,
+ /// Remote ref present in local repository (fetched from remote).
+ Tracked(UpstreamTrackingStatus),
+}
+
+impl UpstreamTracking {
+ pub fn is_gone(&self) -> bool {
+ matches!(self, UpstreamTracking::Gone)
+ }
+
+ pub fn status(&self) -> Option<UpstreamTrackingStatus> {
+ match self {
+ UpstreamTracking::Gone => None,
+ UpstreamTracking::Tracked(status) => Some(*status),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
+pub struct UpstreamTrackingStatus {
pub ahead: u32,
pub behind: u32,
}
@@ -68,6 +97,11 @@ pub struct CommitDetails {
pub committer_name: SharedString,
}
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct Remote {
+ pub name: SharedString,
+}
+
pub enum ResetMode {
// reset the branch pointer, leave index and worktree unchanged
// (this will make it look like things that were committed are now
@@ -139,6 +173,22 @@ pub trait GitRepository: Send + Sync {
fn unstage_paths(&self, paths: &[RepoPath]) -> Result<()>;
fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()>;
+
+ fn push(
+ &self,
+ branch_name: &str,
+ upstream_name: &str,
+ options: Option<PushOptions>,
+ ) -> Result<()>;
+ fn pull(&self, branch_name: &str, upstream_name: &str) -> Result<()>;
+ fn get_remotes(&self, branch_name: Option<&str>) -> Result<Vec<Remote>>;
+ fn fetch(&self) -> Result<()>;
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, JsonSchema)]
+pub enum PushOptions {
+ SetUpstream,
+ Force,
}
impl std::fmt::Debug for dyn GitRepository {
@@ -165,6 +215,14 @@ impl RealGitRepository {
hosting_provider_registry,
}
}
+
+ fn working_directory(&self) -> Result<PathBuf> {
+ self.repository
+ .lock()
+ .workdir()
+ .context("failed to read git work directory")
+ .map(Path::to_path_buf)
+ }
}
// https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
@@ -209,12 +267,7 @@ impl GitRepository for RealGitRepository {
}
fn reset(&self, commit: &str, mode: ResetMode) -> Result<()> {
- let working_directory = self
- .repository
- .lock()
- .workdir()
- .context("failed to read git work directory")?
- .to_path_buf();
+ let working_directory = self.working_directory()?;
let mode_flag = match mode {
ResetMode::Mixed => "--mixed",
@@ -238,12 +291,7 @@ impl GitRepository for RealGitRepository {
if paths.is_empty() {
return Ok(());
}
- let working_directory = self
- .repository
- .lock()
- .workdir()
- .context("failed to read git work directory")?
- .to_path_buf();
+ let working_directory = self.working_directory()?;
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
@@ -296,12 +344,7 @@ impl GitRepository for RealGitRepository {
}
fn set_index_text(&self, path: &RepoPath, content: Option<String>) -> anyhow::Result<()> {
- let working_directory = self
- .repository
- .lock()
- .workdir()
- .context("failed to read git work directory")?
- .to_path_buf();
+ let working_directory = self.working_directory()?;
if let Some(content) = content {
let mut child = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
@@ -485,12 +528,7 @@ impl GitRepository for RealGitRepository {
}
fn stage_paths(&self, paths: &[RepoPath]) -> Result<()> {
- let working_directory = self
- .repository
- .lock()
- .workdir()
- .context("failed to read git work directory")?
- .to_path_buf();
+ let working_directory = self.working_directory()?;
if !paths.is_empty() {
let output = new_std_command(&self.git_binary_path)
@@ -498,6 +536,8 @@ impl GitRepository for RealGitRepository {
.args(["update-index", "--add", "--remove", "--"])
.args(paths.iter().map(|p| p.as_ref()))
.output()?;
+
+ // TODO: Get remote response out of this and show it to the user
if !output.status.success() {
return Err(anyhow!(
"Failed to stage paths:\n{}",
@@ -509,12 +549,7 @@ impl GitRepository for RealGitRepository {
}
fn unstage_paths(&self, paths: &[RepoPath]) -> Result<()> {
- let working_directory = self
- .repository
- .lock()
- .workdir()
- .context("failed to read git work directory")?
- .to_path_buf();
+ let working_directory = self.working_directory()?;
if !paths.is_empty() {
let output = new_std_command(&self.git_binary_path)
@@ -522,6 +557,8 @@ impl GitRepository for RealGitRepository {
.args(["reset", "--quiet", "--"])
.args(paths.iter().map(|p| p.as_ref()))
.output()?;
+
+ // TODO: Get remote response out of this and show it to the user
if !output.status.success() {
return Err(anyhow!(
"Failed to unstage:\n{}",
@@ -533,32 +570,141 @@ impl GitRepository for RealGitRepository {
}
fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()> {
- let working_directory = self
- .repository
- .lock()
- .workdir()
- .context("failed to read git work directory")?
- .to_path_buf();
- let mut args = vec!["commit", "--quiet", "-m", message, "--cleanup=strip"];
- let author = name_and_email.map(|(name, email)| format!("{name} <{email}>"));
- if let Some(author) = author.as_deref() {
- args.push("--author");
- args.push(author);
+ let working_directory = self.working_directory()?;
+
+ let mut cmd = new_std_command(&self.git_binary_path);
+ cmd.current_dir(&working_directory)
+ .args(["commit", "--quiet", "-m"])
+ .arg(message)
+ .arg("--cleanup=strip");
+
+ if let Some((name, email)) = name_and_email {
+ cmd.arg("--author").arg(&format!("{name} <{email}>"));
}
+ let output = cmd.output()?;
+
+ // TODO: Get remote response out of this and show it to the user
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to commit:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
+ }
+ Ok(())
+ }
+
+ fn push(
+ &self,
+ branch_name: &str,
+ remote_name: &str,
+ options: Option<PushOptions>,
+ ) -> Result<()> {
+ let working_directory = self.working_directory()?;
+
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
- .args(args)
+ .args(["push", "--quiet"])
+ .args(options.map(|option| match option {
+ PushOptions::SetUpstream => "--set-upstream",
+ PushOptions::Force => "--force-with-lease",
+ }))
+ .arg(remote_name)
+ .arg(format!("{}:{}", branch_name, branch_name))
.output()?;
if !output.status.success() {
return Err(anyhow!(
- "Failed to commit:\n{}",
+ "Failed to push:\n{}",
String::from_utf8_lossy(&output.stderr)
));
}
+
+ // TODO: Get remote response out of this and show it to the user
Ok(())
}
+
+ fn pull(&self, branch_name: &str, remote_name: &str) -> Result<()> {
+ let working_directory = self.working_directory()?;
+
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["pull", "--quiet"])
+ .arg(remote_name)
+ .arg(branch_name)
+ .output()?;
+
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to pull:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
+ }
+
+ // TODO: Get remote response out of this and show it to the user
+ Ok(())
+ }
+
+ fn fetch(&self) -> Result<()> {
+ let working_directory = self.working_directory()?;
+
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["fetch", "--quiet", "--all"])
+ .output()?;
+
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to fetch:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
+ }
+
+ // TODO: Get remote response out of this and show it to the user
+ Ok(())
+ }
+
+ fn get_remotes(&self, branch_name: Option<&str>) -> Result<Vec<Remote>> {
+ let working_directory = self.working_directory()?;
+
+ if let Some(branch_name) = branch_name {
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["config", "--get"])
+ .arg(format!("branch.{}.remote", branch_name))
+ .output()?;
+
+ if output.status.success() {
+ let remote_name = String::from_utf8_lossy(&output.stdout);
+
+ return Ok(vec![Remote {
+ name: remote_name.trim().to_string().into(),
+ }]);
+ }
+ }
+
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["remote"])
+ .output()?;
+
+ if output.status.success() {
+ let remote_names = String::from_utf8_lossy(&output.stdout)
+ .split('\n')
+ .filter(|name| !name.is_empty())
+ .map(|name| Remote {
+ name: name.trim().to_string().into(),
+ })
+ .collect();
+
+ return Ok(remote_names);
+ } else {
+ return Err(anyhow!(
+ "Failed to get remotes:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
+ }
+ }
}
#[derive(Debug, Clone)]
@@ -743,6 +889,22 @@ impl GitRepository for FakeGitRepository {
fn commit(&self, _message: &str, _name_and_email: Option<(&str, &str)>) -> Result<()> {
unimplemented!()
}
+
+ fn push(&self, _branch: &str, _remote: &str, _options: Option<PushOptions>) -> Result<()> {
+ unimplemented!()
+ }
+
+ fn pull(&self, _branch: &str, _remote: &str) -> Result<()> {
+ unimplemented!()
+ }
+
+ fn fetch(&self) -> Result<()> {
+ unimplemented!()
+ }
+
+ fn get_remotes(&self, _branch: Option<&str>) -> Result<Vec<Remote>> {
+ unimplemented!()
+ }
}
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
@@ -911,9 +1073,9 @@ fn parse_branch_input(input: &str) -> Result<Vec<Branch>> {
Ok(branches)
}
-fn parse_upstream_track(upstream_track: &str) -> Result