diff --git a/Cargo.lock b/Cargo.lock index 7e47d6cae858c..c2e81b2c3bfba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2785,7 +2785,8 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "rustc-hash 1.1.0", + "indexmap 2.7.0", + "rustc-hash 2.1.0", ] [[package]] @@ -5194,6 +5195,7 @@ dependencies = [ "util", "windows 0.58.0", "workspace", + "worktree", ] [[package]] @@ -6232,7 +6234,7 @@ dependencies = [ "heed", "html_to_markdown", "http_client", - "indexmap 1.9.3", + "indexmap 2.7.0", "indoc", "parking_lot", "paths", @@ -8642,6 +8644,7 @@ dependencies = [ "editor", "file_icons", "fuzzy", + "git", "gpui", "itertools 0.13.0", "language", @@ -9829,7 +9832,7 @@ dependencies = [ "file_icons", "git", "gpui", - "indexmap 1.9.3", + "indexmap 2.7.0", "language", "menu", "pretty_assertions", @@ -11224,6 +11227,7 @@ checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" dependencies = [ "dyn-clone", "indexmap 1.9.3", + "indexmap 2.7.0", "schemars_derive", "serde", "serde_json", @@ -13025,7 +13029,7 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", - "indexmap 1.9.3", + "indexmap 2.7.0", "log", "palette", "parking_lot", @@ -13060,7 +13064,7 @@ dependencies = [ "anyhow", "clap", "gpui", - "indexmap 1.9.3", + "indexmap 2.7.0", "log", "palette", "rust-embed", diff --git a/Cargo.toml b/Cargo.toml index 3ada0ba5a4ac1..1a6c589079e04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -392,7 +392,7 @@ hyper = "0.14" http = "1.1" ignore = "0.4.22" image = "0.25.1" -indexmap = { version = "1.6.2", features = ["serde"] } +indexmap = { version = "2.7.0", features = ["serde"] } indoc = "2" itertools = "0.13.0" jsonwebtoken = "9.3" @@ -443,9 +443,10 @@ runtimelib = { version = "0.24.0", default-features = false, features = [ ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } +rustc-hash = "2.1.0" rustls = "0.21.12" rustls-native-certs = "0.8.0" -schemars = { version = "0.8", features = ["impl_json_schema"] } +schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] } semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } serde_derive = { version = "1.0", features = ["deserialize_in_place"] } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 7ff8aa7a9fbb1..064d683b088df 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -643,7 +643,6 @@ impl Database { canonical_path: db_entry.canonical_path, is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, - git_status: db_entry.git_status.map(|status| status as i32), // This is only used in the summarization backlog, so if it's None, // that just means we won't be able to detect when to resummarize // based on total number of backlogged bytes - instead, we'd go diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index a3a99bee71a44..902c2e001fcea 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -662,7 +662,6 @@ impl Database { canonical_path: db_entry.canonical_path, is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, - git_status: db_entry.git_status.map(|status| status as i32), // This is only used in the summarization backlog, so if it's None, // that just means we won't be able to detect when to resummarize // based on total number of backlogged bytes - instead, we'd go diff --git a/crates/collections/Cargo.toml 
b/crates/collections/Cargo.toml
index b16b4c1300e04..3daaf83c69bf0 100644
--- a/crates/collections/Cargo.toml
+++ b/crates/collections/Cargo.toml
@@ -16,4 +16,5 @@ doctest = false
 test-support = []
 
 [dependencies]
-rustc-hash = "1.1"
+indexmap.workspace = true
+rustc-hash.workspace = true
diff --git a/crates/collections/src/collections.rs b/crates/collections/src/collections.rs
index 25f6135c1f887..be7bbdb59f646 100644
--- a/crates/collections/src/collections.rs
+++ b/crates/collections/src/collections.rs
@@ -4,12 +4,24 @@ pub type HashMap<K, V> = FxHashMap<K, V>;
 #[cfg(feature = "test-support")]
 pub type HashSet<T> = FxHashSet<T>;
 
+#[cfg(feature = "test-support")]
+pub type IndexMap<K, V> = indexmap::IndexMap<K, V>;
+
+#[cfg(feature = "test-support")]
+pub type IndexSet<T> = indexmap::IndexSet<T>;
+
 #[cfg(not(feature = "test-support"))]
 pub type HashMap<K, V> = std::collections::HashMap<K, V>;
 
 #[cfg(not(feature = "test-support"))]
 pub type HashSet<T> = std::collections::HashSet<T>;
 
+#[cfg(not(feature = "test-support"))]
+pub type IndexMap<K, V> = indexmap::IndexMap<K, V>;
+
+#[cfg(not(feature = "test-support"))]
+pub type IndexSet<T> = indexmap::IndexSet<T>;
+
 pub use rustc_hash::FxHasher;
 pub use rustc_hash::{FxHashMap, FxHashSet};
 pub use std::collections::*;
diff --git a/crates/editor/src/git/project_diff.rs b/crates/editor/src/git/project_diff.rs
index e76e5922dbe9a..2f791fb6b608f 100644
--- a/crates/editor/src/git/project_diff.rs
+++ b/crates/editor/src/git/project_diff.rs
@@ -194,14 +194,24 @@ impl ProjectDiffEditor {
         let open_tasks = project
             .update(&mut cx, |project, cx| {
                 let worktree = project.worktree_for_id(id, cx)?;
-                let applicable_entries = worktree
-                    .read(cx)
-                    .entries(false, 0)
-                    .filter(|entry| !entry.is_external)
-                    .filter(|entry| entry.is_file())
-                    .filter_map(|entry| Some((entry.git_status?, entry)))
-                    .filter_map(|(git_status, entry)| {
-                        Some((git_status, entry.id, project.path_for_entry(entry.id, cx)?))
+                let snapshot = worktree.read(cx).snapshot();
+                let applicable_entries = snapshot
+                    .repositories()
+                    .flat_map(|entry| {
+                        entry.status().map(|git_entry| {
+                            (git_entry.git_status, entry.join(git_entry.repo_path))
+                        })
+                    })
+                    .filter_map(|(status, path)| {
+                        let id = snapshot.entry_for_path(&path)?.id;
+                        Some((
+                            status,
+                            id,
+                            ProjectPath {
+                                worktree_id: snapshot.id(),
+                                path: path.into(),
+                            },
+                        ))
                     })
                     .collect::<Vec<_>>();
                 Some(
diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs
index 48b7af086bc7b..620fcd5ec40db 100644
--- a/crates/editor/src/items.rs
+++ b/crates/editor/src/items.rs
@@ -615,9 +615,20 @@ impl Item for Editor {
             .read(cx)
             .as_singleton()
             .and_then(|buffer| buffer.read(cx).project_path(cx))
-            .and_then(|path| self.project.as_ref()?.read(cx).entry_for_path(&path, cx))
-            .map(|entry| {
-                entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected)
+            .and_then(|path| {
+                let project = self.project.as_ref()?.read(cx);
+                let entry = project.entry_for_path(&path, cx)?;
+                let git_status = project
+                    .worktree_for_id(path.worktree_id, cx)?
+                    .read(cx)
+                    .snapshot()
+                    .status_for_file(path.path);
+
+                Some(entry_git_aware_label_color(
+                    git_status,
+                    entry.is_ignored,
+                    params.selected,
+                ))
             })
             .unwrap_or_else(|| entry_label_color(params.selected))
     } else {
@@ -1559,10 +1570,10 @@ pub fn entry_git_aware_label_color(
         Color::Ignored
     } else {
         match git_status {
-            Some(GitFileStatus::Added) => Color::Created,
+            Some(GitFileStatus::Added) | Some(GitFileStatus::Untracked) => Color::Created,
             Some(GitFileStatus::Modified) => Color::Modified,
             Some(GitFileStatus::Conflict) => Color::Conflict,
-            None => entry_label_color(selected),
+            Some(GitFileStatus::Deleted) | None => entry_label_color(selected),
         }
     }
 }
diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs
index cf07b74ac5d8d..c608c23cf357d 100644
--- a/crates/git/src/git.rs
+++ b/crates/git/src/git.rs
@@ -16,6 +16,7 @@ use std::sync::LazyLock;
 pub use crate::hosting_provider::*;
 pub use crate::remote::*;
 pub use git2 as libgit;
+pub use repository::WORK_DIRECTORY_REPO_PATH;
 
 pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git"));
 pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies"));
diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs
index b37e517d43233..d2515237b3400 100644
--- a/crates/git/src/repository.rs
+++ b/crates/git/src/repository.rs
@@ -7,6 +7,8 @@ use gpui::SharedString;
 use parking_lot::Mutex;
 use rope::Rope;
 use serde::{Deserialize, Serialize};
+use std::borrow::Borrow;
+use std::sync::LazyLock;
 use std::{
     cmp::Ordering,
     path::{Component, Path, PathBuf},
@@ -37,7 +39,7 @@ pub trait GitRepository: Send + Sync {
     /// Returns the SHA of the current HEAD.
     fn head_sha(&self) -> Option<String>;
 
-    fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus>;
+    fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus>;
 
     fn branches(&self) -> Result<Vec<Branch>>;
     fn change_branch(&self, _: &str) -> Result<()>;
@@ -132,7 +134,7 @@ impl GitRepository for RealGitRepository {
         Some(self.repository.lock().head().ok()?.target()?.to_string())
     }
 
-    fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus> {
+    fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
         let working_directory = self
             .repository
             .lock()
@@ -289,8 +291,9 @@ impl GitRepository for FakeGitRepository {
         state.dot_git_dir.clone()
     }
 
-    fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus> {
+    fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
         let state = self.state.lock();
+
         let mut entries = state
             .worktree_statuses
             .iter()
@@ -306,6 +309,7 @@
             })
             .collect::<Vec<_>>();
         entries.sort_unstable_by(|a, b| a.0.cmp(&b.0));
+
         Ok(GitStatus {
             entries: entries.into(),
         })
@@ -394,6 +398,8 @@ pub enum GitFileStatus {
     Added,
     Modified,
     Conflict,
+    Deleted,
+    Untracked,
 }
 
 impl GitFileStatus {
@@ -421,20 +427,30 @@ impl GitFileStatus {
     }
 }
 
+pub static WORK_DIRECTORY_REPO_PATH: LazyLock<RepoPath> =
+    LazyLock::new(|| RepoPath(Path::new("").into()));
+
 #[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
-pub struct RepoPath(pub PathBuf);
+pub struct RepoPath(pub Arc<Path>);
 
 impl RepoPath {
     pub fn new(path: PathBuf) -> Self {
         debug_assert!(path.is_relative(), "Repo paths must be relative");
-        RepoPath(path)
+        RepoPath(path.into())
+    }
+
+    pub fn from_str(path: &str) -> Self {
+        let path = Path::new(path);
+        debug_assert!(path.is_relative(), "Repo paths must be relative");
+
+        RepoPath(path.into())
     }
 }
 
 impl From<&Path> for RepoPath {
     fn from(value: &Path) -> Self {
-        RepoPath::new(value.to_path_buf())
+        RepoPath::new(value.into())
     }
 }
 
@@ -444,9 +460,15 @@ impl From for RepoPath {
     }
 }
 
+impl From<&str> for RepoPath {
+    fn from(value: &str) -> Self {
+        Self::from_str(value)
+    }
+}
+
 impl Default for RepoPath {
     fn default() -> Self {
-        RepoPath(PathBuf::new())
+        RepoPath(Path::new("").into())
     }
 }
 
@@ -457,13 +479,19 @@ impl AsRef<Path> for RepoPath {
     }
 }
 
 impl std::ops::Deref for RepoPath {
-    type Target = PathBuf;
+    type Target = Path;
 
     fn deref(&self) -> &Self::Target {
         &self.0
     }
 }
 
+impl Borrow<Path> for RepoPath {
+    fn borrow(&self) -> &Path {
+        self.0.as_ref()
+    }
+}
+
 #[derive(Debug)]
 pub struct RepoPathDescendants<'a>(pub &'a Path);
diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs
index f8ffdc6714b5b..0d62cfaae9df5 100644
--- a/crates/git/src/status.rs
+++ b/crates/git/src/status.rs
@@ -1,10 +1,6 @@
 use crate::repository::{GitFileStatus, RepoPath};
 use anyhow::{anyhow, Result};
-use std::{
-    path::{Path, PathBuf},
-    process::Stdio,
-    sync::Arc,
-};
+use std::{path::Path, process::Stdio, sync::Arc};
 
 #[derive(Clone)]
 pub struct GitStatus {
@@ -15,7 +11,7 @@ impl GitStatus {
     pub(crate) fn new(
         git_binary: &Path,
         working_directory: &Path,
-        path_prefixes: &[PathBuf],
+        path_prefixes: &[RepoPath],
     ) -> Result<Self> {
         let child = util::command::new_std_command(git_binary)
             .current_dir(working_directory)
@@ -27,7 +23,7 @@ impl GitStatus {
                 "-z",
             ])
             .args(path_prefixes.iter().map(|path_prefix| {
-                if *path_prefix == Path::new("") {
+                if path_prefix.0.as_ref() == Path::new("") {
                     Path::new(".")
                 } else {
                     path_prefix
@@ -55,10 +51,12 @@ impl GitStatus {
                 let (status, path) = entry.split_at(3);
                 let status = status.trim();
                 Some((
-                    RepoPath(PathBuf::from(path)),
+                    RepoPath(Path::new(path).into()),
                     match status {
-                        "A" | "??" => GitFileStatus::Added,
+                        "A" => GitFileStatus::Added,
                         "M" => GitFileStatus::Modified,
+                        "D" => GitFileStatus::Deleted,
+                        "??"
=> GitFileStatus::Untracked, _ => return None, }, )) @@ -75,7 +73,7 @@ impl GitStatus { pub fn get(&self, path: &Path) -> Option { self.entries - .binary_search_by(|(repo_path, _)| repo_path.0.as_path().cmp(path)) + .binary_search_by(|(repo_path, _)| repo_path.0.as_ref().cmp(path)) .ok() .map(|index| self.entries[index].1) } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 69e70805e751e..38c50f0c8062f 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -14,9 +14,11 @@ path = "src/git_ui.rs" [dependencies] anyhow.workspace = true +collections.workspace = true db.workspace = true editor.workspace = true futures.workspace = true +git.workspace = true gpui.workspace = true language.workspace = true menu.workspace = true @@ -29,8 +31,7 @@ settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -git.workspace = true -collections.workspace = true +worktree.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index be3f4485fbcf7..b9dee054b575a 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1,11 +1,16 @@ +use crate::{git_status_icon, settings::GitPanelSettings}; +use crate::{CommitAllChanges, CommitStagedChanges, DiscardAll, StageAll, UnstageAll}; use anyhow::{Context as _, Result}; -use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use editor::{ scroll::{Autoscroll, AutoscrollStrategy}, Editor, MultiBuffer, DEFAULT_MULTIBUFFER_CONTEXT, }; -use git::{diff::DiffHunk, repository::GitFileStatus}; +use git::{ + diff::DiffHunk, + repository::{GitFileStatus, RepoPath}, +}; +use gpui::*; use gpui::{ actions, prelude::*, uniform_list, Action, AppContext, AsyncWindowContext, ClickEvent, CursorStyle, EventEmitter, FocusHandle, FocusableView, KeyContext, @@ -14,7 +19,7 @@ use gpui::{ }; use language::{Buffer, BufferRow, OffsetRangeExt}; use menu::{SelectNext, SelectPrev}; -use project::{Entry, EntryKind, Fs, Project, ProjectEntryId, WorktreeId}; +use project::{EntryKind, Fs, Project, ProjectEntryId, ProjectPath, WorktreeId}; use serde::{Deserialize, Serialize}; use settings::Settings as _; use std::{ @@ -22,7 +27,7 @@ use std::{ collections::HashSet, ffi::OsStr, ops::{Deref, Range}, - path::{Path, PathBuf}, + path::PathBuf, rc::Rc, sync::Arc, time::Duration, @@ -37,9 +42,7 @@ use workspace::{ dock::{DockPosition, Panel, PanelEvent}, ItemHandle, Workspace, }; - -use crate::{git_status_icon, settings::GitPanelSettings}; -use crate::{CommitAllChanges, CommitStagedChanges, DiscardAll, StageAll, UnstageAll}; +use worktree::StatusEntry; actions!(git_panel, [ToggleFocus]); @@ -69,7 +72,7 @@ pub struct GitStatusEntry {} struct EntryDetails { filename: String, display_name: String, - path: Arc, + path: RepoPath, kind: EntryKind, depth: usize, is_expanded: bool, @@ -101,7 +104,8 @@ pub struct GitPanel { scrollbar_state: ScrollbarState, selected_item: Option, show_scrollbar: bool, - expanded_dir_ids: HashMap>, + // todo!(): Reintroduce expanded directories, once we're deriving directories from paths + // expanded_dir_ids: HashMap>, // The entries that are currently shown in the panel, aka // not hidden by folding or such @@ -115,18 +119,20 @@ pub struct GitPanel { #[derive(Debug, Clone)] struct WorktreeEntries { worktree_id: WorktreeId, + // TODO support multiple repositories per worktree + work_directory: worktree::WorkDirectory, visible_entries: Vec, - paths: Rc>>>, + paths: Rc>>, } 
#[derive(Debug, Clone)] struct GitPanelEntry { - entry: Entry, + entry: worktree::StatusEntry, hunks: Rc>>, } impl Deref for GitPanelEntry { - type Target = Entry; + type Target = worktree::StatusEntry; fn deref(&self) -> &Self::Target { &self.entry @@ -134,11 +140,11 @@ impl Deref for GitPanelEntry { } impl WorktreeEntries { - fn paths(&self) -> &HashSet> { + fn paths(&self) -> &HashSet { self.paths.get_or_init(|| { self.visible_entries .iter() - .map(|e| (e.entry.path.clone())) + .map(|e| (e.entry.repo_path.clone())) .collect() }) } @@ -165,8 +171,11 @@ impl GitPanel { }) .detach(); cx.subscribe(&project, |this, _, event, cx| match event { - project::Event::WorktreeRemoved(id) => { - this.expanded_dir_ids.remove(id); + project::Event::GitRepositoryUpdated => { + this.update_visible_entries(None, None, cx); + } + project::Event::WorktreeRemoved(_id) => { + // this.expanded_dir_ids.remove(id); this.update_visible_entries(None, None, cx); cx.notify(); } @@ -183,7 +192,7 @@ impl GitPanel { project::Event::Closed => { this.git_diff_editor_updates = Task::ready(()); this.reveal_in_editor = Task::ready(()); - this.expanded_dir_ids.clear(); + // this.expanded_dir_ids.clear(); this.visible_entries.clear(); this.git_diff_editor = None; } @@ -200,8 +209,7 @@ impl GitPanel { pending_serialization: Task::ready(None), visible_entries: Vec::new(), current_modifiers: cx.modifiers(), - expanded_dir_ids: Default::default(), - + // expanded_dir_ids: Default::default(), width: Some(px(360.)), scrollbar_state: ScrollbarState::new(scroll_handle.clone()).parent_view(cx.view()), scroll_handle, @@ -288,16 +296,16 @@ impl GitPanel { } fn calculate_depth_and_difference( - entry: &Entry, - visible_worktree_entries: &HashSet>, + entry: &StatusEntry, + visible_worktree_entries: &HashSet, ) -> (usize, usize) { let (depth, difference) = entry - .path + .repo_path .ancestors() .skip(1) // Skip the entry itself .find_map(|ancestor| { if let Some(parent_entry) = visible_worktree_entries.get(ancestor) { - let entry_path_components_count = entry.path.components().count(); + let entry_path_components_count = entry.repo_path.components().count(); let parent_path_components_count = parent_entry.components().count(); let difference = entry_path_components_count - parent_path_components_count; let depth = parent_entry @@ -432,13 +440,7 @@ impl GitPanel { fn entry_count(&self) -> usize { self.visible_entries .iter() - .map(|worktree_entries| { - worktree_entries - .visible_entries - .iter() - .filter(|entry| entry.git_status.is_some()) - .count() - }) + .map(|worktree_entries| worktree_entries.visible_entries.len()) .sum() } @@ -446,7 +448,7 @@ impl GitPanel { &self, range: Range, cx: &mut ViewContext, - mut callback: impl FnMut(ProjectEntryId, EntryDetails, &mut ViewContext), + mut callback: impl FnMut(usize, EntryDetails, &mut ViewContext), ) { let mut ix = 0; for worktree_entries in &self.visible_entries { @@ -468,11 +470,11 @@ impl GitPanel { { let snapshot = worktree.read(cx).snapshot(); let root_name = OsStr::new(snapshot.root_name()); - let expanded_entry_ids = self - .expanded_dir_ids - .get(&snapshot.id()) - .map(Vec::as_slice) - .unwrap_or(&[]); + // let expanded_entry_ids = self + // .expanded_dir_ids + // .get(&snapshot.id()) + // .map(Vec::as_slice) + // .unwrap_or(&[]); let entry_range = range.start.saturating_sub(ix)..end_ix - ix; let entries = worktree_entries.paths(); @@ -484,21 +486,21 @@ impl GitPanel { { let index = index_start + i; let status = entry.git_status; - let is_expanded = 
expanded_entry_ids.binary_search(&entry.id).is_ok(); + let is_expanded = true; //expanded_entry_ids.binary_search(&entry.id).is_ok(); let (depth, difference) = Self::calculate_depth_and_difference(entry, entries); let filename = match difference { diff if diff > 1 => entry - .path + .repo_path .iter() - .skip(entry.path.components().count() - diff) + .skip(entry.repo_path.components().count() - diff) .collect::() .to_str() .unwrap_or_default() .to_string(), _ => entry - .path + .repo_path .file_name() .map(|name| name.to_string_lossy().into_owned()) .unwrap_or_else(|| root_name.to_string_lossy().to_string()), @@ -506,16 +508,17 @@ impl GitPanel { let details = EntryDetails { filename, - display_name: entry.path.to_string_lossy().into_owned(), - kind: entry.kind, + display_name: entry.repo_path.to_string_lossy().into_owned(), + // FIXME get it from StatusEntry? + kind: EntryKind::File, is_expanded, - path: entry.path.clone(), - status, + path: entry.repo_path.clone(), + status: Some(status), hunks: entry.hunks.clone(), depth, index, }; - callback(entry.id, details, cx); + callback(ix, details, cx); } } ix = end_ix; @@ -527,7 +530,7 @@ impl GitPanel { fn update_visible_entries( &mut self, for_worktree: Option, - new_selected_entry: Option<(WorktreeId, ProjectEntryId)>, + _new_selected_entry: Option<(WorktreeId, ProjectEntryId)>, cx: &mut ViewContext, ) { let project = self.project.read(cx); @@ -549,24 +552,35 @@ impl GitPanel { None => false, }); for worktree in project.visible_worktrees(cx) { - let worktree_id = worktree.read(cx).id(); + let snapshot = worktree.read(cx).snapshot(); + let worktree_id = snapshot.id(); + if for_worktree.is_some() && for_worktree != Some(worktree_id) { continue; } - let snapshot = worktree.read(cx).snapshot(); - let mut visible_worktree_entries = snapshot - .entries(false, 0) - .filter(|entry| !entry.is_external) - .filter(|entry| entry.git_status.is_some()) - .cloned() - .collect::>(); - snapshot.propagate_git_statuses(&mut visible_worktree_entries); - project::sort_worktree_entries(&mut visible_worktree_entries); + let mut visible_worktree_entries = Vec::new(); + // Only use the first repository for now + let repositories = snapshot.repositories().take(1); + let mut work_directory = None; + for repository in repositories { + visible_worktree_entries.extend(repository.status()); + work_directory = Some(worktree::WorkDirectory::clone(repository)); + } + + // let mut visible_worktree_entries = snapshot + // .entries(false, 0) + // .filter(|entry| !entry.is_external) + // .filter(|entry| entry.git_status.is_some()) + // .cloned() + // .collect::>(); + // snapshot.propagate_git_statuses(&mut visible_worktree_entries); + // project::sort_worktree_entries(&mut visible_worktree_entries); if !visible_worktree_entries.is_empty() { self.visible_entries.push(WorktreeEntries { worktree_id, + work_directory: work_directory.unwrap(), visible_entries: visible_worktree_entries .into_iter() .map(|entry| GitPanelEntry { @@ -580,24 +594,25 @@ impl GitPanel { } self.visible_entries.extend(after_update); - if let Some((worktree_id, entry_id)) = new_selected_entry { - self.selected_item = self.visible_entries.iter().enumerate().find_map( - |(worktree_index, worktree_entries)| { - if worktree_entries.worktree_id == worktree_id { - worktree_entries - .visible_entries - .iter() - .position(|entry| entry.id == entry_id) - .map(|entry_index| { - worktree_index * worktree_entries.visible_entries.len() - + entry_index - }) - } else { - None - } - }, - ); - } + // todo!(): re-implement 
this + // if let Some((worktree_id, entry_id)) = new_selected_entry { + // self.selected_item = self.visible_entries.iter().enumerate().find_map( + // |(worktree_index, worktree_entries)| { + // if worktree_entries.worktree_id == worktree_id { + // worktree_entries + // .visible_entries + // .iter() + // .position(|entry| entry.id == entry_id) + // .map(|entry_index| { + // worktree_index * worktree_entries.visible_entries.len() + // + entry_index + // }) + // } else { + // None + // } + // }, + // ); + // } let project = self.project.downgrade(); self.git_diff_editor_updates = cx.spawn(|git_panel, mut cx| async move { @@ -612,12 +627,14 @@ impl GitPanel { .visible_entries .iter() .filter_map(|entry| { - let git_status = entry.git_status()?; + let git_status = entry.git_status; let entry_hunks = entry.hunks.clone(); let (entry_path, unstaged_changes_task) = project.update(cx, |project, cx| { - let entry_path = - project.path_for_entry(entry.id, cx)?; + let entry_path = ProjectPath { + worktree_id: worktree_entries.worktree_id, + path: worktree_entries.work_directory.unrelativize(&entry.repo_path)?, + }; let open_task = project.open_path(entry_path.clone(), cx); let unstaged_changes_task = @@ -682,8 +699,8 @@ impl GitPanel { ) .collect() } - // TODO support conflicts display - GitFileStatus::Conflict => Vec::new(), + // TODO support these + GitFileStatus::Conflict | GitFileStatus::Deleted | GitFileStatus::Untracked => Vec::new(), } }).clone() })?; @@ -992,18 +1009,17 @@ impl GitPanel { fn render_entry( &self, - id: ProjectEntryId, + ix: usize, selected: bool, details: EntryDetails, cx: &ViewContext, ) -> impl IntoElement { - let id = id.to_proto() as usize; - let checkbox_id = ElementId::Name(format!("checkbox_{}", id).into()); + let checkbox_id = ElementId::Name(format!("checkbox_{}", ix).into()); let is_staged = ToggleState::Selected; let handle = cx.view().downgrade(); h_flex() - .id(id) + .id(("git-panel-entry", ix)) .h(px(28.)) .w_full() .pl(px(12. + 12. * details.depth as f32)) @@ -1019,7 +1035,8 @@ impl GitPanel { this.child(git_status_icon(status)) }) .child( - ListItem::new(("label", id)) + // FIXME is it okay to use ix here? do we need a proper ID for git status entries? 
+ ListItem::new(("label", ix)) .toggle_state(selected) .child(h_flex().gap_1p5().child(details.display_name.clone())) .on_click(move |e, cx| { diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 5aa9a361fa11d..c1c3bd3ac0b8f 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -44,10 +44,13 @@ const REMOVED_COLOR: Hsla = Hsla { // TODO: Add updated status colors to theme pub fn git_status_icon(status: GitFileStatus) -> impl IntoElement { match status { - GitFileStatus::Added => Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR)), + GitFileStatus::Added | GitFileStatus::Untracked => { + Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR)) + } GitFileStatus::Modified => { Icon::new(IconName::SquareDot).color(Color::Custom(MODIFIED_COLOR)) } GitFileStatus::Conflict => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)), + GitFileStatus::Deleted => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)), } } diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 570948a82282f..b78f1bd085cae 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -96,12 +96,18 @@ impl Item for ImageView { fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { let project_path = self.image_item.read(cx).project_path(cx); + let label_color = if ItemSettings::get_global(cx).git_status { + let git_status = self + .project + .read(cx) + .project_path_git_status(&project_path, cx); + self.project .read(cx) .entry_for_path(&project_path, cx) .map(|entry| { - entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected) + entry_git_aware_label_color(git_status, entry.is_ignored, params.selected) }) .unwrap_or_else(|| params.text_color()) } else { diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index 6dfe1ceccc052..51e80d8fdfdf5 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -19,8 +19,9 @@ db.workspace = true editor.workspace = true file_icons.workspace = true fuzzy.workspace = true -itertools.workspace = true +git.workspace = true gpui.workspace = true +itertools.workspace = true language.workspace = true log.workspace = true menu.workspace = true @@ -36,8 +37,8 @@ smol.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -worktree.workspace = true workspace.workspace = true +worktree.workspace = true [dev-dependencies] search = { workspace = true, features = ["test-support"] } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index d43b76671a0d2..72763ceac7d10 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -56,7 +56,7 @@ use workspace::{ }, OpenInTerminal, WeakItemHandle, Workspace, }; -use worktree::{Entry, ProjectEntryId, WorktreeId}; +use worktree::{Entry, GitEntry, ProjectEntryId, WorktreeId}; actions!( outline_panel, @@ -348,10 +348,17 @@ enum ExcerptOutlines { NotFetched, } +#[derive(Clone, Debug, PartialEq, Eq)] +struct FoldedDirsEntry { + worktree_id: WorktreeId, + entries: Vec, +} + +// TODO: collapse the inner enums into panel entry #[derive(Clone, Debug)] enum PanelEntry { Fs(FsEntry), - FoldedDirs(WorktreeId, Vec), + FoldedDirs(FoldedDirsEntry), Outline(OutlineEntry), Search(SearchEntry), } @@ -383,7 +390,18 @@ impl PartialEq for PanelEntry { fn eq(&self, other: &Self) 
-> bool { match (self, other) { (Self::Fs(a), Self::Fs(b)) => a == b, - (Self::FoldedDirs(a1, a2), Self::FoldedDirs(b1, b2)) => a1 == b1 && a2 == b2, + ( + Self::FoldedDirs(FoldedDirsEntry { + worktree_id: worktree_id_a, + entries: entries_a, + .. + }), + Self::FoldedDirs(FoldedDirsEntry { + worktree_id: worktree_id_b, + entries: entries_b, + .. + }), + ) => worktree_id_a == worktree_id_b && entries_a == entries_b, (Self::Outline(a), Self::Outline(b)) => a == b, ( Self::Search(SearchEntry { @@ -505,54 +523,124 @@ impl SearchData { } } -#[derive(Clone, Debug, PartialEq, Eq)] -enum OutlineEntry { - Excerpt(BufferId, ExcerptId, ExcerptRange), - Outline(BufferId, ExcerptId, Outline), +#[derive(Clone, Debug, Eq)] +struct OutlineEntryExcerpt { + id: ExcerptId, + buffer_id: BufferId, + range: ExcerptRange, +} + +impl PartialEq for OutlineEntryExcerpt { + fn eq(&self, other: &Self) -> bool { + self.buffer_id == other.buffer_id && self.id == other.id + } +} + +impl Hash for OutlineEntryExcerpt { + fn hash(&self, state: &mut H) { + (self.buffer_id, self.id).hash(state) + } } #[derive(Clone, Debug, Eq)] -enum FsEntry { - ExternalFile(BufferId, Vec), - Directory(WorktreeId, Entry), - File(WorktreeId, Entry, BufferId, Vec), +struct OutlineEntryOutline { + buffer_id: BufferId, + excerpt_id: ExcerptId, + outline: Outline, } -impl PartialEq for FsEntry { +impl PartialEq for OutlineEntryOutline { fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::ExternalFile(id_a, _), Self::ExternalFile(id_b, _)) => id_a == id_b, - (Self::Directory(id_a, entry_a), Self::Directory(id_b, entry_b)) => { - id_a == id_b && entry_a.id == entry_b.id - } - ( - Self::File(worktree_a, entry_a, id_a, ..), - Self::File(worktree_b, entry_b, id_b, ..), - ) => worktree_a == worktree_b && entry_a.id == entry_b.id && id_a == id_b, - _ => false, - } + self.buffer_id == other.buffer_id && self.excerpt_id == other.excerpt_id } } -impl Hash for FsEntry { +impl Hash for OutlineEntryOutline { fn hash(&self, state: &mut H) { + (self.buffer_id, self.excerpt_id).hash(state); + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +enum OutlineEntry { + Excerpt(OutlineEntryExcerpt), + Outline(OutlineEntryOutline), +} + +impl OutlineEntry { + fn ids(&self) -> (BufferId, ExcerptId) { match self { - Self::ExternalFile(buffer_id, _) => { - buffer_id.hash(state); - } - Self::Directory(worktree_id, entry) => { - worktree_id.hash(state); - entry.id.hash(state); - } - Self::File(worktree_id, entry, buffer_id, _) => { - worktree_id.hash(state); - entry.id.hash(state); - buffer_id.hash(state); - } + OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id), + OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id), } } } +#[derive(Debug, Clone, Eq)] +struct FsEntryFile { + worktree_id: WorktreeId, + entry: GitEntry, + buffer_id: BufferId, + excerpts: Vec, +} + +impl PartialEq for FsEntryFile { + fn eq(&self, other: &Self) -> bool { + self.worktree_id == other.worktree_id + && self.entry.id == other.entry.id + && self.buffer_id == other.buffer_id + } +} + +impl Hash for FsEntryFile { + fn hash(&self, state: &mut H) { + (self.buffer_id, self.entry.id, self.worktree_id).hash(state); + } +} + +#[derive(Debug, Clone, Eq)] +struct FsEntryDirectory { + worktree_id: WorktreeId, + entry: GitEntry, +} + +impl PartialEq for FsEntryDirectory { + fn eq(&self, other: &Self) -> bool { + self.worktree_id == other.worktree_id && self.entry.id == other.entry.id + } +} + +impl Hash for FsEntryDirectory { + fn hash(&self, state: &mut H) { + 
(self.worktree_id, self.entry.id).hash(state); + } +} + +#[derive(Debug, Clone, Eq)] +struct FsEntryExternalFile { + buffer_id: BufferId, + excerpts: Vec, +} + +impl PartialEq for FsEntryExternalFile { + fn eq(&self, other: &Self) -> bool { + self.buffer_id == other.buffer_id + } +} + +impl Hash for FsEntryExternalFile { + fn hash(&self, state: &mut H) { + self.buffer_id.hash(state); + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +enum FsEntry { + ExternalFile(FsEntryExternalFile), + Directory(FsEntryDirectory), + File(FsEntryFile), +} + struct ActiveItem { item_handle: Box, active_editor: WeakView, @@ -775,7 +863,12 @@ impl OutlinePanel { } fn unfold_directory(&mut self, _: &UnfoldDirectory, cx: &mut ViewContext) { - if let Some(PanelEntry::FoldedDirs(worktree_id, entries)) = self.selected_entry().cloned() { + if let Some(PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + })) = self.selected_entry().cloned() + { self.unfolded_dirs .entry(worktree_id) .or_default() @@ -786,11 +879,11 @@ impl OutlinePanel { fn fold_directory(&mut self, _: &FoldDirectory, cx: &mut ViewContext) { let (worktree_id, entry) = match self.selected_entry().cloned() { - Some(PanelEntry::Fs(FsEntry::Directory(worktree_id, entry))) => { - (worktree_id, Some(entry)) + Some(PanelEntry::Fs(FsEntry::Directory(directory))) => { + (directory.worktree_id, Some(directory.entry)) } - Some(PanelEntry::FoldedDirs(worktree_id, entries)) => { - (worktree_id, entries.last().cloned()) + Some(PanelEntry::FoldedDirs(folded_dirs)) => { + (folded_dirs.worktree_id, folded_dirs.entries.last().cloned()) } _ => return, }; @@ -875,12 +968,12 @@ impl OutlinePanel { let mut scroll_to_buffer = None; let scroll_target = match entry { PanelEntry::FoldedDirs(..) | PanelEntry::Fs(FsEntry::Directory(..)) => None, - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { + PanelEntry::Fs(FsEntry::ExternalFile(file)) => { change_selection = false; - scroll_to_buffer = Some(*buffer_id); + scroll_to_buffer = Some(file.buffer_id); multi_buffer_snapshot.excerpts().find_map( |(excerpt_id, buffer_snapshot, excerpt_range)| { - if &buffer_snapshot.remote_id() == buffer_id { + if buffer_snapshot.remote_id() == file.buffer_id { multi_buffer_snapshot .anchor_in_excerpt(excerpt_id, excerpt_range.context.start) } else { @@ -889,13 +982,14 @@ impl OutlinePanel { }, ) } - PanelEntry::Fs(FsEntry::File(_, file_entry, buffer_id, _)) => { + + PanelEntry::Fs(FsEntry::File(file)) => { change_selection = false; - scroll_to_buffer = Some(*buffer_id); + scroll_to_buffer = Some(file.buffer_id); self.project .update(cx, |project, cx| { project - .path_for_entry(file_entry.id, cx) + .path_for_entry(file.entry.id, cx) .and_then(|path| project.get_open_buffer(&path, cx)) }) .map(|buffer| { @@ -909,18 +1003,17 @@ impl OutlinePanel { .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) }) } - PanelEntry::Outline(OutlineEntry::Outline(_, excerpt_id, outline)) => { - multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, outline.range.start) - .or_else(|| { - multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, outline.range.end) - }) - } - PanelEntry::Outline(OutlineEntry::Excerpt(_, excerpt_id, excerpt_range)) => { + PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot + .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start) + .or_else(|| { + multi_buffer_snapshot + .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end) + }), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { change_selection 
= false; - multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) + multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start) } - PanelEntry::Search(SearchEntry { match_range, .. }) => Some(match_range.start), + PanelEntry::Search(search_entry) => Some(search_entry.match_range.start), }; if let Some(anchor) = scroll_target { @@ -960,8 +1053,10 @@ impl OutlinePanel { .iter() .rev() .filter_map(|entry| match entry { - FsEntry::File(_, _, buffer_id, _) - | FsEntry::ExternalFile(buffer_id, _) => Some(*buffer_id), + FsEntry::File(file) => Some(file.buffer_id), + FsEntry::ExternalFile(external_file) => { + Some(external_file.buffer_id) + } FsEntry::Directory(..) => None, }) .skip_while(|id| *id != buffer_id) @@ -1044,69 +1139,68 @@ impl OutlinePanel { match &selected_entry { PanelEntry::Fs(fs_entry) => match fs_entry { FsEntry::ExternalFile(..) => None, - FsEntry::File(worktree_id, entry, ..) - | FsEntry::Directory(worktree_id, entry) => { - entry.path.parent().and_then(|parent_path| { - previous_entries.find(|entry| match entry { - PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) => { - dir_worktree_id == worktree_id - && dir_entry.path.as_ref() == parent_path - } - PanelEntry::FoldedDirs(dirs_worktree_id, dirs) => { - dirs_worktree_id == worktree_id - && dirs - .last() - .map_or(false, |dir| dir.path.as_ref() == parent_path) - } - _ => false, - }) + FsEntry::File(FsEntryFile { + worktree_id, entry, .. + }) + | FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + }) => entry.path.parent().and_then(|parent_path| { + previous_entries.find(|entry| match entry { + PanelEntry::Fs(FsEntry::Directory(directory)) => { + directory.worktree_id == *worktree_id + && directory.entry.path.as_ref() == parent_path + } + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id: dirs_worktree_id, + entries: dirs, + .. 
+ }) => { + dirs_worktree_id == worktree_id + && dirs + .last() + .map_or(false, |dir| dir.path.as_ref() == parent_path) + } + _ => false, }) - } + }), }, - PanelEntry::FoldedDirs(worktree_id, entries) => entries + PanelEntry::FoldedDirs(folded_dirs) => folded_dirs + .entries .first() .and_then(|entry| entry.path.parent()) .and_then(|parent_path| { previous_entries.find(|entry| { - if let PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) = - entry - { - dir_worktree_id == worktree_id - && dir_entry.path.as_ref() == parent_path + if let PanelEntry::Fs(FsEntry::Directory(directory)) = entry { + directory.worktree_id == folded_dirs.worktree_id + && directory.entry.path.as_ref() == parent_path } else { false } }) }), - PanelEntry::Outline(OutlineEntry::Excerpt(excerpt_buffer_id, excerpt_id, _)) => { + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { previous_entries.find(|entry| match entry { - PanelEntry::Fs(FsEntry::File(_, _, file_buffer_id, file_excerpts)) => { - file_buffer_id == excerpt_buffer_id - && file_excerpts.contains(excerpt_id) + PanelEntry::Fs(FsEntry::File(file)) => { + file.buffer_id == excerpt.buffer_id + && file.excerpts.contains(&excerpt.id) } - PanelEntry::Fs(FsEntry::ExternalFile(file_buffer_id, file_excerpts)) => { - file_buffer_id == excerpt_buffer_id - && file_excerpts.contains(excerpt_id) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + external_file.buffer_id == excerpt.buffer_id + && external_file.excerpts.contains(&excerpt.id) } _ => false, }) } - PanelEntry::Outline(OutlineEntry::Outline( - outline_buffer_id, - outline_excerpt_id, - _, - )) => previous_entries.find(|entry| { - if let PanelEntry::Outline(OutlineEntry::Excerpt( - excerpt_buffer_id, - excerpt_id, - _, - )) = entry - { - outline_buffer_id == excerpt_buffer_id && outline_excerpt_id == excerpt_id - } else { - false - } - }), + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + previous_entries.find(|entry| { + if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry { + outline.buffer_id == excerpt.buffer_id + && outline.excerpt_id == excerpt.id + } else { + false + } + }) + } PanelEntry::Search(_) => { previous_entries.find(|entry| !matches!(entry, PanelEntry::Search(_))) } @@ -1164,8 +1258,12 @@ impl OutlinePanel { ) { self.select_entry(entry.clone(), true, cx); let is_root = match &entry { - PanelEntry::Fs(FsEntry::File(worktree_id, entry, ..)) - | PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, entry, .. + })) + | PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => self .project .read(cx) .worktree_for_id(*worktree_id, cx) @@ -1173,7 +1271,11 @@ impl OutlinePanel { worktree.read(cx).root_entry().map(|entry| entry.id) == Some(entry.id) }) .unwrap_or(false), - PanelEntry::FoldedDirs(worktree_id, entries) => entries + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) => entries .first() .and_then(|entry| { self.project @@ -1232,9 +1334,11 @@ impl OutlinePanel { fn is_foldable(&self, entry: &PanelEntry) -> bool { let (directory_worktree, directory_entry) = match entry { - PanelEntry::Fs(FsEntry::Directory(directory_worktree, directory_entry)) => { - (*directory_worktree, Some(directory_entry)) - } + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry: directory_entry, + .. 
+ })) => (*worktree_id, Some(directory_entry)), _ => return false, }; let Some(directory_entry) = directory_entry else { @@ -1270,24 +1374,34 @@ impl OutlinePanel { }; let mut buffers_to_unfold = HashSet::default(); let entry_to_expand = match &selected_entry { - PanelEntry::FoldedDirs(worktree_id, dir_entries) => dir_entries.last().map(|entry| { + PanelEntry::FoldedDirs(FoldedDirsEntry { + entries: dir_entries, + worktree_id, + .. + }) => dir_entries.last().map(|entry| { buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry)); CollapsedEntry::Dir(*worktree_id, entry.id) }), - PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => { - buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, dir_entry)); - Some(CollapsedEntry::Dir(*worktree_id, dir_entry.id)) + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => { + buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry)); + Some(CollapsedEntry::Dir(*worktree_id, entry.id)) } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { buffers_to_unfold.insert(*buffer_id); Some(CollapsedEntry::File(*worktree_id, *buffer_id)) } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - buffers_to_unfold.insert(*buffer_id); - Some(CollapsedEntry::ExternalFile(*buffer_id)) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_unfold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) } PanelEntry::Search(_) | PanelEntry::Outline(..) => return, }; @@ -1330,19 +1444,24 @@ impl OutlinePanel { let mut buffers_to_fold = HashSet::default(); let collapsed = match &selected_entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, selected_dir_entry)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => { if self .collapsed_entries - .insert(CollapsedEntry::Dir(*worktree_id, selected_dir_entry.id)) + .insert(CollapsedEntry::Dir(*worktree_id, entry.id)) { - buffers_to_fold - .extend(self.buffers_inside_directory(*worktree_id, selected_dir_entry)); + buffers_to_fold.extend(self.buffers_inside_directory(*worktree_id, entry)); true } else { false } } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. 
+ })) => { if self .collapsed_entries .insert(CollapsedEntry::File(*worktree_id, *buffer_id)) @@ -1353,34 +1472,35 @@ impl OutlinePanel { false } } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { if self .collapsed_entries - .insert(CollapsedEntry::ExternalFile(*buffer_id)) + .insert(CollapsedEntry::ExternalFile(external_file.buffer_id)) { - buffers_to_fold.insert(*buffer_id); + buffers_to_fold.insert(external_file.buffer_id); true } else { false } } - PanelEntry::FoldedDirs(worktree_id, dir_entries) => { + PanelEntry::FoldedDirs(folded_dirs) => { let mut folded = false; - if let Some(dir_entry) = dir_entries.last() { + if let Some(dir_entry) = folded_dirs.entries.last() { if self .collapsed_entries - .insert(CollapsedEntry::Dir(*worktree_id, dir_entry.id)) + .insert(CollapsedEntry::Dir(folded_dirs.worktree_id, dir_entry.id)) { folded = true; - buffers_to_fold - .extend(self.buffers_inside_directory(*worktree_id, dir_entry)); + buffers_to_fold.extend( + self.buffers_inside_directory(folded_dirs.worktree_id, dir_entry), + ); } } folded } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => self + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self .collapsed_entries - .insert(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)), + .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)), PanelEntry::Search(_) | PanelEntry::Outline(..) => false, }; @@ -1409,31 +1529,42 @@ impl OutlinePanel { .iter() .fold(HashSet::default(), |mut entries, fs_entry| { match fs_entry { - FsEntry::ExternalFile(buffer_id, _) => { - buffers_to_unfold.insert(*buffer_id); - entries.insert(CollapsedEntry::ExternalFile(*buffer_id)); - entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.iter().map(|(excerpt_id, _)| { - CollapsedEntry::Excerpt(*buffer_id, *excerpt_id) - }) - }, - )); - } - FsEntry::Directory(worktree_id, entry) => { - entries.insert(CollapsedEntry::Dir(*worktree_id, entry.id)); + FsEntry::ExternalFile(external_file) => { + buffers_to_unfold.insert(external_file.buffer_id); + entries.insert(CollapsedEntry::ExternalFile(external_file.buffer_id)); + entries.extend( + self.excerpts + .get(&external_file.buffer_id) + .into_iter() + .flat_map(|excerpts| { + excerpts.iter().map(|(excerpt_id, _)| { + CollapsedEntry::Excerpt( + external_file.buffer_id, + *excerpt_id, + ) + }) + }), + ); } - FsEntry::File(worktree_id, _, buffer_id, _) => { - buffers_to_unfold.insert(*buffer_id); - entries.insert(CollapsedEntry::File(*worktree_id, *buffer_id)); - entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.iter().map(|(excerpt_id, _)| { - CollapsedEntry::Excerpt(*buffer_id, *excerpt_id) - }) - }, + FsEntry::Directory(directory) => { + entries.insert(CollapsedEntry::Dir( + directory.worktree_id, + directory.entry.id, )); } + FsEntry::File(file) => { + buffers_to_unfold.insert(file.buffer_id); + entries.insert(CollapsedEntry::File(file.worktree_id, file.buffer_id)); + entries.extend( + self.excerpts.get(&file.buffer_id).into_iter().flat_map( + |excerpts| { + excerpts.iter().map(|(excerpt_id, _)| { + CollapsedEntry::Excerpt(file.buffer_id, *excerpt_id) + }) + }, + ), + ); + } }; entries }); @@ -1459,22 +1590,28 @@ impl OutlinePanel { .cached_entries .iter() .flat_map(|cached_entry| match &cached_entry.entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => { - Some(CollapsedEntry::Dir(*worktree_id, entry.id)) - } - 
PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)), + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { buffers_to_fold.insert(*buffer_id); Some(CollapsedEntry::File(*worktree_id, *buffer_id)) } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - buffers_to_fold.insert(*buffer_id); - Some(CollapsedEntry::ExternalFile(*buffer_id)) - } - PanelEntry::FoldedDirs(worktree_id, entries) => { - Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_fold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) } PanelEntry::Search(_) | PanelEntry::Outline(..) => None, }) @@ -1498,7 +1635,11 @@ impl OutlinePanel { let mut fold = false; let mut buffers_to_toggle = HashSet::default(); match entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry: dir_entry, + .. + })) => { let entry_id = dir_entry.id; let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id); buffers_to_toggle.extend(self.buffers_inside_directory(*worktree_id, dir_entry)); @@ -1514,7 +1655,11 @@ impl OutlinePanel { fold = true; } } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { let collapsed_entry = CollapsedEntry::File(*worktree_id, *buffer_id); buffers_to_toggle.insert(*buffer_id); if !self.collapsed_entries.remove(&collapsed_entry) { @@ -1522,15 +1667,19 @@ impl OutlinePanel { fold = true; } } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - let collapsed_entry = CollapsedEntry::ExternalFile(*buffer_id); - buffers_to_toggle.insert(*buffer_id); + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + let collapsed_entry = CollapsedEntry::ExternalFile(external_file.buffer_id); + buffers_to_toggle.insert(external_file.buffer_id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); fold = true; } } - PanelEntry::FoldedDirs(worktree_id, dir_entries) => { + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries: dir_entries, + .. 
+ }) => { if let Some(dir_entry) = dir_entries.first() { let entry_id = dir_entry.id; let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id); @@ -1549,8 +1698,8 @@ impl OutlinePanel { } } } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - let collapsed_entry = CollapsedEntry::Excerpt(*buffer_id, *excerpt_id); + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } @@ -1625,7 +1774,9 @@ impl OutlinePanel { .selected_entry() .and_then(|entry| match entry { PanelEntry::Fs(entry) => self.relative_path(entry, cx), - PanelEntry::FoldedDirs(_, dirs) => dirs.last().map(|entry| entry.path.clone()), + PanelEntry::FoldedDirs(folded_dirs) => { + folded_dirs.entries.last().map(|entry| entry.path.clone()) + } PanelEntry::Search(_) | PanelEntry::Outline(..) => None, }) .map(|p| p.to_string_lossy().to_string()) @@ -1679,23 +1830,24 @@ impl OutlinePanel { return Ok(()); }; let related_buffer_entry = match &entry_with_selection { - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - project.update(&mut cx, |project, cx| { - let entry_id = project - .buffer_for_id(*buffer_id, cx) - .and_then(|buffer| buffer.read(cx).entry_id(cx)); - project - .worktree_for_id(*worktree_id, cx) - .zip(entry_id) - .and_then(|(worktree, entry_id)| { - let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); - Some((worktree, entry)) - }) - })? - } + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => project.update(&mut cx, |project, cx| { + let entry_id = project + .buffer_for_id(*buffer_id, cx) + .and_then(|buffer| buffer.read(cx).entry_id(cx)); + project + .worktree_for_id(*worktree_id, cx) + .zip(entry_id) + .and_then(|(worktree, entry_id)| { + let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); + Some((worktree, entry)) + }) + })?, PanelEntry::Outline(outline_entry) => { - let &(OutlineEntry::Outline(buffer_id, excerpt_id, _) - | OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) = outline_entry; + let (buffer_id, excerpt_id) = outline_entry.ids(); outline_panel.update(&mut cx, |outline_panel, cx| { outline_panel .collapsed_entries @@ -1808,25 +1960,21 @@ impl OutlinePanel { fn render_excerpt( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - range: &ExcerptRange, + excerpt: &OutlineEntryExcerpt, depth: usize, cx: &mut ViewContext, ) -> Option> { - let item_id = ElementId::from(excerpt_id.to_proto() as usize); + let item_id = ElementId::from(excerpt.id.to_proto() as usize); let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Excerpt( - selected_buffer_id, - selected_excerpt_id, - _, - ))) => selected_buffer_id == &buffer_id && selected_excerpt_id == &excerpt_id, + Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => { + selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id + } _ => false, }; let has_outlines = self .excerpts - .get(&buffer_id) - .and_then(|excerpts| match &excerpts.get(&excerpt_id)?.outlines { + .get(&excerpt.buffer_id) + .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines { ExcerptOutlines::Outlines(outlines) => Some(outlines), ExcerptOutlines::Invalidated(outlines) => Some(outlines), ExcerptOutlines::NotFetched => None, @@ -1834,7 +1982,7 @@ impl OutlinePanel { .map_or(false, |outlines| !outlines.is_empty()); let 
is_expanded = !self .collapsed_entries - .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); let color = entry_git_aware_label_color(None, false, is_active); let icon = if has_outlines { FileIcons::get_chevron_icon(is_expanded, cx) @@ -1844,14 +1992,14 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let label = self.excerpt_label(buffer_id, range, cx)?; + let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?; let label_element = Label::new(label) .single_line() .color(color) .into_any_element(); Some(self.entry_element( - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, range.clone())), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())), item_id, depth, Some(icon), @@ -1878,50 +2026,40 @@ impl OutlinePanel { fn render_outline( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - rendered_outline: &Outline, + outline: &OutlineEntryOutline, depth: usize, string_match: Option<&StringMatch>, cx: &mut ViewContext, ) -> Stateful
{ - let (item_id, label_element) = ( - ElementId::from(SharedString::from(format!( - "{buffer_id:?}|{excerpt_id:?}{:?}|{:?}", - rendered_outline.range, &rendered_outline.text, - ))), - outline::render_item( - rendered_outline, - string_match - .map(|string_match| string_match.ranges().collect::>()) - .unwrap_or_default(), - cx, - ) - .into_any_element(), - ); + let item_id = ElementId::from(SharedString::from(format!( + "{:?}|{:?}{:?}|{:?}", + outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text, + ))); + + let label_element = outline::render_item( + &outline.outline, + string_match + .map(|string_match| string_match.ranges().collect::>()) + .unwrap_or_default(), + cx, + ) + .into_any_element(); + let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Outline( - selected_buffer_id, - selected_excerpt_id, - selected_entry, - ))) => { - selected_buffer_id == &buffer_id - && selected_excerpt_id == &excerpt_id - && selected_entry == rendered_outline + Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => { + outline == selected && outline.outline == selected.outline } _ => false, }; + let icon = if self.is_singleton_active(cx) { None } else { Some(empty_icon()) }; + self.entry_element( - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - rendered_outline.clone(), - )), + PanelEntry::Outline(OutlineEntry::Outline(outline.clone())), item_id, depth, icon, @@ -1944,7 +2082,9 @@ impl OutlinePanel { _ => false, }; let (item_id, label_element, icon) = match rendered_entry { - FsEntry::File(worktree_id, entry, ..) => { + FsEntry::File(FsEntryFile { + worktree_id, entry, .. + }) => { let name = self.entry_name(worktree_id, entry, cx); let color = entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active); @@ -1967,14 +2107,18 @@ impl OutlinePanel { icon.unwrap_or_else(empty_icon), ) } - FsEntry::Directory(worktree_id, entry) => { - let name = self.entry_name(worktree_id, entry, cx); - - let is_expanded = !self - .collapsed_entries - .contains(&CollapsedEntry::Dir(*worktree_id, entry.id)); - let color = - entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active); + FsEntry::Directory(directory) => { + let name = self.entry_name(&directory.worktree_id, &directory.entry, cx); + + let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Dir( + directory.worktree_id, + directory.entry.id, + )); + let color = entry_git_aware_label_color( + directory.entry.git_status, + directory.entry.is_ignored, + is_active, + ); let icon = if settings.folder_icons { FileIcons::get_folder_icon(is_expanded, cx) } else { @@ -1983,7 +2127,7 @@ impl OutlinePanel { .map(Icon::from_path) .map(|icon| icon.color(color).into_any_element()); ( - ElementId::from(entry.id.to_proto() as usize), + ElementId::from(directory.entry.id.to_proto() as usize), HighlightedLabel::new( name, string_match @@ -1995,9 +2139,9 @@ impl OutlinePanel { icon.unwrap_or_else(empty_icon), ) } - FsEntry::ExternalFile(buffer_id, _) => { + FsEntry::ExternalFile(external_file) => { let color = entry_label_color(is_active); - let (icon, name) = match self.buffer_snapshot_for_id(*buffer_id, cx) { + let (icon, name) = match self.buffer_snapshot_for_id(external_file.buffer_id, cx) { Some(buffer_snapshot) => match buffer_snapshot.file() { Some(file) => { let path = file.path(); @@ -2015,7 +2159,7 @@ impl OutlinePanel { None => (None, "Unknown buffer".to_string()), }; ( - ElementId::from(buffer_id.to_proto() as usize), + 
ElementId::from(external_file.buffer_id.to_proto() as usize), HighlightedLabel::new( name, string_match @@ -2042,29 +2186,32 @@ impl OutlinePanel { fn render_folded_dirs( &self, - worktree_id: WorktreeId, - dir_entries: &[Entry], + folded_dir: &FoldedDirsEntry, depth: usize, string_match: Option<&StringMatch>, cx: &mut ViewContext, ) -> Stateful
{ let settings = OutlinePanelSettings::get_global(cx); let is_active = match self.selected_entry() { - Some(PanelEntry::FoldedDirs(selected_worktree_id, selected_entries)) => { - selected_worktree_id == &worktree_id && selected_entries == dir_entries + Some(PanelEntry::FoldedDirs(selected_dirs)) => { + selected_dirs.worktree_id == folded_dir.worktree_id + && selected_dirs.entries == folded_dir.entries } _ => false, }; let (item_id, label_element, icon) = { - let name = self.dir_names_string(dir_entries, worktree_id, cx); + let name = self.dir_names_string(&folded_dir.entries, folded_dir.worktree_id, cx); - let is_expanded = dir_entries.iter().all(|dir| { + let is_expanded = folded_dir.entries.iter().all(|dir| { !self .collapsed_entries - .contains(&CollapsedEntry::Dir(worktree_id, dir.id)) + .contains(&CollapsedEntry::Dir(folded_dir.worktree_id, dir.id)) }); - let is_ignored = dir_entries.iter().any(|entry| entry.is_ignored); - let git_status = dir_entries.first().and_then(|entry| entry.git_status); + let is_ignored = folded_dir.entries.iter().any(|entry| entry.is_ignored); + let git_status = folded_dir + .entries + .first() + .and_then(|entry| entry.git_status); let color = entry_git_aware_label_color(git_status, is_ignored, is_active); let icon = if settings.folder_icons { FileIcons::get_folder_icon(is_expanded, cx) @@ -2075,10 +2222,12 @@ impl OutlinePanel { .map(|icon| icon.color(color).into_any_element()); ( ElementId::from( - dir_entries + folded_dir + .entries .last() .map(|entry| entry.id.to_proto()) - .unwrap_or_else(|| worktree_id.to_proto()) as usize, + .unwrap_or_else(|| folded_dir.worktree_id.to_proto()) + as usize, ), HighlightedLabel::new( name, @@ -2093,7 +2242,7 @@ impl OutlinePanel { }; self.entry_element( - PanelEntry::FoldedDirs(worktree_id, dir_entries.to_vec()), + PanelEntry::FoldedDirs(folded_dir.clone()), item_id, depth, Some(icon), @@ -2368,7 +2517,7 @@ impl OutlinePanel { let mut processed_external_buffers = HashSet::default(); let mut new_worktree_entries = HashMap::< WorktreeId, - (worktree::Snapshot, HashMap), + (worktree::Snapshot, HashMap), >::default(); let mut worktree_excerpts = HashMap::< WorktreeId, @@ -2409,12 +2558,13 @@ impl OutlinePanel { match entry_id.and_then(|id| worktree.entry_for_id(id)).cloned() { Some(entry) => { - let mut traversal = worktree.traverse_from_path( - true, - true, - true, - entry.path.as_ref(), - ); + let entry = GitEntry { + git_status: worktree.status_for_file(&entry.path), + entry, + }; + let mut traversal = worktree + .traverse_from_path(true, true, true, entry.path.as_ref()) + .with_git_statuses(); let mut entries_to_add = HashMap::default(); worktree_excerpts @@ -2446,7 +2596,7 @@ impl OutlinePanel { .is_none(); if new_entry_added && traversal.back_to_parent() { if let Some(parent_entry) = traversal.entry() { - current_entry = parent_entry.clone(); + current_entry = parent_entry.to_owned(); continue; } } @@ -2508,19 +2658,22 @@ impl OutlinePanel { } if entry.is_dir() { - Some(FsEntry::Directory(worktree_id, entry)) + Some(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry, + })) } else { let (buffer_id, excerpts) = worktree_excerpts .get_mut(&worktree_id) .and_then(|worktree_excerpts| { worktree_excerpts.remove(&entry.id) })?; - Some(FsEntry::File( + Some(FsEntry::File(FsEntryFile { worktree_id, - entry, buffer_id, + entry, excerpts, - )) + })) } }) .collect::>() @@ -2533,25 +2686,29 @@ impl OutlinePanel { let new_visible_entries = external_excerpts .into_iter() .sorted_by_key(|(id, _)| *id) - .map(|(buffer_id, 
excerpts)| FsEntry::ExternalFile(buffer_id, excerpts)) + .map(|(buffer_id, excerpts)| { + FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts, + }) + }) .chain(worktree_entries) .filter(|visible_item| { match visible_item { - FsEntry::Directory(worktree_id, dir_entry) => { + FsEntry::Directory(directory) => { let parent_id = back_to_common_visited_parent( &mut visited_dirs, - worktree_id, - dir_entry, + &directory.worktree_id, + &directory.entry, ); - let depth = if root_entries.contains(&dir_entry.id) { - 0 - } else { + let mut depth = 0; + if !root_entries.contains(&directory.entry.id) { if auto_fold_dirs { let children = new_children_count - .get(worktree_id) + .get(&directory.worktree_id) .and_then(|children_count| { - children_count.get(&dir_entry.path) + children_count.get(&directory.entry.path) }) .copied() .unwrap_or_default(); @@ -2562,7 +2719,7 @@ impl OutlinePanel { .last() .map(|(parent_dir_id, _)| { new_unfolded_dirs - .get(worktree_id) + .get(&directory.worktree_id) .map_or(true, |unfolded_dirs| { unfolded_dirs .contains(parent_dir_id) @@ -2571,23 +2728,29 @@ impl OutlinePanel { .unwrap_or(true)) { new_unfolded_dirs - .entry(*worktree_id) + .entry(directory.worktree_id) .or_default() - .insert(dir_entry.id); + .insert(directory.entry.id); } } - parent_id + depth = parent_id .and_then(|(worktree_id, id)| { new_depth_map.get(&(worktree_id, id)).copied() }) .unwrap_or(0) - + 1 + + 1; }; - visited_dirs.push((dir_entry.id, dir_entry.path.clone())); - new_depth_map.insert((*worktree_id, dir_entry.id), depth); + visited_dirs + .push((directory.entry.id, directory.entry.path.clone())); + new_depth_map + .insert((directory.worktree_id, directory.entry.id), depth); } - FsEntry::File(worktree_id, file_entry, ..) => { + FsEntry::File(FsEntryFile { + worktree_id, + entry: file_entry, + .. + }) => { let parent_id = back_to_common_visited_parent( &mut visited_dirs, worktree_id, @@ -2718,8 +2881,10 @@ impl OutlinePanel { .iter() .find(|fs_entry| match fs_entry { FsEntry::Directory(..) => false, - FsEntry::File(_, _, file_buffer_id, _) - | FsEntry::ExternalFile(file_buffer_id, _) => *file_buffer_id == buffer_id, + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + buffer_id == buffer_id + } }) .cloned() .map(PanelEntry::Fs); @@ -2869,26 +3034,31 @@ impl OutlinePanel { .cloned(); let closest_container = match outline_item { - Some(outline) => { - PanelEntry::Outline(OutlineEntry::Outline(buffer_id, excerpt_id, outline)) - } + Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { + buffer_id, + excerpt_id, + outline, + })), None => { self.cached_entries.iter().rev().find_map(|cached_entry| { match &cached_entry.entry { - PanelEntry::Outline(OutlineEntry::Excerpt( - entry_buffer_id, - entry_excerpt_id, - _, - )) => { - if entry_buffer_id == &buffer_id && entry_excerpt_id == &excerpt_id { + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id { Some(cached_entry.entry.clone()) } else { None } } PanelEntry::Fs( - FsEntry::ExternalFile(file_buffer_id, file_excerpts) - | FsEntry::File(_, _, file_buffer_id, file_excerpts), + FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id: file_buffer_id, + excerpts: file_excerpts, + }) + | FsEntry::File(FsEntryFile { + buffer_id: file_buffer_id, + excerpts: file_excerpts, + .. 
+ }), ) => { if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) { Some(cached_entry.entry.clone()) @@ -2987,8 +3157,15 @@ impl OutlinePanel { .iter() .fold(HashMap::default(), |mut excerpts_to_fetch, fs_entry| { match fs_entry { - FsEntry::File(_, _, buffer_id, file_excerpts) - | FsEntry::ExternalFile(buffer_id, file_excerpts) => { + FsEntry::File(FsEntryFile { + buffer_id, + excerpts: file_excerpts, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts: file_excerpts, + }) => { let excerpts = self.excerpts.get(buffer_id); for &file_excerpt in file_excerpts { if let Some(excerpt) = excerpts @@ -3038,21 +3215,28 @@ impl OutlinePanel { fn abs_path(&self, entry: &PanelEntry, cx: &AppContext) -> Option { match entry { PanelEntry::Fs( - FsEntry::File(_, _, buffer_id, _) | FsEntry::ExternalFile(buffer_id, _), + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }), ) => self .buffer_snapshot_for_id(*buffer_id, cx) .and_then(|buffer_snapshot| { let file = File::from_dyn(buffer_snapshot.file())?; file.worktree.read(cx).absolutize(&file.path).ok() }), - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => self .project .read(cx) .worktree_for_id(*worktree_id, cx)? .read(cx) .absolutize(&entry.path) .ok(), - PanelEntry::FoldedDirs(worktree_id, dirs) => dirs.last().and_then(|entry| { + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries: dirs, + .. + }) => dirs.last().and_then(|entry| { self.project .read(cx) .worktree_for_id(*worktree_id, cx) @@ -3064,12 +3248,12 @@ impl OutlinePanel { fn relative_path(&self, entry: &FsEntry, cx: &AppContext) -> Option> { match entry { - FsEntry::ExternalFile(buffer_id, _) => { + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { let buffer_snapshot = self.buffer_snapshot_for_id(*buffer_id, cx)?; Some(buffer_snapshot.file()?.path().clone()) } - FsEntry::Directory(_, entry) => Some(entry.path.clone()), - FsEntry::File(_, entry, ..) => Some(entry.path.clone()), + FsEntry::Directory(FsEntryDirectory { entry, .. }) => Some(entry.path.clone()), + FsEntry::File(FsEntryFile { entry, .. 
}) => Some(entry.path.clone()), } } @@ -3135,7 +3319,7 @@ impl OutlinePanel { let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; - let mut folded_dirs_entry = None::<(usize, WorktreeId, Vec)>; + let mut folded_dirs_entry = None::<(usize, FoldedDirsEntry)>; let track_matches = query.is_some(); #[derive(Debug)] @@ -3149,29 +3333,29 @@ impl OutlinePanel { for entry in outline_panel.fs_entries.clone() { let is_expanded = outline_panel.is_expanded(&entry); let (depth, should_add) = match &entry { - FsEntry::Directory(worktree_id, dir_entry) => { + FsEntry::Directory(directory_entry) => { let mut should_add = true; let is_root = project .read(cx) - .worktree_for_id(*worktree_id, cx) + .worktree_for_id(directory_entry.worktree_id, cx) .map_or(false, |worktree| { - worktree.read(cx).root_entry() == Some(dir_entry) + worktree.read(cx).root_entry() == Some(&directory_entry.entry) }); let folded = auto_fold_dirs && !is_root && outline_panel .unfolded_dirs - .get(worktree_id) + .get(&directory_entry.worktree_id) .map_or(true, |unfolded_dirs| { - !unfolded_dirs.contains(&dir_entry.id) + !unfolded_dirs.contains(&directory_entry.entry.id) }); let fs_depth = outline_panel .fs_entries_depth - .get(&(*worktree_id, dir_entry.id)) + .get(&(directory_entry.worktree_id, directory_entry.entry.id)) .copied() .unwrap_or(0); while let Some(parent) = parent_dirs.last() { - if dir_entry.path.starts_with(&parent.path) { + if directory_entry.entry.path.starts_with(&parent.path) { break; } parent_dirs.pop(); @@ -3179,11 +3363,14 @@ impl OutlinePanel { let auto_fold = match parent_dirs.last() { Some(parent) => { parent.folded - && Some(parent.path.as_ref()) == dir_entry.path.parent() + && Some(parent.path.as_ref()) + == directory_entry.entry.path.parent() && outline_panel .fs_children_count - .get(worktree_id) - .and_then(|entries| entries.get(&dir_entry.path)) + .get(&directory_entry.worktree_id) + .and_then(|entries| { + entries.get(&directory_entry.entry.path) + }) .copied() .unwrap_or_default() .may_be_fold_part() @@ -3201,7 +3388,7 @@ impl OutlinePanel { parent.depth + 1 }; parent_dirs.push(ParentStats { - path: dir_entry.path.clone(), + path: directory_entry.entry.path.clone(), folded, expanded: parent_expanded && is_expanded, depth: new_depth, @@ -3210,7 +3397,7 @@ impl OutlinePanel { } None => { parent_dirs.push(ParentStats { - path: dir_entry.path.clone(), + path: directory_entry.entry.path.clone(), folded, expanded: is_expanded, depth: fs_depth, @@ -3219,37 +3406,38 @@ impl OutlinePanel { } }; - if let Some((folded_depth, folded_worktree_id, mut folded_dirs)) = - folded_dirs_entry.take() + if let Some((folded_depth, mut folded_dirs)) = folded_dirs_entry.take() { if folded - && worktree_id == &folded_worktree_id - && dir_entry.path.parent() - == folded_dirs.last().map(|entry| entry.path.as_ref()) + && directory_entry.worktree_id == folded_dirs.worktree_id + && directory_entry.entry.path.parent() + == folded_dirs + .entries + .last() + .map(|entry| entry.path.as_ref()) { - folded_dirs.push(dir_entry.clone()); - folded_dirs_entry = - Some((folded_depth, folded_worktree_id, folded_dirs)) + folded_dirs.entries.push(directory_entry.entry.clone()); + folded_dirs_entry = Some((folded_depth, folded_dirs)) } else { if !is_singleton { let start_of_collapsed_dir_sequence = !parent_expanded && parent_dirs .iter() .rev() - .nth(folded_dirs.len() + 1) + .nth(folded_dirs.entries.len() + 1) .map_or(true, |parent| parent.expanded); if 
start_of_collapsed_dir_sequence || parent_expanded || query.is_some() { if parent_folded { - folded_dirs.push(dir_entry.clone()); + folded_dirs + .entries + .push(directory_entry.entry.clone()); should_add = false; } - let new_folded_dirs = PanelEntry::FoldedDirs( - folded_worktree_id, - folded_dirs, - ); + let new_folded_dirs = + PanelEntry::FoldedDirs(folded_dirs.clone()); outline_panel.push_entry( &mut generation_state, track_matches, @@ -3263,12 +3451,23 @@ impl OutlinePanel { folded_dirs_entry = if parent_folded { None } else { - Some((depth, *worktree_id, vec![dir_entry.clone()])) + Some(( + depth, + FoldedDirsEntry { + worktree_id: directory_entry.worktree_id, + entries: vec![directory_entry.entry.clone()], + }, + )) }; } } else if folded { - folded_dirs_entry = - Some((depth, *worktree_id, vec![dir_entry.clone()])); + folded_dirs_entry = Some(( + depth, + FoldedDirsEntry { + worktree_id: directory_entry.worktree_id, + entries: vec![directory_entry.entry.clone()], + }, + )); } let should_add = @@ -3276,21 +3475,22 @@ impl OutlinePanel { (depth, should_add) } FsEntry::ExternalFile(..) => { - if let Some((folded_depth, worktree_id, folded_dirs)) = - folded_dirs_entry.take() - { + if let Some((folded_depth, folded_dir)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() .find(|parent| { - folded_dirs.iter().all(|entry| entry.path != parent.path) + folded_dir + .entries + .iter() + .all(|entry| entry.path != parent.path) }) .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dir), folded_depth, cx, ); @@ -3299,22 +3499,23 @@ impl OutlinePanel { parent_dirs.clear(); (0, true) } - FsEntry::File(worktree_id, file_entry, ..) 
=> { - if let Some((folded_depth, worktree_id, folded_dirs)) = - folded_dirs_entry.take() - { + FsEntry::File(file) => { + if let Some((folded_depth, folded_dirs)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() .find(|parent| { - folded_dirs.iter().all(|entry| entry.path != parent.path) + folded_dirs + .entries + .iter() + .all(|entry| entry.path != parent.path) }) .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dirs), folded_depth, cx, ); @@ -3323,23 +3524,22 @@ impl OutlinePanel { let fs_depth = outline_panel .fs_entries_depth - .get(&(*worktree_id, file_entry.id)) + .get(&(file.worktree_id, file.entry.id)) .copied() .unwrap_or(0); while let Some(parent) = parent_dirs.last() { - if file_entry.path.starts_with(&parent.path) { + if file.entry.path.starts_with(&parent.path) { break; } parent_dirs.pop(); } - let (depth, should_add) = match parent_dirs.last() { + match parent_dirs.last() { Some(parent) => { let new_depth = parent.depth + 1; (new_depth, parent.expanded) } None => (fs_depth, true), - }; - (depth, should_add) + } } }; @@ -3373,12 +3573,16 @@ impl OutlinePanel { let excerpts_to_consider = if is_singleton || query.is_some() || (should_add && is_expanded) { match &entry { - FsEntry::File(_, _, buffer_id, entry_excerpts) => { - Some((*buffer_id, entry_excerpts)) - } - FsEntry::ExternalFile(buffer_id, entry_excerpts) => { - Some((*buffer_id, entry_excerpts)) - } + FsEntry::File(FsEntryFile { + buffer_id, + excerpts, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts, + .. + }) => Some((*buffer_id, excerpts)), _ => None, } } else { @@ -3417,17 +3621,22 @@ impl OutlinePanel { } } - if let Some((folded_depth, worktree_id, folded_dirs)) = folded_dirs_entry.take() { + if let Some((folded_depth, folded_dirs)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() - .find(|parent| folded_dirs.iter().all(|entry| entry.path != parent.path)) + .find(|parent| { + folded_dirs + .entries + .iter() + .all(|entry| entry.path != parent.path) + }) .map_or(true, |parent| parent.expanded); if parent_expanded || query.is_some() { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dirs), folded_depth, cx, ); @@ -3490,13 +3699,16 @@ impl OutlinePanel { depth: usize, cx: &mut WindowContext, ) { - let entry = if let PanelEntry::FoldedDirs(worktree_id, entries) = &entry { - match entries.len() { + let entry = if let PanelEntry::FoldedDirs(folded_dirs_entry) = &entry { + match folded_dirs_entry.entries.len() { 0 => { debug_panic!("Empty folded dirs receiver"); return; } - 1 => PanelEntry::Fs(FsEntry::Directory(*worktree_id, entries[0].clone())), + 1 => PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id: folded_dirs_entry.worktree_id, + entry: folded_dirs_entry.entries[0].clone(), + })), _ => entry, } } else { @@ -3515,22 +3727,22 @@ impl OutlinePanel { .push(StringMatchCandidate::new(id, &file_name)); } } - PanelEntry::FoldedDirs(worktree_id, entries) => { - let dir_names = self.dir_names_string(entries, *worktree_id, cx); + PanelEntry::FoldedDirs(folded_dir_entry) => { + let dir_names = self.dir_names_string( + &folded_dir_entry.entries, + folded_dir_entry.worktree_id, + cx, + ); { state .match_candidates 
.push(StringMatchCandidate::new(id, &dir_names)); } } - PanelEntry::Outline(outline_entry) => match outline_entry { - OutlineEntry::Outline(_, _, outline) => { - state - .match_candidates - .push(StringMatchCandidate::new(id, &outline.text)); - } - OutlineEntry::Excerpt(..) => {} - }, + PanelEntry::Outline(OutlineEntry::Outline(outline_entry)) => state + .match_candidates + .push(StringMatchCandidate::new(id, &outline_entry.outline.text)), + PanelEntry::Outline(OutlineEntry::Excerpt(_)) => {} PanelEntry::Search(new_search_entry) => { if let Some(search_data) = new_search_entry.render_data.get() { state @@ -3558,7 +3770,7 @@ impl OutlinePanel { fn dir_names_string( &self, - entries: &[Entry], + entries: &[GitEntry], worktree_id: WorktreeId, cx: &AppContext, ) -> String { @@ -3580,11 +3792,17 @@ impl OutlinePanel { fn is_expanded(&self, entry: &FsEntry) -> bool { let entry_to_check = match entry { - FsEntry::ExternalFile(buffer_id, _) => CollapsedEntry::ExternalFile(*buffer_id), - FsEntry::File(worktree_id, _, buffer_id, _) => { - CollapsedEntry::File(*worktree_id, *buffer_id) + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + CollapsedEntry::ExternalFile(*buffer_id) } - FsEntry::Directory(worktree_id, entry) => CollapsedEntry::Dir(*worktree_id, entry.id), + FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + }) => CollapsedEntry::File(*worktree_id, *buffer_id), + FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + }) => CollapsedEntry::Dir(*worktree_id, entry.id), }; !self.collapsed_entries.contains(&entry_to_check) } @@ -3708,11 +3926,11 @@ impl OutlinePanel { self.push_entry( state, track_matches, - PanelEntry::Outline(OutlineEntry::Excerpt( + PanelEntry::Outline(OutlineEntry::Excerpt(OutlineEntryExcerpt { buffer_id, - excerpt_id, - excerpt.range.clone(), - )), + id: excerpt_id, + range: excerpt.range.clone(), + })), excerpt_depth, cx, ); @@ -3733,11 +3951,11 @@ impl OutlinePanel { self.push_entry( state, track_matches, - PanelEntry::Outline(OutlineEntry::Outline( + PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { buffer_id, excerpt_id, - outline.clone(), - )), + outline: outline.clone(), + })), outline_base_depth + outline.depth, cx, ); @@ -3763,9 +3981,9 @@ impl OutlinePanel { let kind = search_state.kind; let related_excerpts = match &parent_entry { - FsEntry::Directory(_, _) => return, - FsEntry::ExternalFile(_, excerpts) => excerpts, - FsEntry::File(_, _, _, excerpts) => excerpts, + FsEntry::Directory(_) => return, + FsEntry::ExternalFile(external) => &external.excerpts, + FsEntry::File(file) => &file.excerpts, } .iter() .copied() @@ -4031,24 +4249,28 @@ impl OutlinePanel { fn width_estimate(&self, depth: usize, entry: &PanelEntry, cx: &AppContext) -> u64 { let item_text_chars = match entry { - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => self - .buffer_snapshot_for_id(*buffer_id, cx) + PanelEntry::Fs(FsEntry::ExternalFile(external)) => self + .buffer_snapshot_for_id(external.buffer_id, cx) .and_then(|snapshot| { Some(snapshot.file()?.path().file_name()?.to_string_lossy().len()) }) .unwrap_or_default(), - PanelEntry::Fs(FsEntry::Directory(_, directory)) => directory + PanelEntry::Fs(FsEntry::Directory(directory)) => directory + .entry .path .file_name() .map(|name| name.to_string_lossy().len()) .unwrap_or_default(), - PanelEntry::Fs(FsEntry::File(_, file, _, _)) => file + PanelEntry::Fs(FsEntry::File(file)) => file + .entry .path .file_name() .map(|name| name.to_string_lossy().len()) .unwrap_or_default(), - 
PanelEntry::FoldedDirs(_, dirs) => { - dirs.iter() + PanelEntry::FoldedDirs(folded_dirs) => { + folded_dirs + .entries + .iter() .map(|dir| { dir.path .file_name() @@ -4056,13 +4278,13 @@ impl OutlinePanel { .unwrap_or_default() }) .sum::() - + dirs.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() + + folded_dirs.entries.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, _, range)) => self - .excerpt_label(*buffer_id, range, cx) + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self + .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) .map(|label| label.len()) .unwrap_or_default(), - PanelEntry::Outline(OutlineEntry::Outline(_, _, outline)) => outline.text.len(), + PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.outline.text.len(), PanelEntry::Search(search) => search .render_data .get() @@ -4136,38 +4358,25 @@ impl OutlinePanel { cached_entry.string_match.as_ref(), cx, )), - PanelEntry::FoldedDirs(worktree_id, entries) => { + PanelEntry::FoldedDirs(folded_dirs_entry) => { Some(outline_panel.render_folded_dirs( - worktree_id, - &entries, + &folded_dirs_entry, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )) + } + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + outline_panel.render_excerpt(&excerpt, cached_entry.depth, cx) + } + PanelEntry::Outline(OutlineEntry::Outline(entry)) => { + Some(outline_panel.render_outline( + &entry, cached_entry.depth, cached_entry.string_match.as_ref(), cx, )) } - PanelEntry::Outline(OutlineEntry::Excerpt( - buffer_id, - excerpt_id, - excerpt, - )) => outline_panel.render_excerpt( - buffer_id, - excerpt_id, - &excerpt, - cached_entry.depth, - cx, - ), - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - outline, - )) => Some(outline_panel.render_outline( - buffer_id, - excerpt_id, - &outline, - cached_entry.depth, - cached_entry.string_match.as_ref(), - cx, - )), PanelEntry::Search(SearchEntry { match_range, render_data, @@ -4304,7 +4513,7 @@ impl OutlinePanel { fn buffers_inside_directory( &self, dir_worktree: WorktreeId, - dir_entry: &Entry, + dir_entry: &GitEntry, ) -> HashSet { if !dir_entry.is_dir() { debug_panic!("buffers_inside_directory called on a non-directory entry {dir_entry:?}"); @@ -4314,23 +4523,24 @@ impl OutlinePanel { self.fs_entries .iter() .skip_while(|fs_entry| match fs_entry { - FsEntry::Directory(worktree_id, entry) => { - *worktree_id != dir_worktree || entry != dir_entry + FsEntry::Directory(directory) => { + directory.worktree_id != dir_worktree || &directory.entry != dir_entry } _ => true, }) .skip(1) .take_while(|fs_entry| match fs_entry { FsEntry::ExternalFile(..) => false, - FsEntry::Directory(worktree_id, entry) => { - *worktree_id == dir_worktree && entry.path.starts_with(&dir_entry.path) + FsEntry::Directory(directory) => { + directory.worktree_id == dir_worktree + && directory.entry.path.starts_with(&dir_entry.path) } - FsEntry::File(worktree_id, entry, ..) 
=> { - *worktree_id == dir_worktree && entry.path.starts_with(&dir_entry.path) + FsEntry::File(file) => { + file.worktree_id == dir_worktree && file.entry.path.starts_with(&dir_entry.path) } }) .filter_map(|fs_entry| match fs_entry { - FsEntry::File(_, _, buffer_id, _) => Some(*buffer_id), + FsEntry::File(file) => Some(file.buffer_id), _ => None, }) .collect() @@ -4678,14 +4888,14 @@ fn subscribe_for_editor_events( .fs_entries .iter() .find_map(|fs_entry| match fs_entry { - FsEntry::ExternalFile(buffer_id, _) => { - if *buffer_id == toggled_buffer_id { + FsEntry::ExternalFile(external) => { + if external.buffer_id == toggled_buffer_id { Some(fs_entry.clone()) } else { None } } - FsEntry::File(_, _, buffer_id, _) => { + FsEntry::File(FsEntryFile { buffer_id, .. }) => { if *buffer_id == toggled_buffer_id { Some(fs_entry.clone()) } else { @@ -5545,41 +5755,46 @@ mod tests { } display_string += &match &entry.entry { PanelEntry::Fs(entry) => match entry { - FsEntry::ExternalFile(_, _) => { + FsEntry::ExternalFile(_) => { panic!("Did not cover external files with tests") } - FsEntry::Directory(_, dir_entry) => format!( + FsEntry::Directory(directory) => format!( "{}/", - dir_entry + directory + .entry .path .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_default() ), - FsEntry::File(_, file_entry, ..) => file_entry + FsEntry::File(file) => file + .entry .path .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_default(), }, - PanelEntry::FoldedDirs(_, dirs) => dirs + PanelEntry::FoldedDirs(folded_dirs) => folded_dirs + .entries .iter() .filter_map(|dir| dir.path.file_name()) .map(|name| name.to_string_lossy().to_string() + "/") .collect(), PanelEntry::Outline(outline_entry) => match outline_entry { - OutlineEntry::Excerpt(_, _, _) => continue, - OutlineEntry::Outline(_, _, outline) => format!("outline: {}", outline.text), + OutlineEntry::Excerpt(_) => continue, + OutlineEntry::Outline(outline_entry) => { + format!("outline: {}", outline_entry.outline.text) + } }, - PanelEntry::Search(SearchEntry { - render_data, - match_range, - .. 
- }) => { + PanelEntry::Search(search_entry) => { format!( "search: {}", - render_data - .get_or_init(|| SearchData::new(match_range, &multi_buffer_snapshot)) + search_entry + .render_data + .get_or_init(|| SearchData::new( + &search_entry.match_range, + &multi_buffer_snapshot + )) .context_text ) } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index c88383b193f82..e5e0ce85a2797 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -569,9 +569,9 @@ impl LocalBufferStore { buffer_change_sets .into_iter() .filter_map(|(change_set, buffer_snapshot, path)| { - let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; - let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; - let base_text = local_repo_entry.repo().load_index_text(&relative_path); + let local_repo = snapshot.local_repo_for_path(&path)?; + let relative_path = local_repo.relativize(&path).ok()?; + let base_text = local_repo.repo().load_index_text(&relative_path); Some((change_set, buffer_snapshot, base_text)) }) .collect::>() @@ -1161,16 +1161,16 @@ impl BufferStore { Worktree::Local(worktree) => { let worktree = worktree.snapshot(); let blame_params = maybe!({ - let (repo_entry, local_repo_entry) = match worktree.repo_for_path(&file.path) { + let local_repo = match worktree.local_repo_for_path(&file.path) { Some(repo_for_path) => repo_for_path, None => return Ok(None), }; - let relative_path = repo_entry - .relativize(&worktree, &file.path) + let relative_path = local_repo + .relativize(&file.path) .context("failed to relativize buffer path")?; - let repo = local_repo_entry.repo().clone(); + let repo = local_repo.repo().clone(); let content = match version { Some(version) => buffer.rope_for_version(&version).clone(), @@ -1247,7 +1247,7 @@ impl BufferStore { }); }; - let path = match repo_entry.relativize(worktree, file.path()) { + let path = match repo_entry.relativize(file.path()) { Ok(RepoPath(path)) => path, Err(e) => return Task::ready(Err(e)), }; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 940ddce2d0943..e7d8e3a199b4d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -87,9 +87,8 @@ pub use language::Location; #[cfg(any(test, feature = "test-support"))] pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; pub use worktree::{ - Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry, - UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, - FS_WATCH_LATENCY, + Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet, + UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY, }; const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); @@ -3098,6 +3097,7 @@ impl LspStore { WorktreeStoreEvent::WorktreeUpdateSent(worktree) => { worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree)); } + WorktreeStoreEvent::GitRepositoryUpdated => {} } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 177e05bd62434..b729b72e50f9c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -39,7 +39,10 @@ use futures::{ pub use image_store::{ImageItem, ImageStore}; use image_store::{ImageItemEvent, ImageStoreEvent}; -use git::{blame::Blame, repository::GitRepository}; +use git::{ + blame::Blame, + repository::{GitFileStatus, GitRepository}, +}; use 
gpui::{ AnyModel, AppContext, AsyncAppContext, BorrowAppContext, Context as _, EventEmitter, Hsla, Model, ModelContext, SharedString, Task, WeakModel, WindowContext, @@ -95,9 +98,8 @@ pub use task_inventory::{ BasicContextProvider, ContextProviderWithTasks, Inventory, TaskSourceKind, }; pub use worktree::{ - Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry, - UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, - FS_WATCH_LATENCY, + Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet, + UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY, }; pub use buffer_store::ProjectTransaction; @@ -242,6 +244,7 @@ pub enum Event { ActivateProjectPanel, WorktreeAdded(WorktreeId), WorktreeOrderChanged, + GitRepositoryUpdated, WorktreeRemoved(WorktreeId), WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet), WorktreeUpdatedGitRepositories(WorktreeId), @@ -1433,6 +1436,15 @@ impl Project { .unwrap_or(false) } + pub fn project_path_git_status( + &self, + project_path: &ProjectPath, + cx: &AppContext, + ) -> Option { + self.worktree_for_id(project_path.worktree_id, cx) + .and_then(|worktree| worktree.read(cx).status_for_file(&project_path.path)) + } + pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option { paths .iter() @@ -2295,6 +2307,7 @@ impl Project { } WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged), WorktreeStoreEvent::WorktreeUpdateSent(_) => {} + WorktreeStoreEvent::GitRepositoryUpdated => cx.emit(Event::GitRepositoryUpdated), } } @@ -3516,17 +3529,6 @@ impl Project { ) } - pub fn get_repo( - &self, - project_path: &ProjectPath, - cx: &AppContext, - ) -> Option> { - self.worktree_for_id(project_path.worktree_id, cx)? - .read(cx) - .as_local()? 
- .local_git_repo(&project_path.path) - } - pub fn get_first_worktree_root_repo(&self, cx: &AppContext) -> Option> { let worktree = self.visible_worktrees(cx).next()?.read(cx).as_local()?; let root_entry = worktree.root_git_entry()?; @@ -4426,8 +4428,10 @@ impl Completion { } } -pub fn sort_worktree_entries(entries: &mut [Entry]) { +pub fn sort_worktree_entries(entries: &mut [impl AsRef]) { entries.sort_by(|entry_a, entry_b| { + let entry_a = entry_a.as_ref(); + let entry_b = entry_b.as_ref(); compare_paths( (&entry_a.path, entry_a.is_file()), (&entry_b.path, entry_b.is_file()), diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index c39b88cd40f40..64f31ee7bd7e4 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -62,6 +62,7 @@ pub enum WorktreeStoreEvent { WorktreeReleased(EntityId, WorktreeId), WorktreeOrderChanged, WorktreeUpdateSent(Model), + GitRepositoryUpdated, } impl EventEmitter for WorktreeStore {} @@ -322,6 +323,7 @@ impl WorktreeStore { let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await; let worktree = worktree?; + this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; if visible { @@ -374,6 +376,17 @@ impl WorktreeStore { this.send_project_updates(cx); }) .detach(); + + cx.subscribe( + worktree, + |_this, _, event: &worktree::Event, cx| match event { + worktree::Event::UpdatedGitRepositories(_) => { + cx.emit(WorktreeStoreEvent::GitRepositoryUpdated); + } + worktree::Event::DeletedEntry(_) | worktree::Event::UpdatedEntries(_) => {} + }, + ) + .detach(); } pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c2afc7aefd62a..bd4de28265a50 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -63,7 +63,7 @@ use workspace::{ notifications::{DetachAndPromptErr, NotifyTaskExt}, DraggedSelection, OpenInTerminal, PreviewTabsSettings, SelectedEntry, Workspace, }; -use worktree::CreatedEntry; +use worktree::{CreatedEntry, GitEntry, GitEntryRef}; const PROJECT_PANEL_KEY: &str = "ProjectPanel"; const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX; @@ -76,7 +76,7 @@ pub struct ProjectPanel { // An update loop that keeps incrementing/decrementing scroll offset while there is a dragged entry that's // hovered over the start/end of a list. hover_scroll_task: Option>, - visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, + visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, /// Maps from leaf project entry ID to the currently selected ancestor. /// Relevant only for auto-fold dirs, where a single project panel entry may actually consist of several /// project entries (and all non-leaf nodes are guaranteed to be directories). 
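A minimal, self-contained sketch (not part of the patch) of the `impl AsRef<Entry>` pattern that the generalized `sort_worktree_entries` signature above relies on: one sort routine can accept both plain entries and git-annotated wrappers, provided the wrapper exposes its inner entry via `AsRef`. The types and the simple lexicographic comparison below are illustrative stand-ins for Zed's real `Entry`, `GitEntry`, and `compare_paths`.

use std::path::PathBuf;

#[derive(Debug)]
struct Entry {
    path: PathBuf,
    is_file: bool,
}

// Plain entries sort as themselves...
impl AsRef<Entry> for Entry {
    fn as_ref(&self) -> &Entry {
        self
    }
}

// ...and git-annotated entries delegate to the wrapped entry, so a single
// generic sort covers both slices (a string stands in for GitFileStatus).
#[derive(Debug)]
struct GitEntry {
    entry: Entry,
    git_status: Option<&'static str>,
}

impl AsRef<Entry> for GitEntry {
    fn as_ref(&self) -> &Entry {
        &self.entry
    }
}

fn sort_entries(entries: &mut [impl AsRef<Entry>]) {
    entries.sort_by(|a, b| {
        let (a, b) = (a.as_ref(), b.as_ref());
        a.path.cmp(&b.path).then(a.is_file.cmp(&b.is_file))
    });
}

fn main() {
    let mut entries = vec![
        GitEntry {
            entry: Entry { path: "src/main.rs".into(), is_file: true },
            git_status: Some("Modified"),
        },
        GitEntry {
            entry: Entry { path: "src".into(), is_file: false },
            git_status: None,
        },
    ];
    sort_entries(&mut entries);
    assert_eq!(entries[0].entry.path, PathBuf::from("src"));
    assert_eq!(entries[0].git_status, None);
}

Under this pattern, the project panel's new `Vec<GitEntry>` and other callers' `Vec<Entry>` can both be handed to the same sorting helper without duplicating the comparison logic.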
@@ -1359,9 +1359,10 @@ impl ProjectPanel { let parent_entry = worktree.entry_for_path(parent_path)?; // Remove all siblings that are being deleted except the last marked entry - let mut siblings: Vec = worktree + let mut siblings: Vec<_> = worktree .snapshot() .child_entries(parent_path) + .with_git_statuses() .filter(|sibling| { sibling.id == latest_entry.id || !marked_entries_in_worktree.contains(&&SelectedEntry { @@ -1369,7 +1370,7 @@ impl ProjectPanel { entry_id: sibling.id, }) }) - .cloned() + .map(|entry| entry.to_owned()) .collect(); project::sort_worktree_entries(&mut siblings); @@ -2307,7 +2308,7 @@ impl ProjectPanel { } let mut visible_worktree_entries = Vec::new(); - let mut entry_iter = snapshot.entries(true, 0); + let mut entry_iter = snapshot.entries(true, 0).with_git_statuses(); let mut auto_folded_ancestors = vec![]; while let Some(entry) = entry_iter.entry() { if auto_collapse_dirs && entry.kind.is_dir() { @@ -2349,7 +2350,7 @@ impl ProjectPanel { } } auto_folded_ancestors.clear(); - visible_worktree_entries.push(entry.clone()); + visible_worktree_entries.push(entry.to_owned()); let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id { entry.id == new_entry_id || { self.ancestors.get(&entry.id).map_or(false, |entries| { @@ -2363,25 +2364,27 @@ impl ProjectPanel { false }; if precedes_new_entry { - visible_worktree_entries.push(Entry { - id: NEW_ENTRY_ID, - kind: new_entry_kind, - path: entry.path.join("\0").into(), - inode: 0, - mtime: entry.mtime, - size: entry.size, - is_ignored: entry.is_ignored, - is_external: false, - is_private: false, - is_always_included: entry.is_always_included, + visible_worktree_entries.push(GitEntry { + entry: Entry { + id: NEW_ENTRY_ID, + kind: new_entry_kind, + path: entry.path.join("\0").into(), + inode: 0, + mtime: entry.mtime, + size: entry.size, + is_ignored: entry.is_ignored, + is_external: false, + is_private: false, + is_always_included: entry.is_always_included, + canonical_path: entry.canonical_path.clone(), + char_bag: entry.char_bag, + is_fifo: entry.is_fifo, + }, git_status: entry.git_status, - canonical_path: entry.canonical_path.clone(), - char_bag: entry.char_bag, - is_fifo: entry.is_fifo, }); } let worktree_abs_path = worktree.read(cx).abs_path(); - let (depth, path) = if Some(entry) == worktree.read(cx).root_entry() { + let (depth, path) = if Some(entry.entry) == worktree.read(cx).root_entry() { let Some(path_name) = worktree_abs_path .file_name() .with_context(|| { @@ -2460,6 +2463,7 @@ impl ProjectPanel { snapshot.propagate_git_statuses(&mut visible_worktree_entries); project::sort_worktree_entries(&mut visible_worktree_entries); + self.visible_entries .push((worktree_id, visible_worktree_entries, OnceCell::new())); } @@ -2655,13 +2659,13 @@ impl ProjectPanel { None } - fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, &Entry)> { + fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, GitEntryRef)> { let mut offset = 0; for (worktree_id, visible_worktree_entries, _) in &self.visible_entries { if visible_worktree_entries.len() > offset + index { return visible_worktree_entries .get(index) - .map(|entry| (*worktree_id, entry)); + .map(|entry| (*worktree_id, entry.to_ref())); } offset += visible_worktree_entries.len(); } @@ -2694,7 +2698,7 @@ impl ProjectPanel { .collect() }); for entry in visible_worktree_entries[entry_range].iter() { - callback(entry, entries, cx); + callback(&entry, entries, cx); } ix = end_ix; } @@ -2763,7 +2767,7 @@ impl ProjectPanel { }; let (depth, difference) = - 
ProjectPanel::calculate_depth_and_difference(entry, entries); + ProjectPanel::calculate_depth_and_difference(&entry, entries); let filename = match difference { diff if diff > 1 => entry @@ -2892,9 +2896,9 @@ impl ProjectPanel { worktree_id: WorktreeId, reverse_search: bool, only_visible_entries: bool, - predicate: impl Fn(&Entry, WorktreeId) -> bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, cx: &mut ViewContext, - ) -> Option { + ) -> Option { if only_visible_entries { let entries = self .visible_entries @@ -2909,15 +2913,18 @@ impl ProjectPanel { .clone(); return utils::ReversibleIterable::new(entries.iter(), reverse_search) - .find(|ele| predicate(ele, worktree_id)) + .find(|ele| predicate(ele.to_ref(), worktree_id)) .cloned(); } let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?; worktree.update(cx, |tree, _| { - utils::ReversibleIterable::new(tree.entries(true, 0usize), reverse_search) - .find_single_ended(|ele| predicate(ele, worktree_id)) - .cloned() + utils::ReversibleIterable::new( + tree.entries(true, 0usize).with_git_statuses(), + reverse_search, + ) + .find_single_ended(|ele| predicate(*ele, worktree_id)) + .map(|ele| ele.to_owned()) }) } @@ -2925,7 +2932,7 @@ impl ProjectPanel { &self, start: Option<&SelectedEntry>, reverse_search: bool, - predicate: impl Fn(&Entry, WorktreeId) -> bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, cx: &mut ViewContext, ) -> Option { let mut worktree_ids: Vec<_> = self @@ -2947,7 +2954,9 @@ impl ProjectPanel { let root_entry = tree.root_entry()?; let tree_id = tree.id(); - let mut first_iter = tree.traverse_from_path(true, true, true, entry.path.as_ref()); + let mut first_iter = tree + .traverse_from_path(true, true, true, entry.path.as_ref()) + .with_git_statuses(); if reverse_search { first_iter.next(); @@ -2955,25 +2964,25 @@ impl ProjectPanel { let first = first_iter .enumerate() - .take_until(|(count, ele)| *ele == root_entry && *count != 0usize) - .map(|(_, ele)| ele) - .find(|ele| predicate(ele, tree_id)) - .cloned(); + .take_until(|(count, entry)| entry.entry == root_entry && *count != 0usize) + .map(|(_, entry)| entry) + .find(|ele| predicate(*ele, tree_id)) + .map(|ele| ele.to_owned()); - let second_iter = tree.entries(true, 0usize); + let second_iter = tree.entries(true, 0usize).with_git_statuses(); let second = if reverse_search { second_iter .take_until(|ele| ele.id == start.entry_id) - .filter(|ele| predicate(ele, tree_id)) + .filter(|ele| predicate(*ele, tree_id)) .last() - .cloned() + .map(|ele| ele.to_owned()) } else { second_iter .take_while(|ele| ele.id != start.entry_id) - .filter(|ele| predicate(ele, tree_id)) + .filter(|ele| predicate(*ele, tree_id)) .last() - .cloned() + .map(|ele| ele.to_owned()) }; if reverse_search { @@ -3030,7 +3039,7 @@ impl ProjectPanel { &self, start: Option<&SelectedEntry>, reverse_search: bool, - predicate: impl Fn(&Entry, WorktreeId) -> bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, cx: &mut ViewContext, ) -> Option { let mut worktree_ids: Vec<_> = self @@ -3072,8 +3081,8 @@ impl ProjectPanel { ) }; - let first_search = first_iter.find(|ele| predicate(ele, start.worktree_id)); - let second_search = second_iter.find(|ele| predicate(ele, start.worktree_id)); + let first_search = first_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id)); + let second_search = second_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id)); if first_search.is_some() { return first_search.map(|entry| SelectedEntry { diff --git 
a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 29e90cc71e8c2..5256c10ae7a0c 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1768,7 +1768,7 @@ message Entry { bool is_ignored = 7; bool is_external = 8; reserved 6; - optional GitStatus git_status = 9; + reserved 9; bool is_fifo = 10; optional uint64 size = 11; optional string canonical_path = 12; @@ -1784,10 +1784,12 @@ message StatusEntry { GitStatus status = 2; } +// TODO: model this git status better, replicating the staged and unstaged states enum GitStatus { Added = 0; Modified = 1; Conflict = 2; + Deleted = 3; } message BufferState { diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 773e7db88bad3..dfa167842d9cc 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -60,6 +60,7 @@ where } } + /// Item is None, when the list is empty, or this cursor is at the end of the list. #[track_caller] pub fn item(&self) -> Option<&'a T> { self.assert_did_seek(); diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index fbfe3b06f3ab4..fa37c6759948d 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -42,6 +42,21 @@ pub trait Summary: Clone { fn add_summary(&mut self, summary: &Self, cx: &Self::Context); } +/// This type exists because we can't implement Summary for () without causing +/// type resolution errors +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct Unit; + +impl Summary for Unit { + type Context = (); + + fn zero(_: &()) -> Self { + Unit + } + + fn add_summary(&mut self, _: &Self, _: &()) {} +} + /// Each [`Summary`] type can have more than one [`Dimension`] type that it measures. /// /// You can use dimensions to seek to a specific location in the [`SumTree`] @@ -761,6 +776,55 @@ impl SumTree { None } } + + #[inline] + pub fn contains(&self, key: &T::Key, cx: &::Context) -> bool { + self.get(key, cx).is_some() + } + + pub fn update( + &mut self, + key: &T::Key, + cx: &::Context, + f: F, + ) -> Option + where + F: FnOnce(&mut T) -> R, + { + let mut cursor = self.cursor::(cx); + let mut new_tree = cursor.slice(key, Bias::Left, cx); + let mut result = None; + if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal { + let mut updated = cursor.item().unwrap().clone(); + result = Some(f(&mut updated)); + new_tree.push(updated, cx); + cursor.next(cx); + } + new_tree.append(cursor.suffix(cx), cx); + drop(cursor); + *self = new_tree; + result + } + + pub fn retain bool>( + &mut self, + cx: &::Context, + mut predicate: F, + ) { + let mut new_map = SumTree::new(cx); + + let mut cursor = self.cursor::(cx); + cursor.next(cx); + while let Some(item) = cursor.item() { + if predicate(&item) { + new_map.push(item.clone(), cx); + } + cursor.next(cx); + } + drop(cursor); + + *self = new_map; + } } impl Default for SumTree diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index 64d9da71fa4f8..f076a4f1bc5a6 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -358,13 +358,14 @@ impl PickerDelegate for TabSwitcherDelegate { .item .project_path(cx) .as_ref() - .and_then(|path| self.project.read(cx).entry_for_path(path, cx)) - .map(|entry| { - entry_git_aware_label_color( - entry.git_status, - entry.is_ignored, - selected, - ) + .and_then(|path| { + let project = self.project.read(cx); + let entry = project.entry_for_path(path, cx)?; + let git_status = project.project_path_git_status(path, 
cx); + Some((entry, git_status)) + }) + .map(|(entry, git_status)| { + entry_git_aware_label_color(git_status, entry.is_ignored, selected) }) }) .flatten(); diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index dac9a98d69e3a..454a10f1e9e7e 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -17,7 +17,7 @@ use gpui::{ Interactivity, IntoElement, Model, MouseButton, ParentElement, Render, Stateful, StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; -use project::{Project, RepositoryEntry}; +use project::Project; use rpc::proto; use settings::Settings as _; use smallvec::SmallVec; @@ -451,7 +451,7 @@ impl TitleBar { let workspace = self.workspace.upgrade()?; let branch_name = entry .as_ref() - .and_then(RepositoryEntry::branch) + .and_then(|entry| entry.branch()) .map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?; Some( Button::new("project_branch_trigger", branch_name) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 9ee909f73eabb..493172bd9d678 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -21,7 +21,6 @@ use fuzzy::CharBag; use git::GitHostingProviderRegistry; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, - status::GitStatus, COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, }; use gpui::{ @@ -30,6 +29,7 @@ use gpui::{ }; use ignore::IgnoreStack; use language::DiskState; + use parking_lot::Mutex; use paths::local_settings_folder_relative_path; use postage::{ @@ -53,8 +53,9 @@ use std::{ ffi::OsStr, fmt, future::Future, - mem, - ops::{AddAssign, Deref, DerefMut, Sub}, + iter::FusedIterator, + mem::{self}, + ops::{Deref, DerefMut}, path::{Path, PathBuf}, pin::Pin, sync::{ @@ -63,7 +64,9 @@ use std::{ }, time::{Duration, Instant}, }; -use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; +use sum_tree::{ + Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit, +}; use text::{LineEnding, Rope}; use util::{ paths::{home_dir, PathMatcher, SanitizedPath}, @@ -154,7 +157,7 @@ pub struct Snapshot { entries_by_path: SumTree, entries_by_id: SumTree, always_included_entries: Vec>, - repository_entries: TreeMap, + repositories: SumTree, /// A number that increases every time the worktree begins scanning /// a set of paths from the filesystem. This scanning could be caused @@ -171,8 +174,80 @@ pub struct Snapshot { #[derive(Clone, Debug, PartialEq, Eq)] pub struct RepositoryEntry { - pub(crate) work_directory: WorkDirectoryEntry, + /// The git status entries for this repository. + /// Note that the paths on this repository are relative to the git work directory. + /// If the .git folder is external to Zed, these paths will be relative to that folder, + /// and this data structure might reference files external to this worktree. + /// + /// For example: + /// + /// my_root_folder/ <-- repository root + /// .git + /// my_sub_folder_1/ + /// project_root/ <-- Project root, Zed opened here + /// changed_file_1 <-- File with changes, in worktree + /// my_sub_folder_2/ + /// changed_file_2 <-- File with changes, out of worktree + /// ... 
+ /// + /// With this setup, this field would contain 2 entries, like so: + /// - my_sub_folder_1/project_root/changed_file_1 + /// - my_sub_folder_2/changed_file_2 + pub(crate) git_entries_by_path: SumTree, + pub(crate) work_directory_id: ProjectEntryId, + pub(crate) work_directory: WorkDirectory, pub(crate) branch: Option>, +} + +impl Deref for RepositoryEntry { + type Target = WorkDirectory; + + fn deref(&self) -> &Self::Target { + &self.work_directory + } +} + +impl AsRef for RepositoryEntry { + fn as_ref(&self) -> &Path { + &self.path + } +} + +impl RepositoryEntry { + pub fn branch(&self) -> Option> { + self.branch.clone() + } + + pub fn work_directory_id(&self) -> ProjectEntryId { + self.work_directory_id + } + + pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry { + self.into() + } + + pub fn status(&self) -> impl Iterator + '_ { + self.git_entries_by_path.iter().cloned() + } +} + +impl From<&RepositoryEntry> for proto::RepositoryEntry { + fn from(value: &RepositoryEntry) -> Self { + proto::RepositoryEntry { + work_directory_id: value.work_directory_id.to_proto(), + branch: value.branch.as_ref().map(|str| str.to_string()), + } + } +} + +/// This path corresponds to the 'content path' of a repository in relation +/// to Zed's project root. +/// In the majority of the cases, this is the folder that contains the .git folder. +/// But if a sub-folder of a git repository is opened, this corresponds to the +/// project root and the .git folder is located in a parent directory. +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct WorkDirectory { + path: Arc, /// If location_in_repo is set, it means the .git folder is external /// and in a parent folder of the project root. @@ -195,23 +270,14 @@ pub struct RepositoryEntry { pub(crate) location_in_repo: Option>, } -impl RepositoryEntry { - pub fn branch(&self) -> Option> { - self.branch.clone() - } - - pub fn work_directory_id(&self) -> ProjectEntryId { - *self.work_directory - } - - pub fn work_directory(&self, snapshot: &Snapshot) -> Option { - snapshot - .entry_for_id(self.work_directory_id()) - .map(|entry| RepositoryWorkDirectory(entry.path.clone())) +impl WorkDirectory { + pub fn path_key(&self) -> PathKey { + PathKey(self.path.clone()) } - pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry { - self.into() + pub fn contains(&self, path: impl AsRef) -> bool { + let path = path.as_ref(); + path.starts_with(&self.path) } /// relativize returns the given project path relative to the root folder of the @@ -219,53 +285,50 @@ impl RepositoryEntry { /// If the root of the repository (and its .git folder) are located in a parent folder /// of the project root folder, then the returned RepoPath is relative to the root /// of the repository and not a valid path inside the project. 
- pub fn relativize(&self, worktree: &Snapshot, path: &Path) -> Result { - let relativize_path = |path: &Path| { - let entry = worktree - .entry_for_id(self.work_directory.0) - .ok_or_else(|| anyhow!("entry not found"))?; - + pub fn relativize(&self, path: &Path) -> Result { + if let Some(location_in_repo) = &self.location_in_repo { + Ok(location_in_repo.join(path).into()) + } else { let relativized_path = path - .strip_prefix(&entry.path) - .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, entry.path))?; + .strip_prefix(&self.path) + .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?; Ok(relativized_path.into()) - }; + } + } - if let Some(location_in_repo) = &self.location_in_repo { - relativize_path(&location_in_repo.join(path)) + /// FIXME come up with a better name + pub fn unrelativize(&self, path: &RepoPath) -> Option> { + if let Some(location) = &self.location_in_repo { + // If we fail to strip the prefix, that means this status entry is + // external to this worktree, and we definitely won't have an entry_id + path.strip_prefix(location).ok().map(Into::into) } else { - relativize_path(path) + Some(self.path.join(path).into()) } } } -impl From<&RepositoryEntry> for proto::RepositoryEntry { - fn from(value: &RepositoryEntry) -> Self { - proto::RepositoryEntry { - work_directory_id: value.work_directory.to_proto(), - branch: value.branch.as_ref().map(|str| str.to_string()), +impl Default for WorkDirectory { + fn default() -> Self { + Self { + path: Arc::from(Path::new("")), + location_in_repo: None, } } } -/// This path corresponds to the 'content path' of a repository in relation -/// to Zed's project root. -/// In the majority of the cases, this is the folder that contains the .git folder. -/// But if a sub-folder of a git repository is opened, this corresponds to the -/// project root and the .git folder is located in a parent directory. -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] -pub struct RepositoryWorkDirectory(pub(crate) Arc); +impl Deref for WorkDirectory { + type Target = Path; -impl Default for RepositoryWorkDirectory { - fn default() -> Self { - RepositoryWorkDirectory(Arc::from(Path::new(""))) + fn deref(&self) -> &Self::Target { + self.as_ref() } } -impl AsRef for RepositoryWorkDirectory { +impl AsRef for WorkDirectory { fn as_ref(&self) -> &Path { - self.0.as_ref() + self.path.as_ref() } } @@ -317,6 +380,7 @@ struct BackgroundScannerState { #[derive(Debug, Clone)] pub struct LocalRepositoryEntry { + pub(crate) work_directory: WorkDirectory, pub(crate) git_dir_scan_id: usize, pub(crate) repo_ptr: Arc, /// Absolute path to the actual .git folder. 
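A worked sketch (not part of the patch) of the two `WorkDirectory` shapes that `relativize` and `unrelativize` above distinguish, reusing the folder names from the `RepositoryEntry` doc comment. The free functions below stand in for the methods and use only std path operations; they show why a status entry outside the opened worktree has no project-relative path.

use std::path::{Path, PathBuf};

// Case 1: the .git folder is inside the worktree (location_in_repo is None):
// a project path is relativized by stripping the work directory prefix.
fn relativize_simple(work_dir: &Path, project_path: &Path) -> Option<PathBuf> {
    project_path.strip_prefix(work_dir).ok().map(|p| p.to_path_buf())
}

// Case 2: Zed was opened on a sub-folder of the repository, so
// location_in_repo ("my_sub_folder_1/project_root") is prepended to build a
// repo-relative path...
fn relativize_external(location_in_repo: &Path, project_path: &Path) -> PathBuf {
    location_in_repo.join(project_path)
}

// ...and unrelativize strips it again, returning None for repo paths that
// fall outside the opened worktree (like my_sub_folder_2/changed_file_2).
fn unrelativize_external(location_in_repo: &Path, repo_path: &Path) -> Option<PathBuf> {
    repo_path.strip_prefix(location_in_repo).ok().map(|p| p.to_path_buf())
}

fn main() {
    assert_eq!(
        relativize_simple(Path::new("my_root_folder"), Path::new("my_root_folder/a.rs")),
        Some(PathBuf::from("a.rs"))
    );

    let loc = Path::new("my_sub_folder_1/project_root");
    let repo_path = relativize_external(loc, Path::new("changed_file_1"));
    assert_eq!(repo_path, PathBuf::from("my_sub_folder_1/project_root/changed_file_1"));
    assert_eq!(
        unrelativize_external(loc, &repo_path),
        Some(PathBuf::from("changed_file_1"))
    );
    assert_eq!(
        unrelativize_external(loc, Path::new("my_sub_folder_2/changed_file_2")),
        None
    );
}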
@@ -326,12 +390,39 @@ pub struct LocalRepositoryEntry { pub(crate) dot_git_worktree_abs_path: Option>, } +impl sum_tree::Item for LocalRepositoryEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.work_directory.path.clone(), + item_summary: Unit, + } + } +} + +impl KeyedItem for LocalRepositoryEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.work_directory.path.clone()) + } +} + impl LocalRepositoryEntry { pub fn repo(&self) -> &Arc { &self.repo_ptr } } +impl Deref for LocalRepositoryEntry { + type Target = WorkDirectory; + + fn deref(&self) -> &Self::Target { + &self.work_directory + } +} + impl Deref for LocalSnapshot { type Target = Snapshot; @@ -716,9 +807,9 @@ impl Worktree { let snapshot = this.snapshot(); cx.background_executor().spawn(async move { if let Some(repo) = snapshot.repository_for_path(&path) { - if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() { + if let Some(repo_path) = repo.relativize(&path).log_err() { if let Some(git_repo) = - snapshot.git_repositories.get(&*repo.work_directory) + snapshot.git_repositories.get(&repo.work_directory_id) { return Ok(git_repo.repo_ptr.load_index_text(&repo_path)); } @@ -1234,8 +1325,8 @@ impl LocalWorktree { if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id { if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1251,8 +1342,8 @@ impl LocalWorktree { Ordering::Greater => { if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1279,8 +1370,8 @@ impl LocalWorktree { (None, Some((entry_id, _))) => { if let Some(entry) = old_snapshot.entry_for_id(entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1325,12 +1416,12 @@ impl LocalWorktree { } pub fn local_git_repo(&self, path: &Path) -> Option> { - self.repo_for_path(path) - .map(|(_, entry)| entry.repo_ptr.clone()) + self.local_repo_for_path(path) + .map(|local_repo| local_repo.repo_ptr.clone()) } pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> { - self.git_repositories.get(&repo.work_directory.0) + self.git_repositories.get(&repo.work_directory_id) } fn load_binary_file( @@ -2086,7 +2177,7 @@ impl Snapshot { always_included_entries: Default::default(), entries_by_path: Default::default(), entries_by_id: Default::default(), - repository_entries: Default::default(), + repositories: Default::default(), scan_id: 1, completed_scan_id: 0, } @@ -2121,8 +2212,8 @@ impl Snapshot { updated_entries.sort_unstable_by_key(|e| e.id); let mut updated_repositories = self - .repository_entries - .values() + .repositories + .iter() .map(proto::RepositoryEntry::from) .collect::>(); updated_repositories.sort_unstable_by_key(|e| e.work_directory_id); @@ -2186,7 +2277,7 @@ impl Snapshot { self.entries_by_path = { let mut cursor = self.entries_by_path.cursor::(&()); let mut new_entries_by_path = - 
cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &()); + cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &()); while let Some(entry) = cursor.item() { if entry.path.starts_with(&removed_entry.path) { self.entries_by_id.remove(&entry.id, &()); @@ -2202,12 +2293,14 @@ impl Snapshot { Some(removed_entry.path) } - #[cfg(any(test, feature = "test-support"))] - pub fn status_for_file(&self, path: impl Into) -> Option { - let path = path.into(); - self.entries_by_path - .get(&PathKey(Arc::from(path)), &()) - .and_then(|entry| entry.git_status) + pub fn status_for_file(&self, path: impl AsRef) -> Option { + let path = path.as_ref(); + self.repository_for_path(path).and_then(|repo| { + let repo_path = repo.relativize(path).unwrap(); + repo.git_entries_by_path + .get(&PathKey(repo_path.0), &()) + .map(|entry| entry.git_status) + }) } fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) { @@ -2267,35 +2360,40 @@ impl Snapshot { self.entries_by_id.edit(entries_by_id_edits, &()); update.removed_repositories.sort_unstable(); - self.repository_entries.retain(|_, entry| { + self.repositories.retain(&(), |entry: &RepositoryEntry| { update .removed_repositories - .binary_search(&entry.work_directory.to_proto()) + .binary_search(&entry.work_directory_id.to_proto()) .is_err() }); for repository in update.updated_repositories { - let work_directory_entry: WorkDirectoryEntry = - ProjectEntryId::from_proto(repository.work_directory_id).into(); - - if let Some(entry) = self.entry_for_id(*work_directory_entry) { - let work_directory = RepositoryWorkDirectory(entry.path.clone()); - if self.repository_entries.get(&work_directory).is_some() { - self.repository_entries.update(&work_directory, |repo| { - repo.branch = repository.branch.map(Into::into); - }); + let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id); + if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) { + if self + .repositories + .contains(&PathKey(work_dir_entry.path.clone()), &()) + { + self.repositories + .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| { + repo.branch = repository.branch.map(Into::into); + }); } else { - self.repository_entries.insert( - work_directory, + self.repositories.insert_or_replace( RepositoryEntry { - work_directory: work_directory_entry, + work_directory_id, + work_directory: WorkDirectory { + path: work_dir_entry.path.clone(), + // When syncing repository entries from a peer, we don't need + // the location_in_repo field, since git operations don't happen locally + // anyway. + location_in_repo: None, + }, branch: repository.branch.map(Into::into), - // When syncing repository entries from a peer, we don't need - // the location_in_repo field, since git operations don't happen locally - // anyway. 
- location_in_repo: None, + git_entries_by_path: Default::default(), }, - ) + &(), + ); } } else { log::error!("no work directory entry for repository {:?}", repository) } @@ -2355,6 +2453,7 @@ impl Snapshot { &(), ); Traversal { + repositories: &self.repositories, cursor, include_files, include_dirs, @@ -2370,6 +2469,7 @@ impl Snapshot { path: &Path, ) -> Traversal { Traversal::new( + &self.repositories, &self.entries_by_path, include_files, include_dirs, @@ -2390,33 +2490,29 @@ impl Snapshot { self.traverse_from_offset(true, true, include_ignored, start) } - pub fn repositories(&self) -> impl Iterator, &RepositoryEntry)> { - self.repository_entries - .iter() - .map(|(path, entry)| (&path.0, entry)) + #[cfg(any(feature = "test-support", test))] + pub fn git_status(&self, work_dir: &Path) -> Option> { + self.repositories + .get(&PathKey(work_dir.into()), &()) + .map(|repo| repo.status().collect()) } - pub fn repository_for_work_directory(&self, path: &Path) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(path.into())) - .cloned() + pub fn repositories(&self) -> impl Iterator { + self.repositories.iter() } - /// Get the repository whose work directory contains the given path. - pub fn repository_for_path(&self, path: &Path) -> Option { - self.repository_and_work_directory_for_path(path) - .map(|e| e.1) + /// Get the repository whose work directory corresponds to the given path. + pub(crate) fn repository(&self, work_directory: PathKey) -> Option { + self.repositories.get(&work_directory, &()).cloned() } - pub fn repository_and_work_directory_for_path( - &self, - path: &Path, - ) -> Option<(RepositoryWorkDirectory, RepositoryEntry)> { - self.repository_entries + /// Get the repository whose work directory contains the given path. + pub fn repository_for_path(&self, path: &Path) -> Option { + self.repositories .iter() - .filter(|(workdir_path, _)| path.starts_with(workdir_path)) + .filter(|repo| repo.contains(path)) .last() - .map(|(path, repo)| (path.clone(), repo.clone())) + .map(|repo| repo.clone()) } /// Given an ordered iterator of entries, returns an iterator of those entries, @@ -2425,40 +2521,50 @@ impl Snapshot { &'a self, entries: impl 'a + Iterator, ) -> impl 'a + Iterator)> { - let mut containing_repos = Vec::<(&Arc, &RepositoryEntry)>::new(); + let mut containing_repos = Vec::<&RepositoryEntry>::new(); let mut repositories = self.repositories().peekable(); entries.map(move |entry| { - while let Some((repo_path, _)) = containing_repos.last() { - if entry.path.starts_with(repo_path) { + while let Some(repository) = containing_repos.last() { + if repository.contains(&entry.path) { break; } else { containing_repos.pop(); } } - while let Some((repo_path, _)) = repositories.peek() { - if entry.path.starts_with(repo_path) { + while let Some(repository) = repositories.peek() { + if repository.contains(&entry.path) { containing_repos.push(repositories.next().unwrap()); } else { break; } } - let repo = containing_repos.last().map(|(_, repo)| *repo); + let repo = containing_repos.last().copied(); (entry, repo) }) } - /// Updates the `git_status` of the given entries such that files' - /// statuses bubble up to their ancestor directories.
- pub fn propagate_git_statuses(&self, result: &mut [Entry]) { - let mut cursor = self - .entries_by_path - .cursor::<(TraversalProgress, GitStatuses)>(&()); + pub fn propagate_git_statuses(&self, entries: &mut [GitEntry]) -> Vec> { + let mut cursor = all_statuses_cursor(self); let mut entry_stack = Vec::<(usize, GitStatuses)>::new(); - let mut result_ix = 0; + let mut result = entries + .iter() + .map(|entry| { + if entry.is_dir() { + return None; + } + + let repository = self.repository_for_path(&entry.path)?; + let RepoPath(path) = repository.relativize(&entry.path).ok()?; + let git_entry = repository.git_entries_by_path.get(&PathKey(path), &())?; + Some(git_entry.git_status) + }) + .collect::>(); + + let mut entry_ix = 0; loop { - let next_entry = result.get(result_ix); - let containing_entry = entry_stack.last().map(|(ix, _)| &result[*ix]); + let next_entry = entries.get(entry_ix); + let containing_entry = entry_stack.last().map(|(ix, _)| &entries[*ix]); let entry_to_finish = match (containing_entry, next_entry) { (Some(_), None) => entry_stack.pop(), @@ -2474,15 +2580,11 @@ impl Snapshot { }; if let Some((entry_ix, prev_statuses)) = entry_to_finish { - cursor.seek_forward( - &TraversalTarget::PathSuccessor(&result[entry_ix].path), - Bias::Left, - &(), - ); + cursor.seek_forward(&PathTarget::Successor(&entries[entry_ix].path)); - let statuses = cursor.start().1 - prev_statuses; + let statuses = cursor.start() - prev_statuses; - result[entry_ix].git_status = if statuses.conflict > 0 { + result[entry_ix] = if statuses.conflict > 0 { Some(GitFileStatus::Conflict) } else if statuses.modified > 0 { Some(GitFileStatus::Modified) @@ -2492,17 +2594,15 @@ impl Snapshot { None }; } else { - if result[result_ix].is_dir() { - cursor.seek_forward( - &TraversalTarget::Path(&result[result_ix].path), - Bias::Left, - &(), - ); - entry_stack.push((result_ix, cursor.start().1)); + if entries[entry_ix].is_dir() { + cursor.seek_forward(&PathTarget::Path(&entries[entry_ix].path)); + entry_stack.push((entry_ix, cursor.start())); } - result_ix += 1; + entry_ix += 1; } } + + result } pub fn paths(&self) -> impl Iterator> { @@ -2515,9 +2615,10 @@ impl Snapshot { pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> { let mut cursor = self.entries_by_path.cursor(&()); - cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &()); + cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &()); let traversal = Traversal { cursor, + repositories: &self.repositories, include_files: true, include_dirs: true, include_ignored: true, @@ -2543,19 +2644,19 @@ impl Snapshot { } pub fn root_git_entry(&self) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(Path::new("").into())) + self.repositories + .get(&PathKey(Path::new("").into()), &()) .map(|entry| entry.to_owned()) } pub fn git_entry(&self, work_directory_path: Arc) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(work_directory_path)) + self.repositories + .get(&PathKey(work_directory_path), &()) .map(|entry| entry.to_owned()) } pub fn git_entries(&self) -> impl Iterator { - self.repository_entries.values() + self.repositories.iter() } pub fn scan_id(&self) -> usize { @@ -2586,10 +2687,10 @@ impl Snapshot { } impl LocalSnapshot { - pub fn repo_for_path(&self, path: &Path) -> Option<(RepositoryEntry, &LocalRepositoryEntry)> { - let (_, repo_entry) = self.repository_and_work_directory_for_path(path)?; - let work_directory_id = repo_entry.work_directory_id(); - Some((repo_entry, 
self.git_repositories.get(&work_directory_id)?)) + pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> { + let repository_entry = self.repository_for_path(path)?; + let work_directory_id = repository_entry.work_directory_id(); + self.git_repositories.get(&work_directory_id) } fn build_update( @@ -2613,9 +2714,7 @@ impl LocalSnapshot { } for (work_dir_path, change) in repo_changes.iter() { - let new_repo = self - .repository_entries - .get(&RepositoryWorkDirectory(work_dir_path.clone())); + let new_repo = self.repositories.get(&PathKey(work_dir_path.clone()), &()); match (&change.old_repository, new_repo) { (Some(old_repo), Some(new_repo)) => { updated_repositories.push(new_repo.build_update(old_repo)); @@ -2624,7 +2723,7 @@ impl LocalSnapshot { updated_repositories.push(proto::RepositoryEntry::from(new_repo)); } (Some(old_repo), None) => { - removed_repositories.push(old_repo.work_directory.0.to_proto()); + removed_repositories.push(old_repo.work_directory_id.to_proto()); } _ => {} } @@ -2827,15 +2926,15 @@ impl LocalSnapshot { .map(|repo| repo.1.dot_git_dir_abs_path.clone()) .collect::>(); let work_dir_paths = self - .repository_entries + .repositories .iter() - .map(|repo| repo.0.clone().0) + .map(|repo| repo.work_directory.path.clone()) .collect::>(); assert_eq!(dotgit_paths.len(), work_dir_paths.len()); - assert_eq!(self.repository_entries.iter().count(), work_dir_paths.len()); + assert_eq!(self.repositories.iter().count(), work_dir_paths.len()); assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len()); - for (_, entry) in self.repository_entries.iter() { - self.git_repositories.get(&entry.work_directory).unwrap(); + for entry in self.repositories.iter() { + self.git_repositories.get(&entry.work_directory_id).unwrap(); } } @@ -2872,23 +2971,7 @@ impl BackgroundScannerState { let path = entry.path.clone(); let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); - let mut containing_repository = None; - if !ignore_stack.is_abs_path_ignored(&abs_path, true) { - if let Some((repo_entry, repo)) = self.snapshot.repo_for_path(&path) { - if let Some(workdir_path) = repo_entry.work_directory(&self.snapshot) { - if let Ok(repo_path) = repo_entry.relativize(&self.snapshot, &path) { - containing_repository = Some(ScanJobContainingRepository { - work_directory: workdir_path, - statuses: repo - .repo_ptr - .status(&[repo_path.0]) - .log_err() - .unwrap_or_default(), - }); - } - } - } - } + if !ancestor_inodes.contains(&entry.inode) { ancestor_inodes.insert(entry.inode); scan_job_tx @@ -2899,7 +2982,6 @@ impl BackgroundScannerState { scan_queue: scan_job_tx.clone(), ancestor_inodes, is_external: entry.is_external, - containing_repository, }) .unwrap(); } @@ -3006,8 +3088,8 @@ impl BackgroundScannerState { .snapshot .entries_by_path .cursor::(&()); - new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); - removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &()); + new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &()); + removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &()); new_entries.append(cursor.suffix(&()), &()); } self.snapshot.entries_by_path = new_entries; @@ -3053,9 +3135,9 @@ impl BackgroundScannerState { self.snapshot .git_repositories .retain(|id, _| removed_ids.binary_search(id).is_err()); - self.snapshot - .repository_entries - .retain(|repo_path, _| 
!repo_path.0.starts_with(path)); + self.snapshot.repositories.retain(&(), |repository| { + !repository.work_directory.starts_with(path) + }); #[cfg(test)] self.snapshot.check_invariants(false); @@ -3066,7 +3148,7 @@ impl BackgroundScannerState { dot_git_path: Arc, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option<(RepositoryWorkDirectory, Arc)> { + ) -> Option { let work_dir_path: Arc = match dot_git_path.parent() { Some(parent_dir) => { // Guard against repositories inside the repository metadata @@ -3102,7 +3184,7 @@ impl BackgroundScannerState { location_in_repo: Option>, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option<(RepositoryWorkDirectory, Arc)> { + ) -> Option { let work_dir_id = self .snapshot .entry_for_path(work_dir_path.clone()) @@ -3134,7 +3216,10 @@ impl BackgroundScannerState { }; log::trace!("constructed libgit2 repo in {:?}", t0.elapsed()); - let work_directory = RepositoryWorkDirectory(work_dir_path.clone()); + let work_directory = WorkDirectory { + path: work_dir_path.clone(), + location_in_repo, + }; if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() { git_hosting_providers::register_additional_providers( @@ -3143,25 +3228,29 @@ impl BackgroundScannerState { ); } - self.snapshot.repository_entries.insert( - work_directory.clone(), + self.snapshot.repositories.insert_or_replace( RepositoryEntry { - work_directory: work_dir_id.into(), + work_directory_id: work_dir_id.into(), + work_directory: work_directory.clone(), branch: repository.branch_name().map(Into::into), - location_in_repo, - }, - ); - self.snapshot.git_repositories.insert( - work_dir_id, - LocalRepositoryEntry { - git_dir_scan_id: 0, - repo_ptr: repository.clone(), - dot_git_dir_abs_path: actual_dot_git_dir_abs_path, - dot_git_worktree_abs_path, + git_entries_by_path: Default::default(), }, + &(), ); - Some((work_directory, repository)) + let local_repository = LocalRepositoryEntry { + work_directory: work_directory.clone(), + git_dir_scan_id: 0, + repo_ptr: repository.clone(), + dot_git_dir_abs_path: actual_dot_git_dir_abs_path, + dot_git_worktree_abs_path, + }; + + self.snapshot + .git_repositories + .insert(work_dir_id, local_repository.clone()); + + Some(local_repository) } } @@ -3466,7 +3555,7 @@ pub struct Entry { /// directory is expanded. External entries are treated like gitignored /// entries in that they are not included in searches. pub is_external: bool, - pub git_status: Option, + /// Whether this entry is considered to be a `.env` file. pub is_private: bool, /// The entry's size on disk, in bytes. 
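With `git_status` removed from `Entry` in the hunk above, callers read statuses through the repository entries instead. A minimal sketch of the two lookups this patch provides follows; it assumes a `snapshot` borrowed from an existing worktree, and the paths and function name are placeholders.

use std::path::Path;

fn print_git_statuses(snapshot: &Snapshot) {
    // Point lookup: find the containing repository and consult its status tree.
    let status = snapshot.status_for_file("src/main.rs");
    println!("src/main.rs: {:?}", status);

    // Bulk lookup: decorate a traversal so each yielded entry carries its status.
    for entry in snapshot
        .traverse_from_path(true, false, true, Path::new(""))
        .with_git_statuses()
    {
        println!("{:?}: {:?}", entry.path, entry.git_status);
    }
}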
@@ -3507,127 +3596,370 @@ pub struct GitRepositoryChange { pub type UpdatedEntriesSet = Arc<[(Arc, ProjectEntryId, PathChange)]>; pub type UpdatedGitRepositoriesSet = Arc<[(Arc, GitRepositoryChange)]>; -impl Entry { - fn new( - path: Arc, - metadata: &fs::Metadata, - next_entry_id: &AtomicUsize, - root_char_bag: CharBag, - canonical_path: Option>, - ) -> Self { - let char_bag = char_bag_for_path(root_char_bag, &path); - Self { - id: ProjectEntryId::new(next_entry_id), - kind: if metadata.is_dir { - EntryKind::PendingDir - } else { - EntryKind::File - }, - path, - inode: metadata.inode, - mtime: Some(metadata.mtime), - size: metadata.len, - canonical_path, - is_ignored: false, - is_always_included: false, - is_external: false, - is_private: false, - git_status: None, - char_bag, - is_fifo: metadata.is_fifo, - } - } +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct StatusEntry { + pub repo_path: RepoPath, + pub git_status: GitFileStatus, +} - pub fn is_created(&self) -> bool { - self.mtime.is_some() - } +#[derive(Clone, Debug)] +struct PathProgress<'a> { + max_path: &'a Path, +} - pub fn is_dir(&self) -> bool { - self.kind.is_dir() - } +#[derive(Clone, Debug)] +pub struct PathSummary { + max_path: Arc, + item_summary: S, +} - pub fn is_file(&self) -> bool { - self.kind.is_file() - } +impl Summary for PathSummary { + type Context = S::Context; - pub fn git_status(&self) -> Option { - self.git_status + fn zero(cx: &Self::Context) -> Self { + Self { + max_path: Path::new("").into(), + item_summary: S::zero(cx), + } } -} -impl EntryKind { - pub fn is_dir(&self) -> bool { - matches!( - self, - EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir - ) + fn add_summary(&mut self, rhs: &Self, cx: &Self::Context) { + self.max_path = rhs.max_path.clone(); + self.item_summary.add_summary(&rhs.item_summary, cx); } +} - pub fn is_unloaded(&self) -> bool { - matches!(self, EntryKind::UnloadedDir) +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathProgress<'a> { + fn zero(_: & as Summary>::Context) -> Self { + Self { + max_path: Path::new(""), + } } - pub fn is_file(&self) -> bool { - matches!(self, EntryKind::File) + fn add_summary( + &mut self, + summary: &'a PathSummary, + _: & as Summary>::Context, + ) { + self.max_path = summary.max_path.as_ref() } } -impl sum_tree::Item for Entry { - type Summary = EntrySummary; - - fn summary(&self, _cx: &()) -> Self::Summary { - let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included - { - 0 - } else { - 1 - }; - let file_count; - let non_ignored_file_count; - if self.is_file() { - file_count = 1; - non_ignored_file_count = non_ignored_count; - } else { - file_count = 0; - non_ignored_file_count = 0; - } - - let mut statuses = GitStatuses::default(); - if let Some(status) = self.git_status { - match status { - GitFileStatus::Added => statuses.added = 1, - GitFileStatus::Modified => statuses.modified = 1, - GitFileStatus::Conflict => statuses.conflict = 1, - } - } +impl sum_tree::Item for RepositoryEntry { + type Summary = PathSummary; - EntrySummary { - max_path: self.path.clone(), - count: 1, - non_ignored_count, - file_count, - non_ignored_file_count, - statuses, + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.work_directory.path.clone(), + item_summary: Unit, } } } -impl sum_tree::KeyedItem for Entry { +impl sum_tree::KeyedItem for RepositoryEntry { type Key = PathKey; fn key(&self) -> Self::Key { - PathKey(self.path.clone()) + 
PathKey(self.work_directory.path.clone()) } } -#[derive(Clone, Debug)] -pub struct EntrySummary { - max_path: Arc, - count: usize, - non_ignored_count: usize, - file_count: usize, +impl sum_tree::Summary for GitStatuses { + type Context = (); + + fn zero(_: &Self::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, rhs: &Self, _: &Self::Context) { + *self += *rhs; + } +} + +impl sum_tree::Item for StatusEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.repo_path.0.clone(), + item_summary: match self.git_status { + GitFileStatus::Added => GitStatuses { + added: 1, + ..Default::default() + }, + GitFileStatus::Modified => GitStatuses { + modified: 1, + ..Default::default() + }, + GitFileStatus::Conflict => GitStatuses { + conflict: 1, + ..Default::default() + }, + GitFileStatus::Deleted => Default::default(), + GitFileStatus::Untracked => GitStatuses { + untracked: 1, + ..Default::default() + }, + }, + } + } +} + +impl sum_tree::KeyedItem for StatusEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.repo_path.0.clone()) + } +} + +#[derive(Clone, Debug, Default, Copy)] +pub struct GitStatuses { + added: usize, + modified: usize, + conflict: usize, + untracked: usize, +} + +impl std::ops::Add for GitStatuses { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + GitStatuses { + added: self.added + rhs.added, + modified: self.modified + rhs.modified, + conflict: self.conflict + rhs.conflict, + untracked: self.untracked + rhs.untracked, + } + } +} + +impl std::ops::AddAssign for GitStatuses { + fn add_assign(&mut self, rhs: Self) { + self.added += rhs.added; + self.modified += rhs.modified; + self.conflict += rhs.conflict; + self.untracked += rhs.untracked; + } +} + +impl std::ops::Sub for GitStatuses { + type Output = GitStatuses; + + fn sub(self, rhs: Self) -> Self::Output { + GitStatuses { + added: self.added - rhs.added, + modified: self.modified - rhs.modified, + conflict: self.conflict - rhs.conflict, + untracked: self.untracked - rhs.untracked, + } + } +} + +impl<'a> sum_tree::Dimension<'a, PathSummary> for GitStatuses { + fn zero(_cx: &()) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &()) { + *self += summary.item_summary + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathKey { + fn zero(_: &S::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &S::Context) { + self.0 = summary.max_path.clone(); + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for TraversalProgress<'a> { + fn zero(_cx: &S::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &S::Context) { + self.max_path = summary.max_path.as_ref(); + } +} + +struct AllStatusesCursor<'a, I> { + repos: I, + current_location: Option<( + &'a WorkDirectory, + Cursor<'a, StatusEntry, (TraversalProgress<'a>, GitStatuses)>, + )>, + statuses_before_current_repo: GitStatuses, +} + +impl<'a, I> AllStatusesCursor<'a, I> +where + I: Iterator + FusedIterator, +{ + fn seek_forward(&mut self, target: &PathTarget<'_>) { + loop { + let (work_dir, cursor) = match &mut self.current_location { + Some(location) => location, + None => { + let Some(entry) = self.repos.next() else { + break; + }; + + self.current_location.insert(( + &entry.work_directory, + entry + .git_entries_by_path + .cursor::<(TraversalProgress<'_>, 
GitStatuses)>(&()), + )) + } + }; + + if let Some(repo_path) = work_dir.relativize(target.path()).ok() { + let target = &target.with_path(&repo_path); + cursor.seek_forward(target, Bias::Left, &()); + if let Some(_) = cursor.item() { + break; + } + } else if target.cmp_path(&work_dir.path).is_gt() { + // Fill the cursor with everything from this intermediary repository + cursor.seek_forward(target, Bias::Right, &()); + } else { + break; + } + + self.statuses_before_current_repo += cursor.start().1; + self.current_location = None; + } + } + + fn start(&self) -> GitStatuses { + if let Some((_, cursor)) = self.current_location.as_ref() { + cursor.start().1 + self.statuses_before_current_repo + } else { + self.statuses_before_current_repo + } + } +} + +fn all_statuses_cursor<'a>( + snapshot: &'a Snapshot, +) -> AllStatusesCursor<'a, impl Iterator + FusedIterator> { + let repos = snapshot.repositories().fuse(); + AllStatusesCursor { + repos, + current_location: None, + statuses_before_current_repo: Default::default(), + } +} + +impl Entry { + fn new( + path: Arc, + metadata: &fs::Metadata, + next_entry_id: &AtomicUsize, + root_char_bag: CharBag, + canonical_path: Option>, + ) -> Self { + let char_bag = char_bag_for_path(root_char_bag, &path); + Self { + id: ProjectEntryId::new(next_entry_id), + kind: if metadata.is_dir { + EntryKind::PendingDir + } else { + EntryKind::File + }, + path, + inode: metadata.inode, + mtime: Some(metadata.mtime), + size: metadata.len, + canonical_path, + is_ignored: false, + is_always_included: false, + is_external: false, + is_private: false, + char_bag, + is_fifo: metadata.is_fifo, + } + } + + pub fn is_created(&self) -> bool { + self.mtime.is_some() + } + + pub fn is_dir(&self) -> bool { + self.kind.is_dir() + } + + pub fn is_file(&self) -> bool { + self.kind.is_file() + } +} + +impl EntryKind { + pub fn is_dir(&self) -> bool { + matches!( + self, + EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir + ) + } + + pub fn is_unloaded(&self) -> bool { + matches!(self, EntryKind::UnloadedDir) + } + + pub fn is_file(&self) -> bool { + matches!(self, EntryKind::File) + } +} + +impl sum_tree::Item for Entry { + type Summary = EntrySummary; + + fn summary(&self, _cx: &()) -> Self::Summary { + let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included + { + 0 + } else { + 1 + }; + let file_count; + let non_ignored_file_count; + if self.is_file() { + file_count = 1; + non_ignored_file_count = non_ignored_count; + } else { + file_count = 0; + non_ignored_file_count = 0; + } + + EntrySummary { + max_path: self.path.clone(), + count: 1, + non_ignored_count, + file_count, + non_ignored_file_count, + } + } +} + +impl sum_tree::KeyedItem for Entry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.path.clone()) + } +} + +#[derive(Clone, Debug)] +pub struct EntrySummary { + max_path: Arc, + count: usize, + non_ignored_count: usize, + file_count: usize, non_ignored_file_count: usize, - statuses: GitStatuses, } impl Default for EntrySummary { @@ -3638,7 +3970,6 @@ impl Default for EntrySummary { non_ignored_count: 0, file_count: 0, non_ignored_file_count: 0, - statuses: Default::default(), } } } @@ -3656,7 +3987,6 @@ impl sum_tree::Summary for EntrySummary { self.non_ignored_count += rhs.non_ignored_count; self.file_count += rhs.file_count; self.non_ignored_file_count += rhs.non_ignored_file_count; - self.statuses += rhs.statuses; } } @@ -4224,7 +4554,6 @@ impl BackgroundScanner { let next_entry_id = 
self.next_entry_id.clone(); let mut ignore_stack = job.ignore_stack.clone(); - let mut containing_repository = job.containing_repository.clone(); let mut new_ignore = None; let mut root_canonical_path = None; let mut new_entries: Vec = Vec::new(); @@ -4261,16 +4590,9 @@ impl BackgroundScanner { self.watcher.as_ref(), ); - if let Some((work_directory, repository)) = repo { - let t0 = Instant::now(); - let statuses = repository - .status(&[PathBuf::from("")]) - .log_err() - .unwrap_or_default(); - log::trace!("computed git status in {:?}", t0.elapsed()); - containing_repository = Some(ScanJobContainingRepository { - work_directory, - statuses, + if let Some(local_repo) = repo { + self.update_git_statuses(UpdateGitStatusesJob { + local_repository: local_repo, }); } } else if child_name == *GITIGNORE { @@ -4370,20 +4692,11 @@ impl BackgroundScanner { }, ancestor_inodes, scan_queue: job.scan_queue.clone(), - containing_repository: containing_repository.clone(), })); } } else { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); child_entry.is_always_included = self.settings.is_path_always_included(&child_path); - if !child_entry.is_ignored { - if let Some(repo) = &containing_repository { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repo.work_directory) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = repo.statuses.get(&repo_path); - } - } - } } { @@ -4440,6 +4753,7 @@ impl BackgroundScanner { abs_paths: Vec, scan_queue_tx: Option>, ) { + // grab metadata for all requested paths let metadata = futures::future::join_all( abs_paths .iter() @@ -4487,28 +4801,44 @@ impl BackgroundScanner { // Group all relative paths by their git repository. let mut paths_by_git_repo = HashMap::default(); for relative_path in relative_paths.iter() { - if let Some((repo_entry, repo)) = state.snapshot.repo_for_path(relative_path) { - if let Ok(repo_path) = repo_entry.relativize(&state.snapshot, relative_path) { + if let Some(local_repo) = state.snapshot.local_repo_for_path(relative_path) { + if let Ok(repo_path) = local_repo.relativize(relative_path) { paths_by_git_repo - .entry(repo.dot_git_dir_abs_path.clone()) + .entry(local_repo.work_directory.clone()) .or_insert_with(|| RepoPaths { - repo: repo.repo_ptr.clone(), - repo_paths: Vec::new(), - relative_paths: Vec::new(), + repo: local_repo.repo_ptr.clone(), + repo_paths: Default::default(), }) - .add_paths(relative_path, repo_path); + .add_path(repo_path); } } } - // Now call `git status` once per repository and collect each file's git status. - let mut git_statuses_by_relative_path = - paths_by_git_repo - .into_values() - .fold(HashMap::default(), |mut map, repo_paths| { - map.extend(repo_paths.into_git_file_statuses()); - map - }); + // TODO: Should we do this outside of the state lock? 
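+        // For each repository that owns one of the changed paths, run a single
+        // `git status` covering just those paths. Entries that come back are
+        // upserted into that repository's status tree; requested paths missing
+        // from the output have their old status entries removed.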
+ for (work_directory, mut paths) in paths_by_git_repo { + if let Ok(status) = paths.repo.status(&paths.repo_paths) { + let mut changed_path_statuses = Vec::new(); + for (repo_path, status) in &*status.entries { + paths.remove_repo_path(repo_path); + changed_path_statuses.push(Edit::Insert(StatusEntry { + repo_path: repo_path.clone(), + git_status: *status, + })); + } + for path in paths.repo_paths { + changed_path_statuses.push(Edit::Remove(PathKey(path.0))); + } + state.snapshot.repositories.update( + &work_directory.path_key(), + &(), + move |repository_entry| { + repository_entry + .git_entries_by_path + .edit(changed_path_statuses, &()); + }, + ); + } + } for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) { let abs_path: Arc = root_abs_path.join(path).into(); @@ -4547,10 +4877,6 @@ impl BackgroundScanner { } } - if !is_dir && !fs_entry.is_ignored && !fs_entry.is_external { - fs_entry.git_status = git_statuses_by_relative_path.remove(path); - } - state.insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()); } Ok(None) => { @@ -4570,18 +4896,19 @@ impl BackgroundScanner { ); } - fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> { + fn remove_repo_path(&self, path: &Arc, snapshot: &mut LocalSnapshot) -> Option<()> { if !path .components() .any(|component| component.as_os_str() == *DOT_GIT) { - if let Some(repository) = snapshot.repository_for_work_directory(path) { - let entry = repository.work_directory.0; - snapshot.git_repositories.remove(&entry); + if let Some(repository) = snapshot.repository(PathKey(path.clone())) { + snapshot + .git_repositories + .remove(&repository.work_directory_id); snapshot .snapshot - .repository_entries - .remove(&RepositoryWorkDirectory(path.into())); + .repositories + .remove(&PathKey(repository.work_directory.path.clone()), &()); return Some(()); } } @@ -4684,7 +5011,7 @@ impl BackgroundScanner { .abs_path .strip_prefix(snapshot.abs_path.as_path()) .unwrap(); - let repo = snapshot.repo_for_path(path); + for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); @@ -4720,18 +5047,6 @@ impl BackgroundScanner { let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone(); path_entry.scan_id = snapshot.scan_id; path_entry.is_ignored = entry.is_ignored; - if !entry.is_dir() && !entry.is_ignored && !entry.is_external { - if let Some((ref repo_entry, local_repo)) = repo { - if let Ok(repo_path) = repo_entry.relativize(snapshot, &entry.path) { - let status = local_repo - .repo_ptr - .status(&[repo_path.0.clone()]) - .ok() - .and_then(|status| status.get(&repo_path)); - entry.git_status = status; - } - } - } entries_by_id_edits.push(Edit::Insert(path_entry)); entries_by_path_edits.push(Edit::Insert(entry)); } @@ -4776,7 +5091,7 @@ impl BackgroundScanner { } }); - let (work_directory, repository) = match existing_repository_entry { + let local_repository = match existing_repository_entry { None => { match state.insert_git_repository( dot_git_dir.into(), @@ -4787,45 +5102,36 @@ impl BackgroundScanner { None => continue, } } - Some((entry_id, repository)) => { - if repository.git_dir_scan_id == scan_id { + Some((entry_id, local_repository)) => { + if local_repository.git_dir_scan_id == scan_id { continue; } let Some(work_dir) = state .snapshot .entry_for_id(entry_id) - .map(|entry| RepositoryWorkDirectory(entry.path.clone())) + .map(|entry| entry.path.clone()) else { continue; 
}; - let repo = &repository.repo_ptr; - let branch = repo.branch_name(); - repo.reload_index(); + let branch = local_repository.repo_ptr.branch_name(); + local_repository.repo_ptr.reload_index(); state .snapshot .git_repositories .update(&entry_id, |entry| entry.git_dir_scan_id = scan_id); - state - .snapshot - .snapshot - .repository_entries - .update(&work_dir, |entry| entry.branch = branch.map(Into::into)); - (work_dir, repository.repo_ptr.clone()) + state.snapshot.snapshot.repositories.update( + &PathKey(work_dir.clone()), + &(), + |entry| entry.branch = branch.map(Into::into), + ); + + local_repository } }; - repo_updates.push(UpdateGitStatusesJob { - location_in_repo: state - .snapshot - .repository_entries - .get(&work_directory) - .and_then(|repo| repo.location_in_repo.clone()) - .clone(), - work_directory, - repository, - }); + repo_updates.push(UpdateGitStatusesJob { local_repository }); } // Remove any git repositories whose .git entry no longer exists. @@ -4851,9 +5157,9 @@ impl BackgroundScanner { snapshot .git_repositories .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); - snapshot - .repository_entries - .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); + snapshot.repositories.retain(&(), |entry| { + ids_to_preserve.contains(&entry.work_directory_id) + }); } let (mut updates_done_tx, mut updates_done_rx) = barrier::channel(); @@ -4887,59 +5193,71 @@ impl BackgroundScanner { /// Update the git statuses for a given batch of entries. fn update_git_statuses(&self, job: UpdateGitStatusesJob) { - log::trace!("updating git statuses for repo {:?}", job.work_directory.0); + log::trace!( + "updating git statuses for repo {:?}", + job.local_repository.work_directory.path + ); let t0 = Instant::now(); - let Some(statuses) = job.repository.status(&[PathBuf::from("")]).log_err() else { + + let Some(statuses) = job + .local_repository + .repo() + .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) + .log_err() + else { return; }; log::trace!( "computed git statuses for repo {:?} in {:?}", - job.work_directory.0, + job.local_repository.work_directory.path, t0.elapsed() ); let t0 = Instant::now(); - let mut changes = Vec::new(); + let mut changed_paths = Vec::new(); let snapshot = self.state.lock().snapshot.snapshot.clone(); - for file in snapshot.traverse_from_path(true, false, false, job.work_directory.0.as_ref()) { - let Ok(repo_path) = file.path.strip_prefix(&job.work_directory.0) else { - break; - }; - let git_status = if let Some(location) = &job.location_in_repo { - statuses.get(&location.join(repo_path)) - } else { - statuses.get(repo_path) - }; - if file.git_status != git_status { - let mut entry = file.clone(); - entry.git_status = git_status; - changes.push((entry.path, git_status)); + + let Some(mut repository) = + snapshot.repository(job.local_repository.work_directory.path_key()) + else { + log::error!("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot"); + debug_assert!(false); + return; + }; + + let mut new_entries_by_path = SumTree::new(&()); + for (repo_path, status) in statuses.entries.iter() { + let project_path = repository.work_directory.unrelativize(repo_path); + new_entries_by_path.insert_or_replace( + StatusEntry { + repo_path: repo_path.clone(), + git_status: *status, + }, + &(), + ); + + if let Some(path) = project_path { + changed_paths.push(path); } } + repository.git_entries_by_path = new_entries_by_path; let mut state = self.state.lock(); - let edits = changes - .iter() - .filter_map(|(path, 
git_status)| { - let entry = state.snapshot.entry_for_path(path)?.clone(); - Some(Edit::Insert(Entry { - git_status: *git_status, - ..entry.clone() - })) - }) - .collect(); + state + .snapshot + .repositories + .insert_or_replace(repository, &()); - // Apply the git status changes. util::extend_sorted( &mut state.changed_paths, - changes.iter().map(|p| p.0.clone()), + changed_paths.into_iter(), usize::MAX, Ord::cmp, ); - state.snapshot.entries_by_path.edit(edits, &()); + log::trace!( "applied git status updates for repo {:?} in {:?}", - job.work_directory.0, + job.local_repository.work_directory.path, t0.elapsed(), ); } @@ -5109,28 +5427,28 @@ fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { result } +#[derive(Debug)] struct RepoPaths { repo: Arc, - relative_paths: Vec>, - repo_paths: Vec, + // sorted + repo_paths: Vec, } impl RepoPaths { - fn add_paths(&mut self, relative_path: &Arc, repo_path: RepoPath) { - self.relative_paths.push(relative_path.clone()); - self.repo_paths.push(repo_path.0); + fn add_path(&mut self, repo_path: RepoPath) { + match self.repo_paths.binary_search(&repo_path) { + Ok(_) => {} + Err(ix) => self.repo_paths.insert(ix, repo_path), + } } - fn into_git_file_statuses(self) -> HashMap, GitFileStatus> { - let mut statuses = HashMap::default(); - if let Ok(status) = self.repo.status(&self.repo_paths) { - for (repo_path, relative_path) in self.repo_paths.into_iter().zip(self.relative_paths) { - if let Some(path_status) = status.get(&repo_path) { - statuses.insert(relative_path, path_status); - } + fn remove_repo_path(&mut self, repo_path: &RepoPath) { + match self.repo_paths.binary_search(&repo_path) { + Ok(ix) => { + self.repo_paths.remove(ix); } + Err(_) => {} } - statuses } } @@ -5141,13 +5459,6 @@ struct ScanJob { scan_queue: Sender, ancestor_inodes: TreeSet, is_external: bool, - containing_repository: Option, -} - -#[derive(Clone)] -struct ScanJobContainingRepository { - work_directory: RepositoryWorkDirectory, - statuses: GitStatus, } struct UpdateIgnoreStatusJob { @@ -5158,9 +5469,7 @@ struct UpdateIgnoreStatusJob { } struct UpdateGitStatusesJob { - work_directory: RepositoryWorkDirectory, - location_in_repo: Option>, - repository: Arc, + local_repository: LocalRepositoryEntry, } pub trait WorktreeModelHandle { @@ -5333,44 +5642,169 @@ impl<'a> Default for TraversalProgress<'a> { } } -#[derive(Clone, Debug, Default, Copy)] -struct GitStatuses { - added: usize, - modified: usize, - conflict: usize, +#[derive(Debug, Clone, Copy)] +pub struct GitEntryRef<'a> { + pub entry: &'a Entry, + pub git_status: Option, } -impl AddAssign for GitStatuses { - fn add_assign(&mut self, rhs: Self) { - self.added += rhs.added; - self.modified += rhs.modified; - self.conflict += rhs.conflict; +impl<'a> GitEntryRef<'a> { + fn entry(entry: &'a Entry) -> Self { + Self { + entry, + git_status: None, + } + } + + pub fn to_owned(&self) -> GitEntry { + GitEntry { + entry: self.entry.clone(), + git_status: self.git_status.clone(), + } } } -impl Sub for GitStatuses { - type Output = GitStatuses; +impl<'a> Deref for GitEntryRef<'a> { + type Target = Entry; - fn sub(self, rhs: Self) -> Self::Output { - GitStatuses { - added: self.added - rhs.added, - modified: self.modified - rhs.modified, - conflict: self.conflict - rhs.conflict, + fn deref(&self) -> &Self::Target { + &self.entry + } +} + +impl<'a> AsRef for GitEntryRef<'a> { + fn as_ref(&self) -> &Entry { + self.entry + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct GitEntry { + pub entry: Entry, + pub 
git_status: Option, +} + +impl GitEntry { + pub fn to_ref(&self) -> GitEntryRef { + GitEntryRef { + entry: &self.entry, + git_status: self.git_status.clone(), } } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses { - fn zero(_cx: &()) -> Self { - Default::default() +impl Deref for GitEntry { + type Target = Entry; + + fn deref(&self) -> &Self::Target { + &self.entry } +} - fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { - *self += summary.statuses +impl<'a> AsRef for GitEntry { + fn as_ref(&self) -> &Entry { + &self.entry + } +} + +pub struct GitTraversal<'a> { + traversal: Traversal<'a>, + reset: bool, + repositories: Cursor<'a, RepositoryEntry, PathProgress<'a>>, + statuses: Option>>, +} + +impl<'a> GitTraversal<'a> { + pub fn advance(&mut self) -> bool { + self.advance_by(1) + } + + pub fn advance_by(&mut self, count: usize) -> bool { + self.traversal.advance_by(count) + } + + pub fn advance_to_sibling(&mut self) -> bool { + self.traversal.advance_to_sibling() + } + + pub fn back_to_parent(&mut self) -> bool { + if self.traversal.back_to_parent() { + self.reset = true; + true + } else { + false + } + } + + pub fn start_offset(&self) -> usize { + self.traversal.start_offset() + } + + pub fn end_offset(&self) -> usize { + self.traversal.end_offset() + } + + pub fn entry(&mut self) -> Option> { + let reset = mem::take(&mut self.reset); + let entry = self.traversal.cursor.item()?; + let current_repository_id = self + .repositories + .item() + .map(|repository| repository.work_directory_id); + + if reset { + self.repositories + .seek(&PathTarget::Path(&entry.path), Bias::Left, &()); + } else { + self.repositories + .seek_forward(&PathTarget::Path(&entry.path), Bias::Left, &()); + } + let Some(repository) = self.repositories.item() else { + self.statuses = None; + return Some(GitEntryRef::entry(entry)); + }; + + if reset || Some(repository.work_directory_id) != current_repository_id { + self.statuses = Some(repository.git_entries_by_path.cursor::(&())); + } + + let Some(statuses) = self.statuses.as_mut() else { + return Some(GitEntryRef::entry(entry)); + }; + let Some(repo_path) = repository.relativize(&entry.path).ok() else { + return Some(GitEntryRef::entry(entry)); + }; + let found = statuses.seek_forward(&PathTarget::Path(&repo_path.0), Bias::Left, &()); + + if found { + let Some(status) = statuses.item() else { + return Some(GitEntryRef::entry(entry)); + }; + + Some(GitEntryRef { + entry, + git_status: Some(status.git_status), + }) + } else { + Some(GitEntryRef::entry(entry)) + } + } +} + +impl<'a> Iterator for GitTraversal<'a> { + type Item = GitEntryRef<'a>; + fn next(&mut self) -> Option { + if let Some(item) = self.entry() { + self.advance(); + Some(item) + } else { + None + } } } pub struct Traversal<'a> { + repositories: &'a SumTree, cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>, include_ignored: bool, include_files: bool, @@ -5379,6 +5813,7 @@ pub struct Traversal<'a> { impl<'a> Traversal<'a> { fn new( + repositories: &'a SumTree, entries: &'a SumTree, include_files: bool, include_dirs: bool, @@ -5386,8 +5821,9 @@ impl<'a> Traversal<'a> { start_path: &Path, ) -> Self { let mut cursor = entries.cursor(&()); - cursor.seek(&TraversalTarget::Path(start_path), Bias::Left, &()); + cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &()); let mut traversal = Self { + repositories, cursor, include_files, include_dirs, @@ -5398,6 +5834,28 @@ impl<'a> Traversal<'a> { } traversal } + + pub fn with_git_statuses(self) -> GitTraversal<'a> { + 
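+        // Seek a cursor over the repositories tree to the traversal's starting path;
+        // if the cursor lands on a repository, also open a cursor over its status
+        // entries so `GitTraversal` can pair each entry with a git status.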
let mut repositories = self.repositories.cursor::(&()); + if let Some(start_path) = self.cursor.item() { + repositories.seek(&PathTarget::Path(&start_path.path), Bias::Left, &()); + }; + let statuses = repositories.item().map(|repository| { + let mut statuses = repository.git_entries_by_path.cursor::(&()); + if let Some(start_path) = self.cursor.item() { + statuses.seek(&PathTarget::Path(&start_path.path), Bias::Left, &()); + } + statuses + }); + + GitTraversal { + traversal: self, + repositories, + reset: false, + statuses, + } + } + pub fn advance(&mut self) -> bool { self.advance_by(1) } @@ -5417,11 +5875,8 @@ impl<'a> Traversal<'a> { pub fn advance_to_sibling(&mut self) -> bool { while let Some(entry) = self.cursor.item() { - self.cursor.seek_forward( - &TraversalTarget::PathSuccessor(&entry.path), - Bias::Left, - &(), - ); + self.cursor + .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &()); if let Some(entry) = self.cursor.item() { if (self.include_files || !entry.is_file()) && (self.include_dirs || !entry.is_dir()) @@ -5439,7 +5894,7 @@ impl<'a> Traversal<'a> { return false; }; self.cursor - .seek(&TraversalTarget::Path(parent_path), Bias::Left, &()) + .seek(&TraversalTarget::path(parent_path), Bias::Left, &()) } pub fn entry(&self) -> Option<&'a Entry> { @@ -5472,10 +5927,64 @@ impl<'a> Iterator for Traversal<'a> { } } +#[derive(Debug, Clone, Copy)] +enum PathTarget<'a> { + Path(&'a Path), + Successor(&'a Path), +} + +impl<'a> PathTarget<'a> { + fn path(&self) -> &'a Path { + match self { + PathTarget::Path(path) => path, + PathTarget::Successor(path) => path, + } + } + + fn with_path(self, path: &Path) -> PathTarget<'_> { + match self { + PathTarget::Successor(_) => PathTarget::Successor(path), + PathTarget::Path(_) => PathTarget::Path(path), + } + } + + fn cmp_path(&self, other: &Path) -> Ordering { + match self { + PathTarget::Path(path) => path.cmp(&other), + PathTarget::Successor(path) => { + if other.starts_with(path) { + Ordering::Greater + } else { + Ordering::Equal + } + } + } + } +} + +impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary, PathProgress<'a>> for PathTarget<'b> { + fn cmp(&self, cursor_location: &PathProgress<'a>, _: &S::Context) -> Ordering { + self.cmp_path(&cursor_location.max_path) + } +} + +impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for PathTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &S::Context) -> Ordering { + self.cmp_path(&cursor_location.max_path) + } +} + +impl<'a, 'b> SeekTarget<'a, PathSummary, (TraversalProgress<'a>, GitStatuses)> + for PathTarget<'b> +{ + fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering { + self.cmp_path(&cursor_location.0.max_path) + } +} + #[derive(Debug)] enum TraversalTarget<'a> { - Path(&'a Path), - PathSuccessor(&'a Path), + Path(PathTarget<'a>), Count { count: usize, include_files: bool, @@ -5484,17 +5993,18 @@ enum TraversalTarget<'a> { }, } -impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> { - fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { +impl<'a> TraversalTarget<'a> { + fn path(path: &'a Path) -> Self { + Self::Path(PathTarget::Path(path)) + } + + fn successor(path: &'a Path) -> Self { + Self::Path(PathTarget::Successor(path)) + } + + fn cmp_progress(&self, progress: &TraversalProgress) -> Ordering { match self { - TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path), - TraversalTarget::PathSuccessor(path) => { - 
if cursor_location.max_path.starts_with(path) { - Ordering::Greater - } else { - Ordering::Equal - } - } + TraversalTarget::Path(path) => path.cmp_path(&progress.max_path), TraversalTarget::Count { count, include_files, @@ -5502,17 +6012,21 @@ impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTa include_ignored, } => Ord::cmp( count, - &cursor_location.count(*include_files, *include_dirs, *include_ignored), + &progress.count(*include_files, *include_dirs, *include_ignored), ), } } } -impl<'a, 'b> SeekTarget<'a, EntrySummary, (TraversalProgress<'a>, GitStatuses)> - for TraversalTarget<'b> -{ - fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering { - self.cmp(&cursor_location.0, &()) +impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { + self.cmp_progress(cursor_location) + } +} + +impl<'a, 'b> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for TraversalTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { + self.cmp_progress(cursor_location) } } @@ -5521,6 +6035,20 @@ pub struct ChildEntriesIter<'a> { traversal: Traversal<'a>, } +impl<'a> ChildEntriesIter<'a> { + pub fn with_git_statuses(self) -> ChildEntriesGitIter<'a> { + ChildEntriesGitIter { + parent_path: self.parent_path, + traversal: self.traversal.with_git_statuses(), + } + } +} + +pub struct ChildEntriesGitIter<'a> { + parent_path: &'a Path, + traversal: GitTraversal<'a>, +} + impl<'a> Iterator for ChildEntriesIter<'a> { type Item = &'a Entry; @@ -5535,6 +6063,20 @@ impl<'a> Iterator for ChildEntriesIter<'a> { } } +impl<'a> Iterator for ChildEntriesGitIter<'a> { + type Item = GitEntryRef<'a>; + + fn next(&mut self) -> Option { + if let Some(item) = self.traversal.entry() { + if item.path.starts_with(self.parent_path) { + self.traversal.advance_to_sibling(); + return Some(item); + } + } + None + } +} + impl<'a> From<&'a Entry> for proto::Entry { fn from(entry: &'a Entry) -> Self { Self { @@ -5545,7 +6087,6 @@ impl<'a> From<&'a Entry> for proto::Entry { mtime: entry.mtime.map(|time| time.into()), is_ignored: entry.is_ignored, is_external: entry.is_external, - git_status: entry.git_status.map(git_status_to_proto), is_fifo: entry.is_fifo, size: Some(entry.size), canonical_path: entry @@ -5582,7 +6123,6 @@ impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry { is_ignored: entry.is_ignored, is_always_included: always_included.is_match(path.as_ref()), is_external: entry.is_external, - git_status: git_status_from_proto(entry.git_status), is_private: false, char_bag, is_fifo: entry.is_fifo, @@ -5596,6 +6136,7 @@ fn git_status_from_proto(git_status: Option) -> Option { proto::GitStatus::Added => GitFileStatus::Added, proto::GitStatus::Modified => GitFileStatus::Modified, proto::GitStatus::Conflict => GitFileStatus::Conflict, + proto::GitStatus::Deleted => GitFileStatus::Deleted, }) }) } @@ -5605,6 +6146,8 @@ fn git_status_to_proto(status: GitFileStatus) -> i32 { GitFileStatus::Added => proto::GitStatus::Added as i32, GitFileStatus::Modified => proto::GitStatus::Modified as i32, GitFileStatus::Conflict => proto::GitStatus::Conflict as i32, + GitFileStatus::Deleted => proto::GitStatus::Deleted as i32, + GitFileStatus::Untracked => todo!(), } } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 8b93396e24b95..4657267491c27 100644 --- 
a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -1,6 +1,6 @@ use crate::{ - worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, Snapshot, Worktree, - WorktreeModelHandle, + worktree_settings::WorktreeSettings, Entry, EntryKind, Event, GitEntry, PathChange, Snapshot, + Worktree, WorktreeModelHandle, }; use anyhow::Result; use fs::{FakeFs, Fs, RealFs, RemoveOptions}; @@ -1497,10 +1497,11 @@ async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) { cx.executor().run_until_parked(); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + check_propagated_statuses( &snapshot, &[ - (Path::new(""), Some(GitFileStatus::Modified)), + (Path::new(""), Some(GitFileStatus::Modified)), // This is testing our propogation stuff, which we just said we wouldn't do (Path::new("a.txt"), None), (Path::new("b/c.txt"), Some(GitFileStatus::Modified)), ], @@ -2178,15 +2179,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { cx.read(|cx| { let tree = tree.read(cx); - let (work_dir, _) = tree.repositories().next().unwrap(); - assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); + let repo = tree.repositories().next().unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("projects/project1")); assert_eq!( tree.status_for_file(Path::new("projects/project1/a")), Some(GitFileStatus::Modified) ); assert_eq!( tree.status_for_file(Path::new("projects/project1/b")), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2199,15 +2200,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { cx.read(|cx| { let tree = tree.read(cx); - let (work_dir, _) = tree.repositories().next().unwrap(); - assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); + let repo = tree.repositories().next().unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("projects/project2")); assert_eq!( tree.status_for_file(Path::new("projects/project2/a")), Some(GitFileStatus::Modified) ); assert_eq!( tree.status_for_file(Path::new("projects/project2/b")), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); } @@ -2253,23 +2254,13 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); - let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); - assert_eq!( - entry - .work_directory(tree) - .map(|directory| directory.as_ref().to_owned()), - Some(Path::new("dir1").to_owned()) - ); + let repo = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("dir1")); - let entry = tree + let repo = tree .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) .unwrap(); - assert_eq!( - entry - .work_directory(tree) - .map(|directory| directory.as_ref().to_owned()), - Some(Path::new("dir1/deps/dep1").to_owned()) - ); + assert_eq!(repo.path.as_ref(), Path::new("dir1/deps/dep1")); let entries = tree.files(false, 0); @@ -2278,10 +2269,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { .map(|(entry, repo)| { ( entry.path.as_ref(), - repo.and_then(|repo| { - repo.work_directory(tree) - .map(|work_directory| work_directory.0.to_path_buf()) - }), + repo.map(|repo| repo.path.to_path_buf()), ) }) .collect::>(); @@ -2334,7 +2322,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_git_status(cx: &mut TestAppContext) { +async fn test_file_status(cx: &mut TestAppContext) { init_test(cx); 
cx.executor().allow_parking(); const IGNORE_RULE: &str = "**/target"; @@ -2393,17 +2381,17 @@ async fn test_git_status(cx: &mut TestAppContext) { tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); assert_eq!(snapshot.repositories().count(), 1); - let (dir, repo_entry) = snapshot.repositories().next().unwrap(); - assert_eq!(dir.as_ref(), Path::new("project")); + let repo_entry = snapshot.repositories().next().unwrap(); + assert_eq!(repo_entry.path.as_ref(), Path::new("project")); assert!(repo_entry.location_in_repo.is_none()); assert_eq!( snapshot.status_for_file(project_path.join(B_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!( snapshot.status_for_file(project_path.join(F_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2433,7 +2421,7 @@ async fn test_git_status(cx: &mut TestAppContext) { let snapshot = tree.snapshot(); assert_eq!( snapshot.status_for_file(project_path.join(F_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); @@ -2455,7 +2443,7 @@ async fn test_git_status(cx: &mut TestAppContext) { assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); assert_eq!( snapshot.status_for_file(project_path.join(B_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!( snapshot.status_for_file(project_path.join(E_TXT)), @@ -2494,7 +2482,7 @@ async fn test_git_status(cx: &mut TestAppContext) { let snapshot = tree.snapshot(); assert_eq!( snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2518,11 +2506,125 @@ async fn test_git_status(cx: &mut TestAppContext) { .join(Path::new(renamed_dir_name)) .join(RENAMED_FILE) ), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); } +#[gpui::test] +async fn test_git_repository_status(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root = temp_tree(json!({ + "project": { + "a.txt": "a", // Modified + "b.txt": "bb", // Added + "c.txt": "ccc", // Unchanged + "d.txt": "dddd", // Deleted + }, + + })); + + // Set up git repository before creating the worktree. 
+ let work_dir = root.path().join("project"); + let repo = git_init(work_dir.as_path()); + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_add("d.txt", &repo); + git_commit("Initial commit", &repo); + std::fs::remove_file(work_dir.join("d.txt")).unwrap(); + std::fs::write(work_dir.join("a.txt"), "aa").unwrap(); + + let tree = Worktree::local( + root.path(), + true, + Arc::new(RealFs::default()), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + // Check that the right git state is observed on startup + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repo = snapshot.repositories().next().unwrap(); + let entries = repo.status().collect::>(); + + assert_eq!(entries.len(), 3); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].git_status, GitFileStatus::Modified); + assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt")); + assert_eq!(entries[1].git_status, GitFileStatus::Untracked); + assert_eq!(entries[2].repo_path.as_ref(), Path::new("d.txt")); + assert_eq!(entries[2].git_status, GitFileStatus::Deleted); + }); + + std::fs::write(work_dir.join("c.txt"), "some changes").unwrap(); + eprintln!("File c.txt has been modified"); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repository = snapshot.repositories().next().unwrap(); + let entries = repository.status().collect::>(); + + std::assert_eq!(entries.len(), 4, "entries: {entries:?}"); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].git_status, GitFileStatus::Modified); + assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt")); + assert_eq!(entries[1].git_status, GitFileStatus::Untracked); + // Status updated + assert_eq!(entries[2].repo_path.as_ref(), Path::new("c.txt")); + assert_eq!(entries[2].git_status, GitFileStatus::Modified); + assert_eq!(entries[3].repo_path.as_ref(), Path::new("d.txt")); + assert_eq!(entries[3].git_status, GitFileStatus::Deleted); + }); + + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_remove_index(Path::new("d.txt"), &repo); + git_commit("Another commit", &repo); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + std::fs::remove_file(work_dir.join("a.txt")).unwrap(); + std::fs::remove_file(work_dir.join("b.txt")).unwrap(); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repo = snapshot.repositories().next().unwrap(); + let entries = repo.status().collect::>(); + + // Deleting an untracked entry, b.txt, should leave no status + // a.txt was tracked, and so should have a status + assert_eq!( + entries.len(), + 1, + "Entries length was incorrect\n{:#?}", + &entries + ); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].git_status, GitFileStatus::Deleted); + }); +} + #[gpui::test] async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { init_test(cx); @@ -2575,22 +2677,22 @@ async fn 
test_repository_subfolder_git_status(cx: &mut TestAppContext) { tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); assert_eq!(snapshot.repositories().count(), 1); - let (dir, repo_entry) = snapshot.repositories().next().unwrap(); + let repo = snapshot.repositories().next().unwrap(); // Path is blank because the working directory of // the git repository is located at the root of the project - assert_eq!(dir.as_ref(), Path::new("")); + assert_eq!(repo.path.as_ref(), Path::new("")); // This is the missing path between the root of the project (sub-folder-2) and its // location relative to the root of the repository. assert_eq!( - repo_entry.location_in_repo, + repo.location_in_repo, Some(Arc::from(Path::new("sub-folder-1/sub-folder-2"))) ); assert_eq!(snapshot.status_for_file("c.txt"), None); assert_eq!( snapshot.status_for_file("d/e.txt"), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2612,6 +2714,95 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_traverse_with_git_status(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar", + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z.txt": "sneaky..." + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[ + (Path::new("x2.txt"), GitFileStatus::Modified), + (Path::new("z.txt"), GitFileStatus::Added), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/y/.git"), + &[(Path::new("y1.txt"), GitFileStatus::Conflict)], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Added)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + // Traverse the worktree starting at "x" and check the git status attached to each entry. + + let mut traversal = snapshot + .traverse_from_path(true, false, true, Path::new("x")) + .with_git_statuses(); + + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Modified)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Conflict)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/z.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Added)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("z/z2.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Added)); +} + #[gpui::test] async fn test_propagate_git_statuses(cx:
&mut TestAppContext) { init_test(cx); @@ -2638,7 +2829,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { "h1.txt": "", "h2.txt": "" }, - }), ) .await; @@ -2668,6 +2858,15 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { cx.executor().run_until_parked(); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + check_propagated_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), + (Path::new("g"), Some(GitFileStatus::Conflict)), + (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), + ], + ); + check_propagated_statuses( &snapshot, &[ @@ -2680,7 +2879,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), (Path::new("f"), None), (Path::new("f/no-status.txt"), None), - (Path::new("g"), Some(GitFileStatus::Conflict)), + (Path::new("g"), Some(GitFileStatus::Conflict)), // This one is missing (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), ], ); @@ -2712,6 +2911,246 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_propagate_statuses_for_repos_under_project(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar" + }, + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[(Path::new("x1.txt"), GitFileStatus::Added)], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/y/.git"), + &[ + (Path::new("y1.txt"), GitFileStatus::Conflict), + (Path::new("y2.txt"), GitFileStatus::Modified), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Modified)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("y"), Some(GitFileStatus::Conflict)), + (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("z"), Some(GitFileStatus::Modified)), + (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + (Path::new("x/x2.txt"), None), + (Path::new("y"), Some(GitFileStatus::Conflict)), + (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)), + (Path::new("z"), Some(GitFileStatus::Modified)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)), + ], + 
); +} + +#[gpui::test] +async fn test_propagate_statuses_for_nested_repos(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar", + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z.txt": "sneaky..." + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[ + (Path::new("x2.txt"), GitFileStatus::Modified), + (Path::new("z.txt"), GitFileStatus::Added), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/y/.git"), + &[(Path::new("y1.txt"), GitFileStatus::Conflict)], + ); + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Added)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + // Sanity check the propagation for x/y and z + check_propagated_statuses( + &snapshot, + &[ + (Path::new("x/y"), Some(GitFileStatus::Conflict)), // the y git repository has a conflicted file in it, and so should have a conflict status + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y2.txt"), None), + ], + ); + check_propagated_statuses( + &snapshot, + &[ + (Path::new("z"), Some(GitFileStatus::Added)), // the z git repository has an added file in it, and so should have an added status + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Added)), + ], + ); + + // Test one of the fundamental cases of propagation blocking, the transition from one git repository to another + check_propagated_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Conflict)), // FIXME: This should be Some(Modified) + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + // Sanity check everything around it + check_propagated_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Conflict)), // FIXME: This should be Some(Modified) + (Path::new("x/x1.txt"), None), + (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y2.txt"), None), + (Path::new("x/z.txt"), Some(GitFileStatus::Added)), + ], + ); + + // Test the other fundamental case, transitioning from git repository to non-git repository + check_propagated_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), // FIXME: This should be None + (Path::new("x"), Some(GitFileStatus::Conflict)), // FIXME: This should be Some(Modified) + (Path::new("x/x1.txt"), None), + ], + ); + + // And all together now + check_propagated_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), // FIXME: This should be None + (Path::new("x"), Some(GitFileStatus::Conflict)), // FIXME: This should be Some(Modified) + (Path::new("x/x1.txt"), None), + (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), +
(Path::new("x/y/y2.txt"), None), + (Path::new("x/z.txt"), Some(GitFileStatus::Added)), + (Path::new("z"), Some(GitFileStatus::Added)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Added)), + ], + ); +} + #[gpui::test] async fn test_private_single_file_worktree(cx: &mut TestAppContext) { init_test(cx); @@ -2740,15 +3179,19 @@ fn check_propagated_statuses( snapshot: &Snapshot, expected_statuses: &[(&Path, Option)], ) { - let mut entries = expected_statuses + let entries = expected_statuses .iter() .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone()) .collect::>(); - snapshot.propagate_git_statuses(&mut entries); + // TODO: recreate this + // let statuses = snapshot.propagate_git_statuses(&entries); + let statuses: Vec> = Vec::new(); + panic!("Redo git status propogation"); assert_eq!( entries .iter() - .map(|e| (e.path.as_ref(), e.git_status)) + .enumerate() + .map(|(ix, e)| (e.path.as_ref(), statuses[ix])) .collect::>(), expected_statuses ); @@ -2763,14 +3206,14 @@ fn git_init(path: &Path) -> git2::Repository { fn git_add>(path: P, repo: &git2::Repository) { let path = path.as_ref(); let mut index = repo.index().expect("Failed to get index"); - index.add_path(path).expect("Failed to add a.txt"); + index.add_path(path).expect("Failed to add file"); index.write().expect("Failed to write index"); } #[track_caller] fn git_remove_index(path: &Path, repo: &git2::Repository) { let mut index = repo.index().expect("Failed to get index"); - index.remove_path(path).expect("Failed to add a.txt"); + index.remove_path(path).expect("Failed to add file"); index.write().expect("Failed to write index"); } @@ -2900,7 +3343,8 @@ fn assert_entry_git_state( ) { let entry = tree.entry_for_path(path).expect("entry {path} not found"); assert_eq!( - entry.git_status, git_status, + tree.status_for_file(Path::new(path)), + git_status, "expected {path} to have git status: {git_status:?}" ); assert_eq!(