Parallelize workspace detection in folder listing
Speed up initial folder listing by checking each subfolder for the .onyx-workspace.json marker in parallel instead of sequentially. This uses futures::future::join_all to run multiple PROPFIND/list_files calls concurrently, reducing latency when detecting workspaces across many subdirectories. Also add the futures 0.3 dependency to the Tauri Cargo.toml and lockfile so the async utilities are available.
This commit is contained in:
parent
ac789e8d56
commit
095ac8fa97
18
apps/tauri/src-tauri/Cargo.lock
generated
18
apps/tauri/src-tauri/Cargo.lock
generated
|
|
@@ -909,6 +909,21 @@ dependencies = [
|
|||
"new_debug_unreachable",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "futures"
|
||||
version = "0.3.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
|
||||
dependencies = [
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-executor",
|
||||
"futures-io",
|
||||
"futures-sink",
|
||||
"futures-task",
|
||||
"futures-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "futures-channel"
|
||||
version = "0.3.32"
|
||||
|
|
@@ -916,6 +931,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@@ -970,6 +986,7 @@ version = "0.3.32"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
|
||||
dependencies = [
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-io",
|
||||
"futures-macro",
|
||||
|
|
@@ -2403,6 +2420,7 @@ name = "onyx-tasks"
|
|||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"futures",
|
||||
"notify",
|
||||
"notify-debouncer-mini",
|
||||
"onyx-core",
|
||||
|
|
|
|||
|
|
@@ -25,6 +25,7 @@ serde = { version = "1", features = ["derive"] }
|
|||
serde_json = "1"
|
||||
onyx-core = { path = "../../../crates/onyx-core", default-features = false }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
futures = "0.3"
|
||||
uuid = { version = "1", features = ["serde", "v4"] }
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
notify = { version = "7", optional = true }
|
||||
|
|
|
|||
|
|
@@ -476,22 +476,23 @@ async fn list_remote_folder(
|
|||
.map_err(|e| e.to_string())?;
|
||||
let entries = client.list_files(&path).await.map_err(|e| e.to_string())?;
|
||||
|
||||
let mut folders = Vec::new();
|
||||
for entry in entries {
|
||||
if !entry.is_dir { continue; }
|
||||
// Check if this folder contains .onyx-workspace.json
|
||||
let sub_path = if path.is_empty() {
|
||||
entry.path.clone()
|
||||
} else {
|
||||
format!("{}/{}", path.trim_end_matches('/'), entry.path)
|
||||
};
|
||||
let sub_files = client.list_files(&sub_path).await.unwrap_or_default();
|
||||
let dir_entries: Vec<_> = entries.into_iter().filter(|e| e.is_dir).collect();
|
||||
|
||||
// Check all subfolders for .onyx-workspace.json in parallel
|
||||
let sub_paths: Vec<_> = dir_entries.iter().map(|entry| {
|
||||
if path.is_empty() { entry.path.clone() }
|
||||
else { format!("{}/{}", path.trim_end_matches('/'), entry.path) }
|
||||
}).collect();
|
||||
let checks: Vec<_> = sub_paths.iter().map(|sp| {
|
||||
client.list_files(sp)
|
||||
}).collect();
|
||||
let results: Vec<_> = futures::future::join_all(checks).await
|
||||
.into_iter().map(|r| r.unwrap_or_default()).collect();
|
||||
|
||||
let folders = dir_entries.into_iter().zip(results).map(|(entry, sub_files)| {
|
||||
let is_workspace = sub_files.iter().any(|f| !f.is_dir && f.path == ".onyx-workspace.json");
|
||||
folders.push(RemoteFolderEntry {
|
||||
name: entry.path,
|
||||
is_workspace,
|
||||
});
|
||||
}
|
||||
RemoteFolderEntry { name: entry.path, is_workspace }
|
||||
}).collect();
|
||||
|
||||
Ok(folders)
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in a new issue