Merge pull request #56 from SteelDynamite/claude/serene-ride-XUY3D

This commit is contained in:
SteelDynamite 2026-04-19 09:12:44 +01:00 committed by GitHub
commit 6ae1006ab4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 53 additions and 43 deletions

View file

@ -67,6 +67,25 @@ impl AppState {
} }
} }
/// Derive the credential-store key for a URL: everything between the first
/// "://" and the next '/'. Note this keeps any userinfo or port verbatim
/// (e.g. "https://host:8443/dav" -> "host:8443").
/// Returns an empty string when the URL has no "://" separator.
fn credential_domain(url: &str) -> String {
    let after_scheme = url.split("://").nth(1);
    match after_scheme {
        None => String::new(),
        Some(rest) => rest.split('/').next().unwrap_or("").to_string(),
    }
}
/// Join a remote base directory with a child path.
/// An empty base yields the child unchanged; otherwise any trailing
/// slashes on the base are stripped and a single '/' is inserted.
fn join_remote_path(base: &str, child: &str) -> String {
    if base.is_empty() {
        return child.to_string();
    }
    let trimmed = base.trim_end_matches('/');
    // Build in one allocation: trimmed base + '/' + child.
    let mut joined = String::with_capacity(trimmed.len() + 1 + child.len());
    joined.push_str(trimmed);
    joined.push('/');
    joined.push_str(child);
    joined
}
/// Validate that a workspace path is a reasonable directory and not a system path. /// Validate that a workspace path is a reasonable directory and not a system path.
fn validate_workspace_path(path: &str) -> Result<(), String> { fn validate_workspace_path(path: &str) -> Result<(), String> {
let p = PathBuf::from(path); let p = PathBuf::from(path);
@ -79,7 +98,10 @@ fn validate_workspace_path(path: &str) -> Result<(), String> {
#[cfg(unix)] #[cfg(unix)]
{ {
let forbidden = ["/", "/etc", "/usr", "/bin", "/sbin", "/var", "/proc", "/sys", "/dev"]; let forbidden = ["/", "/etc", "/usr", "/bin", "/sbin", "/var", "/proc", "/sys", "/dev"];
// Strip trailing slashes, but keep "/" itself — trim_end_matches would
// collapse it to "" and slip past the forbidden check.
let canonical = normalized.trim_end_matches('/'); let canonical = normalized.trim_end_matches('/');
let canonical = if canonical.is_empty() { "/" } else { canonical };
if forbidden.contains(&canonical) { if forbidden.contains(&canonical) {
return Err(format!("Cannot use system directory as workspace: {}", path)); return Err(format!("Cannot use system directory as workspace: {}", path));
} }
@ -263,10 +285,7 @@ async fn rename_workspace(
let base_url = webdav_url.as_deref().ok_or("No WebDAV URL configured")?; let base_url = webdav_url.as_deref().ok_or("No WebDAV URL configured")?;
let remote_path = webdav_path.as_deref().unwrap_or(""); let remote_path = webdav_path.as_deref().unwrap_or("");
let domain = base_url let domain = credential_domain(base_url);
.split("://").nth(1)
.and_then(|rest| rest.split('/').next())
.unwrap_or("").to_string();
let creds = app_handle.state::<Credentials<tauri::Wry>>(); let creds = app_handle.state::<Credentials<tauri::Wry>>();
let (username, password) = creds.load(&domain)?; let (username, password) = creds.load(&domain)?;
@ -645,10 +664,9 @@ async fn list_remote_folder(
let dir_entries: Vec<_> = entries.into_iter().filter(|e| e.is_dir).collect(); let dir_entries: Vec<_> = entries.into_iter().filter(|e| e.is_dir).collect();
// Check all subfolders for .onyx-workspace.json in parallel // Check all subfolders for .onyx-workspace.json in parallel
let sub_paths: Vec<_> = dir_entries.iter().map(|entry| { let sub_paths: Vec<_> = dir_entries.iter()
if path.is_empty() { entry.path.clone() } .map(|entry| join_remote_path(&path, &entry.path))
else { format!("{}/{}", path.trim_end_matches('/'), entry.path) } .collect();
}).collect();
let checks: Vec<_> = sub_paths.iter().map(|sp| { let checks: Vec<_> = sub_paths.iter().map(|sp| {
client.list_files(sp) client.list_files(sp)
}).collect(); }).collect();
@ -680,11 +698,7 @@ async fn inspect_remote_workspace(
let mut lists = Vec::new(); let mut lists = Vec::new();
for entry in entries { for entry in entries {
if !entry.is_dir { continue; } if !entry.is_dir { continue; }
let list_path = if path.is_empty() { let list_path = join_remote_path(&path, &entry.path);
entry.path.clone()
} else {
format!("{}/{}", path.trim_end_matches('/'), entry.path)
};
let files = client.list_files(&list_path).await.unwrap_or_else(|e| { let files = client.list_files(&list_path).await.unwrap_or_else(|e| {
eprintln!("Warning: failed to list remote folder '{}': {}", list_path, e); eprintln!("Warning: failed to list remote folder '{}': {}", list_path, e);
Vec::new() Vec::new()
@ -720,11 +734,7 @@ async fn create_remote_workspace(
"list_order": [], "list_order": [],
"last_opened_list": null, "last_opened_list": null,
}); });
let file_path = if path.is_empty() { let file_path = join_remote_path(&path, ".onyx-workspace.json");
".onyx-workspace.json".to_string()
} else {
format!("{}/{}", path.trim_end_matches('/'), ".onyx-workspace.json")
};
client.put_file(&file_path, serde_json::to_string_pretty(&metadata).map_err(|e| e.to_string())?.into_bytes()) client.put_file(&file_path, serde_json::to_string_pretty(&metadata).map_err(|e| e.to_string())?.into_bytes())
.await .await
.map_err(|e| e.to_string())?; .map_err(|e| e.to_string())?;
@ -758,12 +768,7 @@ fn add_webdav_workspace(
s.repo = None; s.repo = None;
// Store credentials keyed by hostname // Store credentials keyed by hostname
let domain = webdav_url let domain = credential_domain(&webdav_url);
.split("://")
.nth(1)
.and_then(|rest| rest.split('/').next())
.unwrap_or("")
.to_string();
s.save_config()?; s.save_config()?;
drop(s); drop(s);
let creds = app_handle.state::<Credentials<tauri::Wry>>(); let creds = app_handle.state::<Credentials<tauri::Wry>>();
@ -826,12 +831,7 @@ async fn sync_workspace(
}; };
// Step 2: load credentials // Step 2: load credentials
let domain = webdav_url let domain = credential_domain(&webdav_url);
.split("://")
.nth(1)
.and_then(|rest| rest.split('/').next())
.unwrap_or("")
.to_string();
let creds = app_handle.state::<Credentials<tauri::Wry>>(); let creds = app_handle.state::<Credentials<tauri::Wry>>();
let (username, password) = creds.load(&domain)?; let (username, password) = creds.load(&domain)?;

View file

@ -13,6 +13,8 @@
let viewYear = $state(existing ? existing.getFullYear() : now.getFullYear()); let viewYear = $state(existing ? existing.getFullYear() : now.getFullYear());
let viewMonth = $state(existing ? existing.getMonth() : now.getMonth()); let viewMonth = $state(existing ? existing.getMonth() : now.getMonth());
let selectedDay = $state(existing ? existing.getDate() : now.getDate()); let selectedDay = $state(existing ? existing.getDate() : now.getDate());
let selectedYear = $state(existing ? existing.getFullYear() : now.getFullYear());
let selectedMonth = $state(existing ? existing.getMonth() : now.getMonth());
let includeTime = $state(has_time); let includeTime = $state(has_time);
let selectedHour = $state(existing ? existing.getHours() : now.getHours()); let selectedHour = $state(existing ? existing.getHours() : now.getHours());
let selectedMinute = $state(existing ? existing.getMinutes() : 0); let selectedMinute = $state(existing ? existing.getMinutes() : 0);
@ -58,9 +60,6 @@
return `${viewYear}-${viewMonth + 1}-${day}` === todayStr; return `${viewYear}-${viewMonth + 1}-${day}` === todayStr;
} }
let selectedYear = $state(existing ? existing.getFullYear() : now.getFullYear());
let selectedMonth = $state(existing ? existing.getMonth() : now.getMonth());
function isSelected(day: number): boolean { function isSelected(day: number): boolean {
return selectedDay === day && selectedYear === viewYear && selectedMonth === viewMonth; return selectedDay === day && selectedYear === viewYear && selectedMonth === viewMonth;
} }

View file

@ -418,7 +418,7 @@ function debouncedSync() {
function restartSyncInterval() { function restartSyncInterval() {
if (_syncInterval) clearInterval(_syncInterval); if (_syncInterval) clearInterval(_syncInterval);
var secs = _appFocused ? syncIntervalSecs : syncIntervalUnfocusedSecs; const secs = _appFocused ? syncIntervalSecs : syncIntervalUnfocusedSecs;
_syncInterval = setInterval(triggerSync, secs * 1000); _syncInterval = setInterval(triggerSync, secs * 1000);
} }

View file

@ -457,26 +457,37 @@ impl Storage for FileSystemStorage {
} }
let mut tasks = Vec::new(); let mut tasks = Vec::new();
for (_id, mut entries) in by_id { for (_id, entries) in by_id {
if entries.len() > 1 { let winner = if entries.len() > 1 {
entries.sort_by(|a, b| { // Read mtime once per file so sort_by doesn't hit the filesystem
// O(n log n) times and can't produce inconsistent orderings if a
// file is touched mid-sort.
let mut with_mtime: Vec<(PathBuf, Task, Option<std::time::SystemTime>)> = entries
.into_iter()
.map(|(p, t)| {
let mtime = fs::metadata(&p).and_then(|m| m.modified()).ok();
(p, t, mtime)
})
.collect();
with_mtime.sort_by(|a, b| {
// Primary: highest version first // Primary: highest version first
let version_cmp = b.1.version.cmp(&a.1.version); let version_cmp = b.1.version.cmp(&a.1.version);
if version_cmp != std::cmp::Ordering::Equal { if version_cmp != std::cmp::Ordering::Equal {
return version_cmp; return version_cmp;
} }
// Tiebreaker: most recently modified file first // Tiebreaker: most recently modified file first
let mtime_a = fs::metadata(&a.0).and_then(|m| m.modified()).ok(); b.2.cmp(&a.2)
let mtime_b = fs::metadata(&b.0).and_then(|m| m.modified()).ok();
mtime_b.cmp(&mtime_a)
}); });
for (stale_path, _) in entries.drain(1..) { for (stale_path, _, _) in with_mtime.drain(1..) {
if let Err(e) = fs::remove_file(&stale_path) { if let Err(e) = fs::remove_file(&stale_path) {
eprintln!("Warning: failed to remove stale duplicate task file {:?}: {}", stale_path, e); eprintln!("Warning: failed to remove stale duplicate task file {:?}: {}", stale_path, e);
} }
} }
} with_mtime.into_iter().next().map(|(_, t, _)| t)
let (_, task) = entries.into_iter().next() } else {
entries.into_iter().next().map(|(_, t)| t)
};
let task = winner
.ok_or_else(|| Error::InvalidData("Empty dedup entries for task".to_string()))?; .ok_or_else(|| Error::InvalidData("Empty dedup entries for task".to_string()))?;
tasks.push(task); tasks.push(task);
} }