Initial release: Ironpad v0.1.0 - Local-first, file-based project and knowledge management system. Rust backend, Vue 3 frontend, Milkdown editor, Git integration, cross-platform builds. Built with AI using Open Method.

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
skepsismusic
2026-02-06 00:13:31 +01:00
commit ebe3e2aa8f
97 changed files with 25033 additions and 0 deletions

View File

@@ -0,0 +1,349 @@
use std::fs;
use std::io::Write;
use std::path::Path;
use serde_yaml::Value;
use walkdir::WalkDir;
use crate::models::note::{Note, NoteSummary};
use crate::services::frontmatter;
use crate::config;
/// List all notes in the filesystem (read-only).
///
/// Walks the data directory (skipping ignored trees), keeps only markdown
/// files recognized by `is_note_file`, and returns one `NoteSummary` per
/// parseable file. Unparseable files are logged and skipped rather than
/// failing the whole listing.
pub fn list_notes() -> Result<Vec<NoteSummary>, String> {
    let mut summaries = Vec::new();
    let walker = WalkDir::new(config::data_dir())
        .into_iter()
        .filter_entry(|entry| !is_ignored(entry.path()))
        .filter_map(Result::ok);
    for entry in walker {
        let file_path = entry.path();
        // Only markdown note files are candidates.
        let is_markdown = file_path.extension().and_then(|ext| ext.to_str()) == Some("md");
        if !is_markdown || !is_note_file(file_path) {
            continue;
        }
        match parse_note_summary(file_path) {
            Ok(summary) => summaries.push(summary),
            Err(err) => {
                tracing::warn!("Skipping file {:?}: {}", file_path, err);
            }
        }
    }
    Ok(summaries)
}
/// True when `path` lies inside a directory tree the walker must never scan:
/// git metadata, binary assets, or archived notes.
fn is_ignored(path: &Path) -> bool {
    const SKIPPED: [&str; 3] = [".git", "assets", "archive"];
    path.components()
        .filter_map(|component| component.as_os_str().to_str())
        .any(|name| SKIPPED.contains(&name))
}
/// Decide whether a markdown file under the data directory is a note.
///
/// Accepted locations (relative to the data directory):
/// - `notes/**/*.md`       — anything in the notes tree
/// - `projects/*/index.md` — project index files only
/// - `*.md` at the data root — e.g. `index.md`, `inbox.md`
fn is_note_file(path: &Path) -> bool {
    // Work relative to the data dir and match whole path components, so
    // directory names elsewhere on the filesystem that merely contain
    // "notes" or "projects" as a substring cannot produce false positives.
    let relative = path.strip_prefix(config::data_dir()).unwrap_or(path);
    // Anything inside an archive folder is never a live note.
    if relative
        .components()
        .filter_map(|c| c.as_os_str().to_str())
        .any(|c| c == "archive")
    {
        return false;
    }
    let mut components = relative.components().filter_map(|c| c.as_os_str().to_str());
    match components.next() {
        // data/notes/**/*.md
        Some("notes") => true,
        // data/projects/*/index.md — only the project index file counts
        Some("projects") => relative.file_name().and_then(|s| s.to_str()) == Some("index.md"),
        // Root-level files (index.md, inbox.md): the filename is the only component
        Some(_) if relative.components().count() == 1 => true,
        _ => false,
    }
}
/// Build a lightweight `NoteSummary` from a note file on disk.
///
/// Frontmatter supplies `id`, `title`, `type`, and `updated` when present;
/// missing values fall back to path-derived defaults. Uses the
/// `frontmatter::get_str*` accessors for consistency instead of repeating
/// raw `Mapping` lookups.
fn parse_note_summary(path: &Path) -> Result<NoteSummary, String> {
    let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
    let (fm, _body, _has_fm) = frontmatter::parse_frontmatter(&content);
    // Prefer the explicit frontmatter id; otherwise derive a deterministic one.
    let id = frontmatter::get_str(&fm, "id")
        .unwrap_or_else(|| frontmatter::derive_id_from_path(path));
    // Title defaults to the file stem (e.g. "my-note" for my-note.md).
    let title = frontmatter::get_str(&fm, "title").unwrap_or_else(|| {
        path.file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or("Untitled")
            .to_string()
    });
    let note_type = frontmatter::get_str_or(&fm, "type", "note");
    let updated = frontmatter::get_str(&fm, "updated");
    Ok(NoteSummary {
        id,
        title,
        path: normalize_path(path),
        note_type,
        updated,
    })
}
/// Convert an absolute note path into the canonical repo-relative form:
/// forward slashes, no leading slash, and no `data/` prefix.
///
/// "data" is matched as a whole path component, not a substring: the previous
/// `find("data")` approach mangled paths whose parent directories merely
/// contained "data" (e.g. "database", "mydata") and could slice out of bounds
/// (`idx + 5`) when the path ended exactly at "data".
pub fn normalize_path(path: &Path) -> String {
    // Unify separators first so the same logic works for Windows-style paths.
    let unified = path.to_string_lossy().replace('\\', "/");
    let parts: Vec<&str> = unified.split('/').filter(|p| !p.is_empty()).collect();
    // Strip everything up to and including the first "data" component.
    match parts.iter().position(|&p| p == "data") {
        Some(idx) => parts[idx + 1..].join("/"),
        None => parts.join("/"),
    }
}
/// Read a full note by deterministic ID.
pub fn read_note_by_id(note_id: &str) -> Result<Note, String> {
let root = config::data_dir();
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
if !is_note_file(path) {
continue;
}
let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
let (fm, body, _has_fm) = frontmatter::parse_frontmatter(&content);
let derived_id = fm
.get(&Value::from("id"))
.and_then(|v| v.as_str())
.map(String::from)
.unwrap_or_else(|| frontmatter::derive_id_from_path(path));
if derived_id != note_id {
continue;
}
let note_type = fm
.get(&Value::from("type"))
.and_then(|v| v.as_str())
.unwrap_or("note")
.to_string();
return Ok(Note {
id: derived_id,
path: normalize_path(path),
note_type,
frontmatter: fm,
content: body.trim_start().to_string(),
});
}
Err(format!("Note not found: {}", note_id))
}
/// Create a new empty note in data/notes/.
///
/// The filename is a UTC timestamp, so successive notes are unique and sort
/// chronologically. The file is written atomically with backend-owned
/// frontmatter (id/type/created/updated) and an empty body.
pub fn create_note() -> Result<Note, String> {
    use chrono::Utc;
    let notes_dir = config::data_dir().join("notes");
    fs::create_dir_all(&notes_dir).map_err(|e| e.to_string())?;
    let filename = format!("{}.md", Utc::now().format("%Y%m%d-%H%M%S"));
    let note_path = notes_dir.join(&filename);
    let fm = frontmatter::generate_frontmatter(&note_path, "note");
    let serialized = frontmatter::serialize_frontmatter(&fm, "")?;
    // Atomic write: write to temp file, then rename.
    atomic_write(&note_path, serialized.as_bytes())?;
    let id = fm
        .get(&Value::from("id"))
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    Ok(Note {
        id,
        path: normalize_path(&note_path),
        note_type: "note".to_string(),
        frontmatter: fm,
        content: String::new(),
    })
}
/// Update an existing note by ID with full markdown payload.
/// Handles notes with or without existing frontmatter.
/// Preserves user-defined fields, updates backend-owned fields.
pub fn update_note(note_id: &str, new_content: &str) -> Result<Note, String> {
    for entry in WalkDir::new(config::data_dir())
        .into_iter()
        .filter_entry(|e| !is_ignored(e.path()))
        .filter_map(Result::ok)
    {
        let path = entry.path();
        let is_markdown = path.extension().and_then(|s| s.to_str()) == Some("md");
        if !is_markdown || !is_note_file(path) {
            continue;
        }
        let existing = fs::read_to_string(path).map_err(|e| e.to_string())?;
        let (mut fm, _old_body, has_fm) = frontmatter::parse_frontmatter(&existing);
        let derived_id = fm
            .get(&Value::from("id"))
            .and_then(|v| v.as_str())
            .map(String::from)
            .unwrap_or_else(|| frontmatter::derive_id_from_path(path));
        if derived_id != note_id {
            continue;
        }
        // Complete frontmatter only needs its `updated` timestamp bumped;
        // anything else (missing or partial) gets the backend-owned fields
        // backfilled while user-defined fields are left untouched.
        if has_fm && frontmatter::is_frontmatter_complete(&fm) {
            frontmatter::update_frontmatter(&mut fm);
        } else {
            frontmatter::ensure_frontmatter(&mut fm, path);
        }
        // Rebuild the file and persist it atomically (temp file + rename).
        let rebuilt = frontmatter::serialize_frontmatter(&fm, new_content.trim_start())?;
        atomic_write(path, rebuilt.as_bytes())?;
        let note_type = fm
            .get(&Value::from("type"))
            .and_then(|v| v.as_str())
            .unwrap_or("note")
            .to_string();
        return Ok(Note {
            id: derived_id,
            path: normalize_path(path),
            note_type,
            frontmatter: fm,
            content: new_content.to_string(),
        });
    }
    Err(format!("Note not found: {}", note_id))
}
/// Archive a note by ID (move to data/archive/).
pub fn archive_note(note_id: &str) -> Result<(), String> {
let root = config::data_dir();
let archive_dir = config::data_dir().join("archive");
fs::create_dir_all(&archive_dir).map_err(|e| e.to_string())?;
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
if !is_note_file(path) {
continue;
}
let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
let (fm, _, _) = frontmatter::parse_frontmatter(&content);
let derived_id = fm
.get(&Value::from("id"))
.and_then(|v| v.as_str())
.map(String::from)
.unwrap_or_else(|| frontmatter::derive_id_from_path(path));
if derived_id != note_id {
continue;
}
let filename = path.file_name().ok_or("Invalid filename")?;
let target = archive_dir.join(filename);
fs::rename(path, target).map_err(|e| e.to_string())?;
return Ok(());
}
Err(format!("Note not found: {}", note_id))
}
/// Atomic write: write to temp file, then rename.
/// This prevents data loss on crash or power failure.
/// Also marks the file as recently saved to avoid triggering external edit notifications.
///
/// On any failure (create, write, fsync, or rename) the temp file is removed
/// on a best-effort basis so stale ".name.tmp" files are not left behind.
pub fn atomic_write(path: &Path, contents: &[u8]) -> Result<(), String> {
    let parent = path.parent().ok_or("Invalid path")?;
    // Hidden temp file next to the target so the rename stays on one filesystem.
    let temp_name = format!(
        ".{}.tmp",
        path.file_name()
            .and_then(|s| s.to_str())
            .unwrap_or("file")
    );
    let temp_path = parent.join(temp_name);
    // Mark this file as being saved by us (to avoid triggering external edit notification)
    let normalized = normalize_path(path);
    crate::watcher::mark_file_saved(&normalized);
    // Write and fsync the temp file; durability requires the sync before rename.
    let write_result = (|| -> Result<(), String> {
        let mut file = fs::File::create(&temp_path).map_err(|e| e.to_string())?;
        file.write_all(contents).map_err(|e| e.to_string())?;
        file.sync_all().map_err(|e| e.to_string())?;
        Ok(())
    })();
    if let Err(err) = write_result {
        let _ = fs::remove_file(&temp_path); // best-effort cleanup
        return Err(err);
    }
    // Rename temp file to target (atomic on most filesystems)
    if let Err(err) = fs::rename(&temp_path, path) {
        let _ = fs::remove_file(&temp_path); // best-effort cleanup
        return Err(err.to_string());
    }
    Ok(())
}

View File

@@ -0,0 +1,191 @@
use std::path::Path;
use chrono::Utc;
use serde_yaml::{Mapping, Value};
/// Derive deterministic ID from file path.
///
/// Mirrors the filesystem's id logic:
/// 1. drop everything up to and including the `data` path component,
/// 2. drop the top-level folder (notes, projects, ...) when more than one part remains,
/// 3. strip the `.md` extension and join the remaining parts with `-`.
///
/// `data` is matched as a whole path component (not a substring), so parent
/// directories like "database" or "mydata" cannot corrupt the id, and the
/// previous out-of-bounds slice (`idx + 5`) on paths ending at "data" cannot
/// occur. `.md` is stripped only as a suffix, so a part like "a.md.bak" is
/// left intact.
pub fn derive_id_from_path(path: &Path) -> String {
    // Normalize separators so Windows and Unix paths produce identical ids.
    let unified = path.to_string_lossy().replace('\\', "/");
    let components: Vec<&str> = unified.split('/').filter(|s| !s.is_empty()).collect();
    // Skip everything up to and including the first "data" component.
    let start = components
        .iter()
        .position(|&c| c == "data")
        .map(|i| i + 1)
        .unwrap_or(0);
    let mut parts: Vec<String> = components[start..]
        .iter()
        .map(|s| s.strip_suffix(".md").unwrap_or(s).to_string())
        .collect();
    // Drop top-level folder name (notes, projects, etc.) if we have multiple parts
    if parts.len() > 1 {
        parts.remove(0);
    }
    parts.join("-")
}
/// Parse frontmatter from file content.
/// Returns (frontmatter mapping, body content, has_frontmatter flag).
pub fn parse_frontmatter(content: &str) -> (Mapping, String, bool) {
    // Files that do not open with a "---" fence have no frontmatter at all.
    if !content.starts_with("---") {
        return (Mapping::new(), content.to_string(), false);
    }
    // splitn(3) keeps any further "---" sequences inside the body intact.
    let mut sections = content.splitn(3, "---");
    sections.next(); // empty chunk before the opening fence
    let yaml_src = sections.next().unwrap_or("");
    let body = sections.next().unwrap_or("");
    // Malformed YAML degrades to an empty mapping rather than an error.
    let parsed: Value = serde_yaml::from_str(yaml_src).unwrap_or(Value::Null);
    let mapping = parsed.as_mapping().cloned().unwrap_or_default();
    (mapping, body.to_string(), true)
}
/// Serialize frontmatter and body back to markdown string.
///
/// Output layout: opening fence, YAML block, closing fence, blank line, body.
pub fn serialize_frontmatter(frontmatter: &Mapping, body: &str) -> Result<String, String> {
    let yaml = serde_yaml::to_string(frontmatter).map_err(|e| e.to_string())?;
    Ok(format!("---\n{}---\n\n{}", yaml, body.trim_start()))
}
/// Generate initial frontmatter for a newly created file.
/// Sets backend-owned fields only.
pub fn generate_frontmatter(path: &Path, note_type: &str) -> Mapping {
    let now = Utc::now().to_rfc3339();
    // Backend-owned fields: id, type, and matching created/updated timestamps.
    let entries = [
        ("id", derive_id_from_path(path)),
        ("type", note_type.to_string()),
        ("created", now.clone()),
        ("updated", now),
    ];
    let mut map = Mapping::new();
    for (key, value) in entries {
        map.insert(Value::from(key), Value::from(value));
    }
    map
}
/// Ensure frontmatter has all required backend-owned fields.
/// - If `id` is missing, derive from path
/// - If `created` is missing, set to now
/// - Always updates `updated` timestamp
/// - Preserves all user-defined fields (title, tags, status, etc.)
pub fn ensure_frontmatter(existing: &mut Mapping, path: &Path) {
    let now = Utc::now().to_rfc3339();
    let id_key = Value::from("id");
    if !existing.contains_key(&id_key) {
        existing.insert(id_key, Value::from(derive_id_from_path(path)));
    }
    let created_key = Value::from("created");
    if !existing.contains_key(&created_key) {
        // `created` is written once and never overwritten afterwards.
        existing.insert(created_key, Value::from(now.clone()));
    }
    // `updated` is refreshed unconditionally on every save.
    existing.insert(Value::from("updated"), Value::from(now));
}
/// Update frontmatter on save.
/// Only updates `updated` timestamp, preserves all other fields.
pub fn update_frontmatter(existing: &mut Mapping) {
    existing.insert(
        Value::from("updated"),
        Value::from(Utc::now().to_rfc3339()),
    );
}
/// Check if frontmatter has all required backend-owned fields
/// (`id`, `created`, and `updated`).
pub fn is_frontmatter_complete(frontmatter: &Mapping) -> bool {
    ["id", "created", "updated"]
        .iter()
        .all(|key| frontmatter.contains_key(&Value::from(*key)))
}
// ============ Helper functions for cleaner frontmatter field access ============

/// Get a string value from frontmatter by key.
pub fn get_str(fm: &Mapping, key: &str) -> Option<String> {
    let value = fm.get(&Value::from(key))?;
    value.as_str().map(ToOwned::to_owned)
}

/// Get a string value from frontmatter, with a default fallback.
pub fn get_str_or(fm: &Mapping, key: &str, default: &str) -> String {
    match get_str(fm, key) {
        Some(value) => value,
        None => default.to_string(),
    }
}

/// Get a bool value from frontmatter by key.
pub fn get_bool(fm: &Mapping, key: &str) -> Option<bool> {
    fm.get(&Value::from(key)).and_then(Value::as_bool)
}

/// Get a bool value from frontmatter, with a default fallback.
pub fn get_bool_or(fm: &Mapping, key: &str, default: bool) -> bool {
    get_bool(fm, key).unwrap_or(default)
}

/// Get a u64 value from frontmatter by key.
pub fn get_u64(fm: &Mapping, key: &str) -> Option<u64> {
    fm.get(&Value::from(key)).and_then(Value::as_u64)
}

/// Get a string sequence (tags, etc.) from frontmatter by key.
/// Non-string sequence items are silently dropped; a missing or
/// non-sequence value yields an empty vector.
pub fn get_string_seq(fm: &Mapping, key: &str) -> Vec<String> {
    match fm.get(&Value::from(key)).and_then(Value::as_sequence) {
        Some(seq) => seq
            .iter()
            .filter_map(|item| item.as_str().map(String::from))
            .collect(),
        None => Vec::new(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // A fenced YAML header is parsed into the mapping and the remainder is
    // returned as the body, with the has_frontmatter flag set.
    #[test]
    fn test_parse_frontmatter_with_frontmatter() {
        let content = "---\nid: test\ntitle: Test Note\n---\n\nBody content";
        let (fm, body, has_fm) = parse_frontmatter(content);
        assert!(has_fm);
        assert_eq!(fm.get(&Value::from("id")).unwrap().as_str().unwrap(), "test");
        assert_eq!(fm.get(&Value::from("title")).unwrap().as_str().unwrap(), "Test Note");
        assert!(body.contains("Body content"));
    }

    // Content without a leading "---" fence yields an empty mapping and the
    // untouched content as body.
    #[test]
    fn test_parse_frontmatter_without_frontmatter() {
        let content = "Just some content without frontmatter";
        let (fm, body, has_fm) = parse_frontmatter(content);
        assert!(!has_fm);
        assert!(fm.is_empty());
        assert_eq!(body, content);
    }

    // Ids come from the path after the data dir, minus the top-level folder
    // (when nested) and the .md extension.
    #[test]
    fn test_derive_id_from_path() {
        let path = Path::new("data/notes/my-note.md");
        assert_eq!(derive_id_from_path(path), "my-note");
        let path = Path::new("data/projects/myproject/index.md");
        assert_eq!(derive_id_from_path(path), "myproject-index");
    }
}

655
backend/src/services/git.rs Normal file
View File

@@ -0,0 +1,655 @@
use std::time::Duration;
use chrono::Utc;
use git2::{Repository, Signature, StatusOptions};
use serde::Serialize;
use tokio::time::interval;
use crate::config;
/// Git status for a file
#[derive(Debug, Clone, Serialize)]
pub struct FileStatus {
    /// Repo-relative path of the changed file.
    pub path: String,
    // NOTE(review): get_status() only ever emits "new", "modified",
    // "deleted", or "renamed" — untracked files are folded into "new",
    // so the "untracked" label below is never produced. Confirm intended.
    pub status: String, // "new", "modified", "deleted", "renamed", "untracked"
}

/// Overall repository status
#[derive(Debug, Serialize)]
pub struct RepoStatus {
    /// False when the data directory is not a git repository.
    pub is_repo: bool,
    /// Short name of the checked-out branch, when HEAD resolves.
    pub branch: Option<String>,
    /// Changed files; empty when the working tree is clean.
    pub files: Vec<FileStatus>,
    /// Convenience flag: true iff `files` is non-empty.
    pub has_changes: bool,
    /// Most recent commit on HEAD, if any commits exist.
    pub last_commit: Option<CommitInfo>,
}

/// Commit information
#[derive(Debug, Clone, Serialize)]
pub struct CommitInfo {
    /// Abbreviated (8-hex-char) commit SHA.
    pub id: String,
    /// Full commit message, trimmed.
    pub message: String,
    /// RFC 3339 timestamp.
    pub timestamp: String,
}

/// Extended commit info for history
#[derive(Debug, Serialize)]
pub struct CommitDetail {
    /// Full 40-hex-char commit SHA.
    pub id: String,
    /// Abbreviated (8-hex-char) SHA.
    pub short_id: String,
    pub message: String,
    pub author: String,
    /// RFC 3339 timestamp, or "Unknown" when the commit time is invalid.
    pub timestamp: String,
    /// Number of files touched, diffed against the first parent
    /// (whole tree for the initial commit).
    pub files_changed: usize,
}

/// Diff information
#[derive(Debug, Serialize)]
pub struct DiffInfo {
    pub files: Vec<FileDiff>,
    /// Aggregate counts reported by git itself (see DiffStats).
    pub stats: DiffStats,
}

/// File diff
#[derive(Debug, Serialize)]
pub struct FileDiff {
    pub path: String,
    /// One of: "added", "deleted", "modified", "renamed", "copied", "unknown".
    pub status: String,
    /// Added line count, tallied from this file's hunk lines.
    pub additions: usize,
    /// Removed line count, tallied from this file's hunk lines.
    pub deletions: usize,
    pub hunks: Vec<DiffHunk>,
}

/// Diff hunk (section of changes)
#[derive(Debug, Serialize)]
pub struct DiffHunk {
    /// The "@@ -a,b +c,d @@ ..." header line, trimmed.
    pub header: String,
    pub lines: Vec<DiffLine>,
}

/// Single diff line
#[derive(Debug, Serialize)]
pub struct DiffLine {
    /// git2 line-origin marker ('+' addition, '-' deletion, other = context/meta).
    pub origin: char,
    pub content: String,
}

/// Diff statistics
#[derive(Debug, Serialize)]
pub struct DiffStats {
    pub files_changed: usize,
    pub insertions: usize,
    pub deletions: usize,
}

/// Remote repository information
#[derive(Debug, Serialize)]
pub struct RemoteInfo {
    /// Remote name; always "origin" in this codebase.
    pub name: String,
    pub url: String,
    /// Whether the current branch tracks an upstream branch.
    pub has_upstream: bool,
    /// Commits on the local branch that upstream lacks.
    pub ahead: usize,
    /// Commits on upstream that the local branch lacks.
    pub behind: usize,
}
/// Auto-commit is enabled by default.
/// The background task simply tries to commit every interval;
/// commit_all() already handles "no changes" gracefully.
/// Get repository status
///
/// A missing repository is not an error: it is reported as a normal
/// status with `is_repo: false` and empty fields.
pub fn get_status() -> Result<RepoStatus, String> {
    let repo = match Repository::open(config::data_dir()) {
        Ok(repo) => repo,
        Err(_) => {
            return Ok(RepoStatus {
                is_repo: false,
                branch: None,
                files: Vec::new(),
                has_changes: false,
                last_commit: None,
            });
        }
    };
    // Short branch name, e.g. "main"; None when HEAD cannot be resolved.
    let branch = repo
        .head()
        .ok()
        .and_then(|head| head.shorthand().map(String::from));
    // Collect per-file statuses, including untracked files.
    let mut options = StatusOptions::new();
    options
        .include_untracked(true)
        .recurse_untracked_dirs(true)
        .exclude_submodules(true);
    let statuses = repo
        .statuses(Some(&mut options))
        .map_err(|e| e.to_string())?;
    let mut files = Vec::new();
    for entry in statuses.iter() {
        let path = match entry.path() {
            Some(p) => p.to_string(),
            None => continue,
        };
        let flags = entry.status();
        // Map index + worktree flags onto the frontend's status labels;
        // anything else (e.g. ignored) is dropped.
        let label = if flags.is_index_new() || flags.is_wt_new() {
            "new"
        } else if flags.is_index_modified() || flags.is_wt_modified() {
            "modified"
        } else if flags.is_index_deleted() || flags.is_wt_deleted() {
            "deleted"
        } else if flags.is_index_renamed() || flags.is_wt_renamed() {
            "renamed"
        } else {
            continue;
        };
        files.push(FileStatus {
            path,
            status: label.to_string(),
        });
    }
    // Most recent commit on HEAD, if any.
    let last_commit = repo.head().ok().and_then(|head| {
        let commit = head.peel_to_commit().ok()?;
        Some(CommitInfo {
            id: commit.id().to_string()[..8].to_string(),
            message: commit.message()?.trim().to_string(),
            timestamp: chrono::DateTime::from_timestamp(commit.time().seconds(), 0)?
                .to_rfc3339(),
        })
    });
    Ok(RepoStatus {
        is_repo: true,
        branch,
        has_changes: !files.is_empty(),
        files,
        last_commit,
    })
}
/// Create a commit with all changes.
///
/// Stages everything under the data directory (pathspec "*"), then commits
/// with the given message (or "Auto-save") plus a timestamp suffix.
///
/// Returns `Err("No changes to commit")` when the staged tree is identical
/// to HEAD's tree — callers (notably the auto-commit loop) match on that
/// string to treat it as the normal idle case, so the wording must not change.
pub fn commit_all(message: Option<&str>) -> Result<CommitInfo, String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;
    // Stage all working-tree changes matching "*".
    let mut index = repo.index().map_err(|e| e.to_string())?;
    index
        .add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)
        .map_err(|e| e.to_string())?;
    index.write().map_err(|e| e.to_string())?;
    // Materialize the staged tree so it can be compared with HEAD.
    let tree_id = index.write_tree().map_err(|e| e.to_string())?;
    let tree = repo.find_tree(tree_id).map_err(|e| e.to_string())?;
    // Parent commit is absent for the very first commit in the repo.
    let parent = repo.head().ok().and_then(|h| h.peel_to_commit().ok());
    // Skip empty commits: identical tree ids mean nothing changed.
    if let Some(ref p) = parent {
        if p.tree().map(|t| t.id()) == Ok(tree_id) {
            return Err("No changes to commit".to_string());
        }
    }
    let sig = Signature::now("Ironpad", "ironpad@local").map_err(|e| e.to_string())?;
    // A literal default needs no lazy closure (clippy: unnecessary_lazy_evaluations).
    let msg = message.unwrap_or("Auto-save");
    let timestamp = Utc::now().format("%Y-%m-%d %H:%M");
    let full_message = format!("{} ({})", msg, timestamp);
    let parents: Vec<&git2::Commit> = parent.as_ref().map(|p| vec![p]).unwrap_or_default();
    let commit_id = repo
        .commit(Some("HEAD"), &sig, &sig, &full_message, &tree, &parents)
        .map_err(|e| e.to_string())?;
    Ok(CommitInfo {
        // First 8 chars of the 40-hex-char SHA — safe to slice.
        id: commit_id.to_string()[..8].to_string(),
        message: full_message,
        timestamp: Utc::now().to_rfc3339(),
    })
}
/// Initialize data directory as a git repository if not already
pub fn init_repo() -> Result<(), String> {
    let data_path = config::data_dir();
    // Nothing to do when the directory is already a repository.
    if Repository::open(data_path).is_ok() {
        return Ok(());
    }
    Repository::init(data_path).map_err(|e| format!("Failed to init repo: {}", e))?;
    // Seed a .gitignore so temp and OS metadata files never get committed.
    let gitignore_path = data_path.join(".gitignore");
    if !gitignore_path.exists() {
        std::fs::write(&gitignore_path, "*.tmp\n.DS_Store\n")
            .map_err(|e| format!("Failed to create .gitignore: {}", e))?;
    }
    // Record the starting state of the data directory.
    commit_all(Some("Initial commit")).map(|_| ())
}
/// Check for merge conflicts.
///
/// Returns the repo-relative paths of all conflicted files, collected from
/// both the working-tree status flags and the index's conflict entries.
/// Conflict entries that lack an ancestor (e.g. both sides added the same
/// file) previously went unreported; they now fall back to the "our" or
/// "their" side for the path.
pub fn check_conflicts() -> Result<Vec<String>, String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;
    let mut conflicts: Vec<String> = Vec::new();
    // Check for .git/index.lock (another git operation in progress)
    let lock_path = data_path.join(".git").join("index.lock");
    if lock_path.exists() {
        // This isn't a conflict per se, but indicates git is busy
        tracing::warn!("Git index.lock exists - another operation may be in progress");
    }
    // Check status for conflicted files
    let mut opts = StatusOptions::new();
    opts.include_untracked(false);
    let statuses = repo.statuses(Some(&mut opts)).map_err(|e| e.to_string())?;
    for entry in statuses.iter() {
        if entry.status().is_conflicted() {
            if let Some(path) = entry.path() {
                conflicts.push(path.to_string());
            }
        }
    }
    // Also check the index for conflicts
    let index = repo.index().map_err(|e| e.to_string())?;
    if index.has_conflicts() {
        for conflict in index.conflicts().map_err(|e| e.to_string())?.flatten() {
            // Prefer the ancestor path, but fall back to our/their so
            // ancestor-less conflicts are still reported.
            let entry = conflict.ancestor.or(conflict.our).or(conflict.their);
            if let Some(entry) = entry {
                if let Ok(path) = std::str::from_utf8(&entry.path) {
                    if !conflicts.contains(&path.to_string()) {
                        conflicts.push(path.to_string());
                    }
                }
            }
        }
    }
    Ok(conflicts)
}
/// Push to remote repository
///
/// Pushes the currently checked-out branch to the identically named branch
/// on "origin", authenticating through the local SSH agent.
pub fn push_to_remote() -> Result<(), String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    // Pushing requires a resolvable, named branch.
    let head = repo.head().map_err(|e| e.to_string())?;
    let branch = head
        .shorthand()
        .ok_or_else(|| "Could not get branch name".to_string())?;
    // Only the conventional "origin" remote is supported.
    let mut origin = repo
        .find_remote("origin")
        .map_err(|e| format!("Remote 'origin' not found: {}", e))?;
    // Reject remotes without a usable URL before attempting the network call.
    match origin.url() {
        Some(url) if !url.is_empty() => {}
        _ => return Err("No remote URL configured".to_string()),
    }
    // Authentication goes through the SSH agent ("git" as fallback username).
    let mut callbacks = git2::RemoteCallbacks::new();
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        git2::Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
    });
    let mut push_options = git2::PushOptions::new();
    push_options.remote_callbacks(callbacks);
    // Explicit refspec: refs/heads/<branch> -> refs/heads/<branch>.
    let refspec = format!("refs/heads/{}:refs/heads/{}", branch, branch);
    origin
        .push(&[&refspec], Some(&mut push_options))
        .map_err(|e| format!("Push failed: {}. Make sure SSH keys are configured.", e))?;
    tracing::info!("Successfully pushed to origin/{}", branch);
    Ok(())
}
/// Check if remote is configured
/// (an "origin" remote exists and has a URL).
pub fn has_remote() -> bool {
    Repository::open(config::data_dir())
        .and_then(|repo| {
            repo.find_remote("origin")
                .map(|remote| remote.url().is_some())
        })
        .unwrap_or(false)
}
/// Start auto-commit background task.
/// Tries to commit every 60 seconds; commit_all() already handles "no changes" gracefully.
pub fn start_auto_commit() {
    tokio::spawn(async move {
        let mut ticker = interval(Duration::from_secs(60));
        loop {
            ticker.tick().await;
            match commit_all(Some("Auto-save")) {
                Ok(info) => tracing::info!("Auto-commit: {} - {}", info.id, info.message),
                // "No changes" is the normal idle case — only log real failures.
                Err(e) if e.contains("No changes") => {}
                Err(e) => tracing::warn!("Auto-commit failed: {}", e),
            }
        }
    });
}
/// Get commit history (most recent first)
///
/// Walks HEAD's history in time order, returning at most `limit` entries
/// (default 50), each with a per-commit changed-file count.
pub fn get_log(limit: Option<usize>) -> Result<Vec<CommitDetail>, String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    let mut revwalk = repo.revwalk().map_err(|e| e.to_string())?;
    revwalk.push_head().map_err(|e| e.to_string())?;
    revwalk
        .set_sorting(git2::Sort::TIME)
        .map_err(|e| e.to_string())?;
    let mut commits = Vec::new();
    for oid_result in revwalk.take(limit.unwrap_or(50)) {
        let oid = oid_result.map_err(|e| e.to_string())?;
        let commit = repo.find_commit(oid).map_err(|e| e.to_string())?;
        // Number of files touched: diff against the first parent, or count
        // the whole tree for the initial commit. Failures degrade to 0.
        let files_changed = if commit.parent_count() == 0 {
            commit
                .tree()
                .ok()
                .map(|t| count_tree_entries(&t))
                .unwrap_or(0)
        } else {
            let parent_tree = commit.parent(0).ok().and_then(|p| p.tree().ok());
            let commit_tree = commit.tree().ok();
            match (parent_tree, commit_tree) {
                (Some(pt), Some(ct)) => repo
                    .diff_tree_to_tree(Some(&pt), Some(&ct), None)
                    .map(|d| d.deltas().count())
                    .unwrap_or(0),
                _ => 0,
            }
        };
        let timestamp = chrono::DateTime::from_timestamp(commit.time().seconds(), 0)
            .map(|dt| dt.to_rfc3339())
            .unwrap_or_else(|| "Unknown".to_string());
        commits.push(CommitDetail {
            id: oid.to_string(),
            short_id: oid.to_string()[..8].to_string(),
            message: commit.message().unwrap_or("").trim().to_string(),
            author: commit.author().name().unwrap_or("Unknown").to_string(),
            timestamp,
            files_changed,
        });
    }
    Ok(commits)
}
/// Helper to count the file (blob) entries in a tree, recursing into
/// subdirectories via `Tree::walk`.
///
/// A flat `tree.iter()` only visits the top level, which undercounts
/// initial commits containing subdirectories; walking visits every entry.
fn count_tree_entries(tree: &git2::Tree) -> usize {
    let mut count = 0usize;
    // Walk errors are ignored: the count is informational only.
    let _ = tree.walk(git2::TreeWalkMode::PreOrder, |_, entry| {
        if entry.kind() == Some(git2::ObjectType::Blob) {
            count += 1;
        }
        git2::TreeWalkResult::Ok
    });
    count
}
/// Get working directory diff (uncommitted changes)
pub fn get_working_diff() -> Result<DiffInfo, String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    // Base the diff on HEAD's tree; a repo with no commits diffs against nothing.
    let base_tree = repo.head().ok().and_then(|head| head.peel_to_tree().ok());
    // Compare against index + working directory so staged and unstaged
    // edits both show up.
    let diff = repo
        .diff_tree_to_workdir_with_index(base_tree.as_ref(), None)
        .map_err(|e| e.to_string())?;
    parse_diff(&diff)
}
/// Get diff for a specific commit
///
/// Diffs the commit's tree against its first parent; a root commit
/// (no parent) is diffed against the empty tree.
pub fn get_commit_diff(commit_id: &str) -> Result<DiffInfo, String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    let oid = git2::Oid::from_str(commit_id).map_err(|e| format!("Invalid commit ID: {}", e))?;
    let commit = repo
        .find_commit(oid)
        .map_err(|e| format!("Commit not found: {}", e))?;
    let commit_tree = commit.tree().map_err(|e| e.to_string())?;
    // parent(0) errors for root commits, yielding None (empty-tree baseline).
    let parent_tree = commit.parent(0).ok().and_then(|parent| parent.tree().ok());
    let diff = repo
        .diff_tree_to_tree(parent_tree.as_ref(), Some(&commit_tree), None)
        .map_err(|e| e.to_string())?;
    parse_diff(&diff)
}
/// Parse a git2::Diff into our DiffInfo structure
///
/// Walks every delta (file) in the diff, extracting a status label, the
/// hunks with per-line origins, and addition/deletion counts tallied from
/// the hunk lines. Aggregate stats come from git2's own `Stats`, so the
/// per-file tallies and `stats` are computed independently.
fn parse_diff(diff: &git2::Diff) -> Result<DiffInfo, String> {
    let stats = diff.stats().map_err(|e| e.to_string())?;
    let mut files = Vec::new();
    for delta_idx in 0..diff.deltas().count() {
        let delta = diff.get_delta(delta_idx).ok_or("Missing delta")?;
        // Prefer the post-change path; fall back to the pre-change path
        // (covers deletions), then a placeholder.
        let path = delta
            .new_file()
            .path()
            .or_else(|| delta.old_file().path())
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_else(|| "Unknown".to_string());
        let status = match delta.status() {
            git2::Delta::Added => "added",
            git2::Delta::Deleted => "deleted",
            git2::Delta::Modified => "modified",
            git2::Delta::Renamed => "renamed",
            git2::Delta::Copied => "copied",
            _ => "unknown",
        };
        let mut hunks = Vec::new();
        let mut additions = 0;
        let mut deletions = 0;
        // Get patch for this file. Patch extraction failures and deltas
        // without a text patch are silently skipped: the file still appears
        // in the result, just with no hunks and zero counts.
        if let Ok(patch) = git2::Patch::from_diff(diff, delta_idx) {
            if let Some(p) = patch {
                for hunk_idx in 0..p.num_hunks() {
                    if let Ok((hunk, _)) = p.hunk(hunk_idx) {
                        let mut lines = Vec::new();
                        for line_idx in 0..p.num_lines_in_hunk(hunk_idx).unwrap_or(0) {
                            if let Ok(line) = p.line_in_hunk(hunk_idx, line_idx) {
                                let origin = line.origin();
                                // Non-UTF-8 line content degrades to "".
                                let content = std::str::from_utf8(line.content())
                                    .unwrap_or("")
                                    .to_string();
                                // Tally only real add/remove lines; context
                                // and meta markers are not counted.
                                match origin {
                                    '+' => additions += 1,
                                    '-' => deletions += 1,
                                    _ => {}
                                }
                                lines.push(DiffLine { origin, content });
                            }
                        }
                        hunks.push(DiffHunk {
                            header: std::str::from_utf8(hunk.header())
                                .unwrap_or("")
                                .trim()
                                .to_string(),
                            lines,
                        });
                    }
                }
            }
        }
        files.push(FileDiff {
            path,
            status: status.to_string(),
            additions,
            deletions,
            hunks,
        });
    }
    Ok(DiffInfo {
        files,
        stats: DiffStats {
            files_changed: stats.files_changed(),
            insertions: stats.insertions(),
            deletions: stats.deletions(),
        },
    })
}
/// Get remote repository information
///
/// Returns Ok(None) when no "origin" remote exists or it has no URL.
/// When HEAD cannot be resolved (e.g. a repo with no commits yet), the
/// remote info is returned without upstream/ahead/behind data.
pub fn get_remote_info() -> Result<Option<RemoteInfo>, String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;
    let remote = match repo.find_remote("origin") {
        Ok(r) => r,
        Err(_) => return Ok(None),
    };
    let url = remote.url().unwrap_or("").to_string();
    if url.is_empty() {
        return Ok(None);
    }
    // Get current branch; without one there is nothing to compare upstream.
    let head = match repo.head() {
        Ok(h) => h,
        Err(_) => {
            return Ok(Some(RemoteInfo {
                name: "origin".to_string(),
                url,
                has_upstream: false,
                ahead: 0,
                behind: 0,
            }));
        }
    };
    let branch_name = head.shorthand().unwrap_or("HEAD");
    // Try to find upstream branch
    let local_branch = repo.find_branch(branch_name, git2::BranchType::Local).ok();
    let upstream = local_branch.as_ref().and_then(|b| b.upstream().ok());
    let (ahead, behind) = if let Some(ref up) = upstream {
        // Calculate ahead/behind commit counts between the local tip and the
        // upstream tip; missing targets become zero oids and any graph error
        // degrades to (0, 0) rather than failing the call.
        let local_oid = head.target().unwrap_or_else(git2::Oid::zero);
        let upstream_oid = up
            .get()
            .target()
            .unwrap_or_else(git2::Oid::zero);
        repo.graph_ahead_behind(local_oid, upstream_oid)
            .unwrap_or((0, 0))
    } else {
        (0, 0)
    };
    Ok(Some(RemoteInfo {
        name: "origin".to_string(),
        url,
        has_upstream: upstream.is_some(),
        ahead,
        behind,
    }))
}
/// Fetch from remote
///
/// Fetches "origin" using its configured default refspecs, authenticating
/// through the local SSH agent (matching push_to_remote()).
pub fn fetch_from_remote() -> Result<(), String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    let mut origin = repo
        .find_remote("origin")
        .map_err(|e| format!("Remote 'origin' not found: {}", e))?;
    let mut callbacks = git2::RemoteCallbacks::new();
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        git2::Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
    });
    let mut fetch_options = git2::FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);
    // An empty refspec slice means "use the remote's configured refspecs".
    origin
        .fetch(&[] as &[&str], Some(&mut fetch_options), None)
        .map_err(|e| format!("Fetch failed: {}", e))?;
    Ok(())
}

View File

@@ -0,0 +1,149 @@
use std::collections::HashMap;
use std::sync::Arc;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;
/// Type of lock held on a file
// Serialized as "editor" / "task_view" via the snake_case rename below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LockType {
    Editor,
    TaskView,
}

/// Information about a file lock
#[derive(Debug, Clone, Serialize)]
pub struct LockInfo {
    /// File path the lock covers (as supplied by the caller).
    pub path: String,
    /// Identifier of the client holding the lock.
    pub client_id: String,
    pub lock_type: LockType,
    /// When the lock was (last) acquired; refreshed on re-acquire.
    pub acquired_at: DateTime<Utc>,
}

/// Error type for lock operations
#[derive(Debug, Clone, Serialize)]
pub enum LockError {
    /// Another client already holds a lock on the file.
    AlreadyLocked { holder: String, lock_type: LockType },
    /// Release was requested for a file with no active lock.
    NotLocked,
    /// Release was requested by a client that does not hold the lock.
    NotOwner,
}

/// Human-readable messages for lock failures.
impl std::fmt::Display for LockError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            LockError::AlreadyLocked { holder, lock_type } => {
                write!(f, "File already locked by {} ({:?})", holder, lock_type)
            }
            LockError::NotLocked => write!(f, "File is not locked"),
            LockError::NotOwner => write!(f, "You do not own this lock"),
        }
    }
}

/// Manages file locks across the application
///
/// Cloning is cheap: all clones share the same lock table behind an `Arc`.
#[derive(Debug, Clone)]
pub struct FileLockManager {
    // path -> active lock; async RwLock allows concurrent readers.
    locks: Arc<RwLock<HashMap<String, LockInfo>>>,
}
impl FileLockManager {
    /// Create a manager with an empty lock table.
    pub fn new() -> Self {
        Self {
            locks: Arc::new(RwLock::new(HashMap::new())),
        }
    }

    /// Attempt to acquire a lock on a file.
    ///
    /// Re-acquiring by the same client succeeds and refreshes the lock
    /// (type and timestamp); a different client gets `AlreadyLocked`.
    pub async fn acquire(
        &self,
        path: &str,
        client_id: &str,
        lock_type: LockType,
    ) -> Result<LockInfo, LockError> {
        let mut table = self.locks.write().await;
        match table.get(path) {
            Some(held) if held.client_id != client_id => Err(LockError::AlreadyLocked {
                holder: held.client_id.clone(),
                lock_type: held.lock_type,
            }),
            _ => {
                // Either unlocked, or re-acquired by the same client.
                let info = LockInfo {
                    path: path.to_string(),
                    client_id: client_id.to_string(),
                    lock_type,
                    acquired_at: Utc::now(),
                };
                table.insert(path.to_string(), info.clone());
                Ok(info)
            }
        }
    }

    /// Release a lock on a file.
    /// Fails with `NotLocked` / `NotOwner` when the lock is absent or
    /// held by a different client.
    pub async fn release(&self, path: &str, client_id: &str) -> Result<(), LockError> {
        let mut table = self.locks.write().await;
        match table.get(path) {
            None => Err(LockError::NotLocked),
            Some(held) if held.client_id != client_id => Err(LockError::NotOwner),
            Some(_) => {
                table.remove(path);
                Ok(())
            }
        }
    }

    /// Check if a file is locked.
    pub async fn is_locked(&self, path: &str) -> Option<LockInfo> {
        self.locks.read().await.get(path).cloned()
    }

    /// Check if a file is locked by someone other than the given client.
    pub async fn is_locked_by_other(&self, path: &str, client_id: &str) -> Option<LockInfo> {
        let table = self.locks.read().await;
        table
            .get(path)
            .filter(|held| held.client_id != client_id)
            .cloned()
    }

    /// Release all locks held by a client (used on disconnect).
    /// Returns the paths whose locks were released.
    pub async fn release_all_for_client(&self, client_id: &str) -> Vec<String> {
        let mut table = self.locks.write().await;
        let released: Vec<String> = table
            .iter()
            .filter_map(|(path, held)| (held.client_id == client_id).then(|| path.clone()))
            .collect();
        for path in &released {
            table.remove(path);
        }
        released
    }

    /// Get all current locks (for debugging/monitoring)
    pub async fn get_all_locks(&self) -> Vec<LockInfo> {
        self.locks.read().await.values().cloned().collect()
    }
}
impl Default for FileLockManager {
fn default() -> Self {
Self::new()
}
}

View File

View File

@@ -0,0 +1,6 @@
// Service layer modules.
pub mod filesystem; // Note listing / filesystem scanning.
pub mod frontmatter;
pub mod git;
pub mod locks; // In-memory file lock manager.
pub mod markdown;
pub mod search; // Note search (ripgrep with manual fallback).

View File

@@ -0,0 +1,188 @@
use std::fs;
use std::path::Path;
use std::process::{Command, Stdio};
use serde::Serialize;
use walkdir::WalkDir;
use crate::config;
/// Search result item: one matched file plus its line-level matches.
#[derive(Debug, Serialize)]
pub struct SearchResult {
    /// Path relative to the data directory, with `/` separators.
    pub path: String,
    /// File stem of the path (derived by `extract_title_from_path`).
    pub title: String,
    /// Matching lines within the file (capped at 5 per file).
    pub matches: Vec<SearchMatch>,
}
/// Individual match within a file.
#[derive(Debug, Serialize)]
pub struct SearchMatch {
    /// 1-based line number of the match.
    pub line_number: u32,
    /// The matching line, whitespace-trimmed.
    pub line_content: String,
}
/// Search notes using simple string matching.
///
/// Prefers ripgrep for speed; falls back to a manual scan of the data
/// directory when ripgrep is unavailable. An empty (or whitespace-only)
/// query short-circuits to no results.
pub fn search_notes(query: &str) -> Result<Vec<SearchResult>, String> {
    if query.trim().is_empty() {
        return Ok(Vec::new());
    }
    match search_with_ripgrep(query) {
        Ok(results) => Ok(results),
        Err(e) => {
            tracing::debug!("ripgrep not available, falling back to manual search: {}", e);
            search_manual(query)
        }
    }
}
/// Search using ripgrep (`rg`).
///
/// Runs `rg` as a literal (fixed-string) search so its results match the
/// behavior of `search_manual` and the "simple string matching" contract of
/// `search_notes` — previously the query was interpreted as a regex.
///
/// # Errors
/// Returns `Err` when `rg` cannot be spawned or exits with a hard error
/// (exit code other than 0/1 and no output), so the caller can fall back to
/// the manual search. Previously hard errors were misreported as "no matches".
fn search_with_ripgrep(query: &str) -> Result<Vec<SearchResult>, String> {
    // Bind the PathBuf first so the Cow borrows from a live value.
    let data_dir = config::data_dir();
    let data_dir_str = data_dir.to_string_lossy();
    let output = Command::new("rg")
        .args([
            "--json",           // JSON output for parsing
            "--ignore-case",    // Case insensitive
            "--fixed-strings",  // Literal match, consistent with search_manual
            "--type", "md",     // Only markdown files
            "--max-count", "5", // Max 5 matches per file
            query,
            &data_dir_str,
        ])
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .output()
        .map_err(|e| format!("Failed to run ripgrep: {}", e))?;
    // rg exits 0 on matches, 1 on no matches, 2 (or other) on errors.
    match output.status.code() {
        Some(0) => parse_ripgrep_output(&output.stdout),
        Some(1) if output.stdout.is_empty() => Ok(Vec::new()),
        _ if output.stdout.is_empty() => Err("ripgrep reported an error".to_string()),
        // Partial output despite a failure status: parse what we got.
        _ => parse_ripgrep_output(&output.stdout),
    }
}
/// Parse ripgrep's JSON-lines output, grouping matches by file path.
fn parse_ripgrep_output(output: &[u8]) -> Result<Vec<SearchResult>, String> {
    use std::collections::HashMap;
    let text = String::from_utf8_lossy(output);
    let mut grouped: HashMap<String, SearchResult> = HashMap::new();
    for line in text.lines() {
        // Skip lines that are not valid JSON or not "match" events
        // (rg also emits begin/end/summary records).
        let event: serde_json::Value = match serde_json::from_str(line) {
            Ok(v) => v,
            Err(_) => continue,
        };
        if event["type"] != "match" {
            continue;
        }
        let data = &event["data"];
        let raw_path = data["path"]["text"].as_str().unwrap_or("");
        let line_number = data["line_number"].as_u64().unwrap_or(0) as u32;
        let line_content = data["lines"]["text"]
            .as_str()
            .unwrap_or("")
            .trim()
            .to_string();
        let rel_path = normalize_path(raw_path);
        let entry = grouped.entry(rel_path.clone()).or_insert_with(|| SearchResult {
            title: extract_title_from_path(&rel_path),
            path: rel_path.clone(),
            matches: Vec::new(),
        });
        entry.matches.push(SearchMatch {
            line_number,
            line_content,
        });
    }
    Ok(grouped.into_values().collect())
}
/// Manual search fallback (no external dependencies)
fn search_manual(query: &str) -> Result<Vec<SearchResult>, String> {
let query_lower = query.to_lowercase();
let root = config::data_dir();
let mut results = Vec::new();
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
let content = match fs::read_to_string(path) {
Ok(c) => c,
Err(_) => continue,
};
let mut matches = Vec::new();
for (i, line) in content.lines().enumerate() {
if line.to_lowercase().contains(&query_lower) {
matches.push(SearchMatch {
line_number: (i + 1) as u32,
line_content: line.trim().to_string(),
});
// Limit matches per file
if matches.len() >= 5 {
break;
}
}
}
if !matches.is_empty() {
let normalized_path = normalize_path(&path.to_string_lossy());
let title = extract_title_from_path(&normalized_path);
results.push(SearchResult {
path: normalized_path,
title,
matches,
});
}
}
Ok(results)
}
/// Returns true when `path` contains a directory component that should be
/// excluded from search (`.git`, `assets`, `archive`).
fn is_ignored(path: &Path) -> bool {
    const SKIP: [&str; 3] = [".git", "assets", "archive"];
    path.components()
        .filter_map(|c| c.as_os_str().to_str())
        .any(|name| SKIP.contains(&name))
}
/// Strip the leading portion of `path` up to and including the `data`
/// directory component, and unify separators to `/`.
///
/// Matches `data` only as a whole path component: the previous substring
/// search also matched inside names like `database/`, and its fixed
/// `idx + 5` slice panicked when the path ended exactly at `data`.
/// Paths without a `data` component are returned with separators unified.
fn normalize_path(path: &str) -> String {
    let unified = path.replace('\\', "/");
    let mut segments = unified.split('/');
    // `any` advances the iterator just past the first "data" segment.
    if segments.any(|segment| segment == "data") {
        let rest: Vec<&str> = segments.collect();
        return rest.join("/").trim_start_matches('/').to_string();
    }
    unified
}
/// Derive a display title from a path: the file stem, or "Untitled" when
/// the path has no usable stem.
fn extract_title_from_path(path: &str) -> String {
    match Path::new(path).file_stem().and_then(|stem| stem.to_str()) {
        Some(stem) => stem.to_string(),
        None => "Untitled".to_string(),
    }
}