Initial release: Ironpad v0.1.0 - Local-first, file-based project and knowledge management system. Rust backend, Vue 3 frontend, Milkdown editor, Git integration, cross-platform builds. Built with AI using Open Method.

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
skepsismusic
2026-02-06 00:13:31 +01:00
commit ebe3e2aa8f
97 changed files with 25033 additions and 0 deletions

1
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

2342
backend/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

49
backend/Cargo.toml Normal file
View File

@@ -0,0 +1,49 @@
[package]
name = "ironpad"
version = "0.1.0"
edition = "2021"
[dependencies]
# Web framework (HTTP routing; "ws" + "multipart" enable the WebSocket and
# file-upload extractors used in websocket.rs and routes/assets.rs)
axum = { version = "0.8", features = ["ws", "multipart"] }
# Async runtime ("full" enables net, fs, time, macros, ...)
tokio = { version = "1", features = ["full"] }
tower = "0.5"
tower-http = { version = "0.6", features = ["cors", "normalize-path", "fs"] }
# Browser opening (production mode)
webbrowser = "1.0"
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# YAML frontmatter parsing/serialization for notes
serde_yaml = "0.9"
# Markdown parsing (CommonMark)
markdown = "1.0.0-alpha.22"
# Git operations
git2 = "0.19"
# File system watching
notify = "6.1"
notify-debouncer-full = "0.3"
# Search (ripgrep internals)
grep = "0.3"
walkdir = "2.4"
# Date/time
chrono = { version = "0.4", features = ["serde"] }
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# WebSocket support
futures-util = "0.3"
# Unique IDs (v4 = random)
uuid = { version = "1.0", features = ["v4"] }
# Utilities
lazy_static = "1.4"
# Async I/O helpers (ReaderStream for streaming file downloads)
tokio-util = { version = "0.7", features = ["io"] }

40
backend/src/config.rs Normal file
View File

@@ -0,0 +1,40 @@
use std::path::{Path, PathBuf};
use std::sync::OnceLock;
/// Resolved data directory path.
/// Priority: IRONPAD_DATA_DIR env var > auto-detect (production vs development).
/// Written exactly once by `init_data_dir`; read through `data_dir()`.
static DATA_DIR: OnceLock<PathBuf> = OnceLock::new();
/// Initialize the data directory path. Call once at startup.
///
/// Resolution order:
/// 1. `IRONPAD_DATA_DIR` environment variable (if set)
/// 2. `./data` if `static/index.html` exists (production mode)
/// 3. `../data` (development mode, binary runs from backend/)
pub fn init_data_dir() {
let path = if let Ok(custom) = std::env::var("IRONPAD_DATA_DIR") {
tracing::info!("Using custom data directory from IRONPAD_DATA_DIR");
PathBuf::from(custom)
} else if Path::new("static/index.html").exists() {
// Production mode: data/ is next to the binary
PathBuf::from("data")
} else {
// Development mode: binary runs from backend/, data/ is one level up
PathBuf::from("../data")
};
// Create the data directory if it doesn't exist
if !path.exists() {
if let Err(e) = std::fs::create_dir_all(&path) {
tracing::error!("Failed to create data directory {}: {}", path.display(), e);
}
}
tracing::info!("Data directory: {}", path.display());
DATA_DIR.set(path).expect("Data directory already initialized");
}
/// Get the resolved data directory path.
///
/// # Panics
/// Panics when `init_data_dir` has not been called yet.
pub fn data_dir() -> &'static Path {
    match DATA_DIR.get() {
        Some(path) => path,
        None => panic!("Data directory not initialized. Call config::init_data_dir() first."),
    }
}

130
backend/src/main.rs Normal file
View File

@@ -0,0 +1,130 @@
use std::net::SocketAddr;
use std::path::Path;
use std::sync::Arc;
use axum::{routing::get, Router};
use tokio::net::TcpListener;
use tower_http::cors::CorsLayer;
use tower_http::services::ServeDir;
use tracing::{info, warn};
pub mod config;
mod models;
mod routes;
mod services;
mod watcher;
mod websocket;
/// Find an available port and return the bound listener.
/// Avoids TOCTOU race by keeping the listener alive.
///
/// # Panics
/// Panics if every port in 3000..=3010 is already bound.
async fn find_available_port() -> (TcpListener, u16) {
    for port in 3000..=3010 {
        let addr = SocketAddr::from(([127, 0, 0, 1], port));
        if let Ok(listener) = TcpListener::bind(addr).await {
            return (listener, port);
        }
    }
    // Fix: the original message read "30003010" (missing range separator).
    panic!("No available ports in range 3000-3010");
}
/// Application entry point.
///
/// Startup order: logging → data-dir resolution → port binding (listener is
/// kept alive to avoid a TOCTOU race) → WebSocket state → file watcher →
/// git init + auto-commit task → router assembly → serve.
/// Watcher and git failures are non-fatal (logged as warnings).
#[tokio::main]
async fn main() {
    // Logging
    tracing_subscriber::fmt().init();
    // Resolve data directory (production vs development mode)
    config::init_data_dir();
    // Find port and bind (listener kept alive to avoid race condition)
    let (listener, port) = find_available_port().await;
    // WebSocket state (shared across handlers)
    let ws_state = Arc::new(websocket::WsState::new());
    // Start file watcher (non-fatal: app still works without live updates)
    let ws_state_clone = ws_state.clone();
    if let Err(e) = watcher::start_watcher(ws_state_clone).await {
        warn!("File watcher failed to start: {}", e);
    }
    // Initialize git repo if needed (non-fatal)
    if let Err(e) = services::git::init_repo() {
        warn!("Git init skipped: {}", e);
    }
    // Start auto-commit background task (tries to commit every 60s)
    services::git::start_auto_commit();
    // CORS layer (permissive for local-only app)
    let cors = CorsLayer::permissive();
    // API router; each feature area lives in its own sub-router.
    let api_router = Router::new()
        // Notes CRUD (list/create live here; per-id routes are nested below)
        .route(
            "/notes",
            get(routes::notes::list_notes).post(routes::notes::create_note),
        )
        .nest("/notes", routes::notes::router())
        // Tasks
        .nest("/tasks", routes::tasks::router())
        // Search
        .nest("/search", routes::search::router())
        // Git
        .nest("/git", routes::git::router())
        // Projects
        .nest("/projects", routes::projects::router())
        // Daily notes
        .nest("/daily", routes::daily::router())
        // Assets
        .nest("/assets", routes::assets::router());
    // App router with WebSocket state
    let mut app = Router::new()
        .route("/health", get(|| async { "ok" }))
        .route(
            "/ws",
            get({
                let ws = ws_state.clone();
                move |upgrade: axum::extract::WebSocketUpgrade| {
                    websocket::ws_handler(upgrade, axum::extract::State(ws))
                }
            }),
        )
        .nest("/api", api_router)
        .layer(cors);
    // Check for embedded frontend (production mode)
    let static_dir = Path::new("static");
    let has_frontend = static_dir.join("index.html").exists();
    if has_frontend {
        // Production mode: serve frontend from static/ and use SPA fallback
        info!("Production mode: serving frontend from static/");
        let serve_dir = ServeDir::new("static")
            .fallback(tower_http::services::ServeFile::new("static/index.html"));
        app = app.fallback_service(serve_dir);
    } else {
        // Development mode: API-only
        app = app.fallback(|| async {
            "Ironpad API server running. Use 'npm run dev' in frontend/ for the GUI."
        });
    }
    // Start server
    info!("🚀 Ironpad running on http://localhost:{port}");
    // Auto-open browser in production mode
    if has_frontend {
        let url = format!("http://localhost:{}", port);
        tokio::spawn(async move {
            // Small delay to ensure server is ready
            tokio::time::sleep(std::time::Duration::from_millis(300)).await;
            if let Err(e) = webbrowser::open(&url) {
                tracing::warn!("Failed to open browser: {}. Open http://localhost:{} manually.", e, port);
            }
        });
    }
    axum::serve(listener, app).await.expect("Server failed");
}

View File

@@ -0,0 +1,3 @@
// Data model definitions shared across route handlers.
pub mod note;    // NoteSummary / Note payload structs
pub mod project; // placeholder — project types live in routes/projects.rs
pub mod task;    // placeholder — task types live in routes/tasks.rs

View File

@@ -0,0 +1,23 @@
use serde::Serialize;
/// Lightweight note representation for list views.
/// Read-only, derived from filesystem + frontmatter.
#[derive(Debug, Serialize)]
pub struct NoteSummary {
    // Stable note identifier (presumably from frontmatter — confirm against filesystem service).
    pub id: String,
    pub title: String,
    // Path relative to the data directory.
    pub path: String,
    // Note category as a free-form string (e.g. "daily" is used elsewhere in this file).
    pub note_type: String,
    // Last-updated timestamp, when available in frontmatter.
    pub updated: Option<String>,
}
/// Full note payload for editor view.
/// Returned by GET /api/notes/:id
#[derive(Debug, Serialize)]
pub struct Note {
    pub id: String,
    pub path: String,
    pub note_type: String,
    // Raw YAML frontmatter mapping, passed through to the client.
    pub frontmatter: serde_yaml::Mapping,
    // Markdown body without the frontmatter block.
    pub content: String,
}

View File

@@ -0,0 +1,3 @@
// Project structs are defined inline in routes/projects.rs
// because they are tightly coupled to the API response shape.
// This module is kept as a placeholder for future shared types.

View File

@@ -0,0 +1,3 @@
// Task structs are defined inline in routes/tasks.rs
// because they are tightly coupled to the API response shape.
// This module is kept as a placeholder for future shared types.

View File

@@ -0,0 +1,265 @@
use axum::{
body::Body,
extract::{Multipart, Path, Query},
http::{header, StatusCode},
response::IntoResponse,
routing::{get, post},
Json, Router,
};
use serde::{Deserialize, Serialize};
use std::fs;
use std::io::Write;
use std::path::Path as StdPath;
use tokio_util::io::ReaderStream;
use crate::config;
// Maximum accepted upload size in bytes.
const MAX_FILE_SIZE: usize = 10 * 1024 * 1024; // 10MB
/// Query string for POST /api/assets/upload.
#[derive(Debug, Deserialize)]
pub struct UploadQuery {
    // Target project id; when absent, the upload goes to the global notes assets dir.
    pub project: Option<String>,
}
/// JSON response for a successful upload.
#[derive(Debug, Serialize)]
pub struct UploadResponse {
    // URL under /api/assets/ where the file can be fetched back.
    pub url: String,
    // Final (possibly de-duplicated) filename on disk.
    pub filename: String,
    // Size of the stored file in bytes.
    pub size: usize,
}
/// Build the /api/assets sub-router: upload plus download-by-path.
pub fn router() -> Router {
    Router::new()
        .route("/upload", post(upload_asset))
        // Fix: the second path segment was garbled ("(unknown)").
        // `get_asset` extracts a (project, filename) pair, so the route
        // needs both `{project}` and `{filename}` parameters.
        .route("/{project}/{filename}", get(get_asset))
}
async fn upload_asset(
Query(query): Query<UploadQuery>,
mut multipart: Multipart,
) -> impl IntoResponse {
// Determine target directory
let assets_dir = if let Some(project_id) = &query.project {
config::data_dir()
.join("projects")
.join(project_id)
.join("assets")
} else {
config::data_dir().join("notes").join("assets")
};
// Create assets directory if it doesn't exist
if !assets_dir.exists() {
if let Err(e) = fs::create_dir_all(&assets_dir) {
return (
StatusCode::INTERNAL_SERVER_ERROR,
format!("Failed to create assets directory: {}", e),
)
.into_response();
}
}
// Process uploaded file
while let Ok(Some(field)) = multipart.next_field().await {
let name = field.name().unwrap_or("file").to_string();
if name != "file" {
continue;
}
let original_filename = field
.file_name()
.map(|s| s.to_string())
.unwrap_or_else(|| format!("upload_{}", chrono::Utc::now().timestamp()));
// Validate file type (images only for now)
let content_type = field
.content_type()
.map(|s| s.to_string())
.unwrap_or_default();
if !is_allowed_content_type(&content_type) {
return (
StatusCode::BAD_REQUEST,
format!("Unsupported file type: {}. Only images are allowed.", content_type),
)
.into_response();
}
// Read file data
let data = match field.bytes().await {
Ok(bytes) => bytes,
Err(e) => {
return (
StatusCode::BAD_REQUEST,
format!("Failed to read file data: {}", e),
)
.into_response();
}
};
// Check file size
if data.len() > MAX_FILE_SIZE {
return (
StatusCode::BAD_REQUEST,
format!("File too large. Maximum size is {} MB.", MAX_FILE_SIZE / 1024 / 1024),
)
.into_response();
}
// Generate unique filename if needed
let filename = generate_unique_filename(&assets_dir, &original_filename);
let file_path = assets_dir.join(&filename);
// Write file
let mut file = match fs::File::create(&file_path) {
Ok(f) => f,
Err(e) => {
return (
StatusCode::INTERNAL_SERVER_ERROR,
format!("Failed to create file: {}", e),
)
.into_response();
}
};
if let Err(e) = file.write_all(&data) {
return (
StatusCode::INTERNAL_SERVER_ERROR,
format!("Failed to write file: {}", e),
)
.into_response();
}
// Build response URL
let project_part = query.project.as_deref().unwrap_or("notes");
let url = format!("/api/assets/{}/{}", project_part, filename);
return (
StatusCode::CREATED,
Json(UploadResponse {
url,
filename,
size: data.len(),
}),
)
.into_response();
}
(StatusCode::BAD_REQUEST, "No file provided").into_response()
}
/// Validate that a single path component is safe to join onto a directory:
/// rejects empty strings, `..` sequences, and `/` or `\` separators.
fn validate_path_component(component: &str) -> Result<(), String> {
    let has_separator = component.chars().any(|c| c == '/' || c == '\\');
    if component.is_empty() || component.contains("..") || has_separator {
        return Err("Invalid path component".to_string());
    }
    Ok(())
}
/// GET /api/assets/{project}/{filename} — stream a stored asset back.
/// The literal project segment "notes" maps to the global notes assets dir.
async fn get_asset(Path((project, filename)): Path<(String, String)>) -> impl IntoResponse {
    // Validate path components to prevent directory traversal
    if validate_path_component(&project).is_err() || validate_path_component(&filename).is_err() {
        return (StatusCode::BAD_REQUEST, "Invalid path").into_response();
    }
    // Determine file path
    let file_path = if project == "notes" {
        config::data_dir()
            .join("notes")
            .join("assets")
            .join(&filename)
    } else {
        config::data_dir()
            .join("projects")
            .join(&project)
            .join("assets")
            .join(&filename)
    };
    // Check if file exists
    if !file_path.exists() {
        return (StatusCode::NOT_FOUND, "Asset not found").into_response();
    }
    // Open asynchronously; the body below streams it without buffering fully.
    let file = match tokio::fs::File::open(&file_path).await {
        Ok(f) => f,
        Err(e) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to open file: {}", e),
            )
                .into_response();
        }
    };
    // Determine content type from the filename extension.
    let content_type = get_content_type(&filename);
    // Stream file response
    let stream = ReaderStream::new(file);
    let body = Body::from_stream(stream);
    (
        StatusCode::OK,
        [(header::CONTENT_TYPE, content_type)],
        body,
    )
        .into_response()
}
/// Allow-list of MIME types accepted by the upload endpoint.
fn is_allowed_content_type(content_type: &str) -> bool {
    [
        "image/jpeg",
        "image/png",
        "image/gif",
        "image/webp",
        "image/svg+xml",
        "application/pdf",
    ]
    .contains(&content_type)
}
/// Map a filename's extension to a MIME type for the download response.
/// Unknown extensions fall back to `application/octet-stream`.
fn get_content_type(filename: &str) -> &'static str {
    // `split('.').last()` yields the final segment (the whole name when
    // there is no dot), which is then lowercased for matching.
    let extension = filename.split('.').last().unwrap_or("").to_lowercase();
    if extension == "jpg" || extension == "jpeg" {
        "image/jpeg"
    } else if extension == "png" {
        "image/png"
    } else if extension == "gif" {
        "image/gif"
    } else if extension == "webp" {
        "image/webp"
    } else if extension == "svg" {
        "image/svg+xml"
    } else if extension == "pdf" {
        "application/pdf"
    } else {
        "application/octet-stream"
    }
}
/// Produce a filesystem-safe filename inside `dir` based on `original`.
///
/// Both the stem and the extension are sanitized to alphanumerics plus a
/// small safe set. If the sanitized name is unused in `dir` it is returned
/// as-is; otherwise a Unix-epoch millisecond timestamp is appended so an
/// existing asset is never clobbered.
fn generate_unique_filename(dir: &StdPath, original: &str) -> String {
    // Split into stem and extension at the last dot (extension keeps the dot).
    let (name, ext) = match original.rfind('.') {
        Some(dot_idx) => (&original[..dot_idx], &original[dot_idx..]),
        None => (original, ""),
    };
    // Sanitize the stem: keep alphanumerics, '-' and '_'.
    let sanitized_name: String = name
        .chars()
        .map(|c| if c.is_alphanumeric() || c == '-' || c == '_' { c } else { '_' })
        .collect();
    // Fix: sanitize the extension too. The raw tail after the last dot could
    // contain path separators (e.g. "a.b/evil"), which previously flowed
    // straight into the on-disk path.
    let sanitized_ext: String = ext
        .chars()
        .map(|c| if c.is_alphanumeric() || c == '.' { c } else { '_' })
        .collect();
    let base_filename = format!("{}{}", sanitized_name, sanitized_ext);
    // If file doesn't exist, use the sanitized original name.
    if !dir.join(&base_filename).exists() {
        return base_filename;
    }
    // Otherwise disambiguate with an epoch-millisecond timestamp
    // (std::time gives the same value chrono's timestamp_millis() did).
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_millis())
        .unwrap_or(0);
    format!("{}_{}{}", sanitized_name, timestamp, sanitized_ext)
}

319
backend/src/routes/daily.rs Normal file
View File

@@ -0,0 +1,319 @@
use axum::{
body::Bytes,
extract::Path,
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use chrono::{NaiveDate, Utc};
use serde::{Deserialize, Serialize};
use std::fs;
use crate::services::filesystem;
use crate::config;
use crate::services::frontmatter;
/// Full daily-note payload (frontmatter + markdown body).
#[derive(Debug, Serialize)]
pub struct DailyNote {
    // Derived id of the form "daily-YYYY-MM-DD".
    pub id: String,
    // Date in YYYY-MM-DD form (also the file stem).
    pub date: String,
    // Path relative to the data dir: "daily/YYYY-MM-DD.md".
    pub path: String,
    // Markdown body without the frontmatter block.
    pub content: String,
    // Raw YAML frontmatter mapping.
    pub frontmatter: serde_yaml::Mapping,
}
/// Lightweight daily-note entry for list views.
#[derive(Debug, Serialize)]
pub struct DailyNoteSummary {
    pub id: String,
    pub date: String,
    pub path: String,
    // Frontmatter title, falling back to the date when absent.
    pub title: String,
}
/// Build the /api/daily sub-router.
///
/// Routes:
/// - GET `/`       — list all daily notes
/// - GET `/today`  — fetch today's note, creating it if missing
/// - GET/POST/PUT `/{date}` — read / create / update a note by YYYY-MM-DD
pub fn router() -> Router {
    Router::new()
        .route("/", get(list_daily_notes))
        .route("/today", get(get_or_create_today))
        .route("/{date}", get(get_daily_note).post(create_daily_note).put(update_daily_note))
}
/// GET /api/daily — list all daily notes (newest first).
async fn list_daily_notes() -> impl IntoResponse {
    let listing = list_daily_notes_impl();
    match listing {
        Err(err) => {
            let msg = format!("Failed to list daily notes: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(notes) => Json(notes).into_response(),
    }
}
/// Scan `<data>/daily/` for `YYYY-MM-DD.md` files and build summaries,
/// sorted by date descending. Creates the directory on first use.
fn list_daily_notes_impl() -> Result<Vec<DailyNoteSummary>, String> {
    let daily_dir = config::data_dir().join("daily");
    // First run: create the directory and report an empty list.
    if !daily_dir.exists() {
        fs::create_dir_all(&daily_dir).map_err(|e| e.to_string())?;
        return Ok(Vec::new());
    }
    let mut notes = Vec::new();
    let entries = fs::read_dir(&daily_dir).map_err(|e| e.to_string())?;
    for entry in entries {
        let path = entry.map_err(|e| e.to_string())?.path();
        // Only `.md` files whose stem parses as a date are daily notes.
        let is_md = path.extension().and_then(|s| s.to_str()) == Some("md");
        if !is_md {
            continue;
        }
        let stem = path.file_stem().and_then(|s| s.to_str()).unwrap_or("");
        if NaiveDate::parse_from_str(stem, "%Y-%m-%d").is_err() {
            continue;
        }
        let raw = fs::read_to_string(&path).map_err(|e| e.to_string())?;
        let (fm, _, _) = frontmatter::parse_frontmatter(&raw);
        // Prefer the frontmatter title; fall back to the date stem.
        let title = match fm.get(&serde_yaml::Value::from("title")).and_then(|v| v.as_str()) {
            Some(t) => t.to_string(),
            None => stem.to_string(),
        };
        notes.push(DailyNoteSummary {
            id: format!("daily-{}", stem),
            date: stem.to_string(),
            path: format!("daily/{}.md", stem),
            title,
        });
    }
    // Sort by date descending (newest first).
    notes.sort_by(|a, b| b.date.cmp(&a.date));
    Ok(notes)
}
/// GET /api/daily/today — fetch today's note, creating it from the default
/// template when it does not exist yet.
async fn get_or_create_today() -> impl IntoResponse {
    let today = Utc::now().format("%Y-%m-%d").to_string();
    match get_daily_note_impl(&today) {
        Ok(note) => Json(note).into_response(),
        // Fix: only a missing note should trigger creation. Previously ANY
        // read error (e.g. an I/O failure) fell through to a create attempt,
        // which then failed with a misleading "already exists" error.
        Err(err) if err.contains("not found") => {
            match create_daily_note_impl(&today, None) {
                Ok(note) => (StatusCode::CREATED, Json(note)).into_response(),
                Err(err) => (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    format!("Failed to create today's note: {}", err),
                )
                    .into_response(),
            }
        }
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to read today's note: {}", err),
        )
            .into_response(),
    }
}
/// GET /api/daily/{date} — fetch one daily note by YYYY-MM-DD date.
async fn get_daily_note(Path(date): Path<String>) -> impl IntoResponse {
    // Reject anything that is not a valid calendar date.
    if NaiveDate::parse_from_str(&date, "%Y-%m-%d").is_err() {
        return (StatusCode::BAD_REQUEST, "Invalid date format. Use YYYY-MM-DD").into_response();
    }
    let result = get_daily_note_impl(&date);
    match result {
        Ok(note) => Json(note).into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to get daily note: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Read `<data>/daily/<date>.md` and split it into frontmatter + body.
/// Errors with a "not found" message when the file is absent.
fn get_daily_note_impl(date: &str) -> Result<DailyNote, String> {
    let note_path = config::data_dir().join("daily").join(format!("{}.md", date));
    if !note_path.exists() {
        return Err(format!("Daily note not found: {}", date));
    }
    let raw = fs::read_to_string(&note_path).map_err(|e| e.to_string())?;
    let (fm, body, _) = frontmatter::parse_frontmatter(&raw);
    Ok(DailyNote {
        id: format!("daily-{}", date),
        date: date.to_string(),
        path: format!("daily/{}.md", date),
        content: body,
        frontmatter: fm,
    })
}
/// JSON body for POST /api/daily/{date}.
#[derive(Debug, Deserialize)]
pub struct CreateDailyNoteRequest {
    // Optional initial markdown body; a default template is used when absent.
    pub content: Option<String>,
}
/// POST /api/daily/{date} — create a daily note, optionally seeded with
/// caller-supplied content; 409 when the note already exists.
async fn create_daily_note(
    Path(date): Path<String>,
    body: Option<Json<CreateDailyNoteRequest>>,
) -> impl IntoResponse {
    // Reject anything that is not a valid calendar date.
    if NaiveDate::parse_from_str(&date, "%Y-%m-%d").is_err() {
        return (StatusCode::BAD_REQUEST, "Invalid date format. Use YYYY-MM-DD").into_response();
    }
    let content = body.and_then(|b| b.content.clone());
    let created = create_daily_note_impl(&date, content.as_deref());
    match created {
        Ok(note) => (StatusCode::CREATED, Json(note)).into_response(),
        Err(err) => {
            if err.contains("already exists") {
                (StatusCode::CONFLICT, err).into_response()
            } else {
                let msg = format!("Failed to create daily note: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Create `<data>/daily/<date>.md` with standard frontmatter and either the
/// caller's content or a default template. Errors if the note exists.
/// The write is atomic (via `filesystem::atomic_write`).
fn create_daily_note_impl(date: &str, initial_content: Option<&str>) -> Result<DailyNote, String> {
    let daily_dir = config::data_dir().join("daily");
    // Create directory if it doesn't exist
    if !daily_dir.exists() {
        fs::create_dir_all(&daily_dir).map_err(|e| e.to_string())?;
    }
    let note_path = daily_dir.join(format!("{}.md", date));
    if note_path.exists() {
        return Err(format!("Daily note already exists: {}", date));
    }
    let now = Utc::now().to_rfc3339();
    // Parse date for display (e.g. "Friday, February 06, 2026")
    let parsed_date = NaiveDate::parse_from_str(date, "%Y-%m-%d")
        .map_err(|e| e.to_string())?;
    let display_date = parsed_date.format("%A, %B %d, %Y").to_string();
    // Create frontmatter. NOTE: insertion order here determines the key
    // order of the serialized YAML on disk — keep it stable.
    let mut fm = serde_yaml::Mapping::new();
    fm.insert(
        serde_yaml::Value::from("id"),
        serde_yaml::Value::from(format!("daily-{}", date)),
    );
    fm.insert(
        serde_yaml::Value::from("type"),
        serde_yaml::Value::from("daily"),
    );
    fm.insert(
        serde_yaml::Value::from("title"),
        serde_yaml::Value::from(display_date.clone()),
    );
    fm.insert(
        serde_yaml::Value::from("date"),
        serde_yaml::Value::from(date),
    );
    fm.insert(
        serde_yaml::Value::from("created"),
        serde_yaml::Value::from(now.clone()),
    );
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(now),
    );
    // Use provided content or default template
    let body = initial_content
        .map(|c| c.to_string())
        .unwrap_or_else(|| {
            format!(
                "# {}\n\n## Today's Focus\n\n- \n\n## Notes\n\n\n\n## Tasks\n\n- [ ] \n",
                display_date
            )
        });
    let content = frontmatter::serialize_frontmatter(&fm, &body)?;
    filesystem::atomic_write(&note_path, content.as_bytes())?;
    Ok(DailyNote {
        id: format!("daily-{}", date),
        date: date.to_string(),
        path: format!("daily/{}.md", date),
        content: body,
        frontmatter: fm,
    })
}
/// PUT /api/daily/{date} — replace a daily note's markdown body.
/// The raw request body is treated as the new content.
async fn update_daily_note(
    Path(date): Path<String>,
    body: Bytes,
) -> impl IntoResponse {
    // Reject anything that is not a valid calendar date.
    if NaiveDate::parse_from_str(&date, "%Y-%m-%d").is_err() {
        return (StatusCode::BAD_REQUEST, "Invalid date format. Use YYYY-MM-DD").into_response();
    }
    let content = String::from_utf8_lossy(&body).to_string();
    let updated = update_daily_note_impl(&date, &content);
    match updated {
        Ok(note) => Json(note).into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to update daily note: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Rewrite a daily note's body while preserving its existing frontmatter,
/// refreshing only the `updated` timestamp. Atomic write; errors with a
/// "not found" message when the note file is absent.
fn update_daily_note_impl(date: &str, new_content: &str) -> Result<DailyNote, String> {
    let daily_dir = config::data_dir().join("daily");
    let note_path = daily_dir.join(format!("{}.md", date));
    if !note_path.exists() {
        return Err(format!("Daily note not found: {}", date));
    }
    // Read existing file to preserve frontmatter
    let existing_content = fs::read_to_string(&note_path).map_err(|e| e.to_string())?;
    let (mut fm, _, _) = frontmatter::parse_frontmatter(&existing_content);
    // Update the 'updated' timestamp
    let now = Utc::now().to_rfc3339();
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(now),
    );
    // Serialize with updated frontmatter and new content (atomic write)
    let file_content = frontmatter::serialize_frontmatter(&fm, new_content)?;
    filesystem::atomic_write(&note_path, file_content.as_bytes())?;
    Ok(DailyNote {
        id: format!("daily-{}", date),
        date: date.to_string(),
        path: format!("daily/{}.md", date),
        content: new_content.to_string(),
        frontmatter: fm,
    })
}

184
backend/src/routes/git.rs Normal file
View File

@@ -0,0 +1,184 @@
use axum::{
extract::{Path, Query},
http::StatusCode,
response::IntoResponse,
routing::{get, post},
Json, Router,
};
use serde::{Deserialize, Serialize};
use crate::services::git;
/// Build the /api/git sub-router: status, commit, history, diffs, and
/// remote (push/fetch) operations, all delegating to `services::git`.
pub fn router() -> Router {
    Router::new()
        .route("/status", get(get_status))
        .route("/commit", post(commit))
        .route("/init", post(init_repo))
        .route("/conflicts", get(get_conflicts))
        .route("/push", post(push))
        .route("/log", get(get_log))
        .route("/diff", get(get_working_diff))
        .route("/diff/{commit_id}", get(get_commit_diff))
        .route("/remote", get(get_remote))
        .route("/fetch", post(fetch))
}
/// GET /api/git/status — repository status as JSON.
async fn get_status() -> impl IntoResponse {
    let status = git::get_status();
    match status {
        Err(err) => {
            let msg = format!("Failed to get git status: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(s) => Json(s).into_response(),
    }
}
/// JSON body for POST /api/git/commit.
#[derive(Debug, Deserialize)]
pub struct CommitRequest {
    // Optional commit message; `None` is passed straight to
    // `git::commit_all` — presumably a default is generated there; confirm.
    message: Option<String>,
}
/// POST /api/git/commit — stage and commit all changes.
/// NOTE(review): every failure maps to 400; internal errors arguably
/// deserve 500 — confirm the intent before changing.
async fn commit(Json(payload): Json<CommitRequest>) -> impl IntoResponse {
    match git::commit_all(payload.message.as_deref()) {
        Ok(info) => (StatusCode::CREATED, Json(info)).into_response(),
        Err(err) => (StatusCode::BAD_REQUEST, err).into_response(),
    }
}
/// POST /api/git/init — initialize the repository if needed.
async fn init_repo() -> impl IntoResponse {
    let result = git::init_repo();
    match result {
        Err(err) => {
            let msg = format!("Failed to init repo: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(_) => StatusCode::OK.into_response(),
    }
}
/// GET /api/git/conflicts — list conflicted paths, if any.
async fn get_conflicts() -> impl IntoResponse {
    let conflicts = git::check_conflicts();
    match conflicts {
        Err(err) => {
            let msg = format!("Failed to check conflicts: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(c) => Json(c).into_response(),
    }
}
/// JSON response for POST /api/git/push.
#[derive(Debug, Serialize)]
struct PushResponse {
    success: bool,
    message: String,
}
/// POST /api/git/push — push to the configured remote.
/// Returns 400 (with guidance) when no remote is configured.
async fn push() -> impl IntoResponse {
    // Check if remote is configured
    if !git::has_remote() {
        return (
            StatusCode::BAD_REQUEST,
            Json(PushResponse {
                success: false,
                message: "No remote repository configured. Add a remote with: git remote add origin <url>".to_string(),
            }),
        )
            .into_response();
    }
    match git::push_to_remote() {
        Ok(()) => (
            StatusCode::OK,
            Json(PushResponse {
                success: true,
                message: "Successfully pushed to remote".to_string(),
            }),
        )
            .into_response(),
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(PushResponse {
                success: false,
                message: err,
            }),
        )
            .into_response(),
    }
}
/// Query string for GET /api/git/log.
#[derive(Debug, Deserialize)]
pub struct LogQuery {
    // Maximum number of commits to return; `None` is passed through to
    // `git::get_log` — presumably an unbounded/default limit; confirm.
    limit: Option<usize>,
}
/// GET /api/git/log — recent commit history.
async fn get_log(Query(query): Query<LogQuery>) -> impl IntoResponse {
    match git::get_log(query.limit) {
        Ok(commits) => Json(commits).into_response(),
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to get git log: {}", err),
        )
            .into_response(),
    }
}
/// GET /api/git/diff — diff of the working tree.
async fn get_working_diff() -> impl IntoResponse {
    let diff = git::get_working_diff();
    match diff {
        Err(err) => {
            let msg = format!("Failed to get diff: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(d) => Json(d).into_response(),
    }
}
/// GET /api/git/diff/{commit_id} — diff introduced by a single commit.
async fn get_commit_diff(Path(commit_id): Path<String>) -> impl IntoResponse {
    let diff = git::get_commit_diff(&commit_id);
    match diff {
        Err(err) => {
            let msg = format!("Failed to get commit diff: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(d) => Json(d).into_response(),
    }
}
/// GET /api/git/remote — information about the configured remote.
async fn get_remote() -> impl IntoResponse {
    let info = git::get_remote_info();
    match info {
        Err(err) => {
            let msg = format!("Failed to get remote info: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(i) => Json(i).into_response(),
    }
}
/// JSON response for POST /api/git/fetch.
#[derive(Debug, Serialize)]
struct FetchResponse {
    success: bool,
    message: String,
}
/// POST /api/git/fetch — fetch from the configured remote.
async fn fetch() -> impl IntoResponse {
    match git::fetch_from_remote() {
        Ok(()) => (
            StatusCode::OK,
            Json(FetchResponse {
                success: true,
                message: "Successfully fetched from remote".to_string(),
            }),
        )
            .into_response(),
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(FetchResponse {
                success: false,
                message: err,
            }),
        )
            .into_response(),
    }
}

View File

@@ -0,0 +1,7 @@
// HTTP route handlers, one module per /api sub-router (see main.rs).
pub mod assets;
pub mod daily;
pub mod git;
pub mod notes;
pub mod projects;
pub mod search;
pub mod tasks;

View File

@@ -0,0 +1,82 @@
use axum::{
extract::Path,
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use crate::models::note::{Note, NoteSummary};
use crate::services::filesystem;
/// Build the per-note sub-router (read/update/archive by id).
/// List and create are registered on the parent router in main.rs.
pub fn router() -> Router {
    Router::new()
        .route("/{id}", get(get_note).put(update_note).delete(delete_note))
}
/// GET /api/notes — summaries of all notes.
pub async fn list_notes() -> impl IntoResponse {
    let listing = filesystem::list_notes();
    match listing {
        Err(err) => {
            let msg = format!("Failed to list notes: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(notes) => Json::<Vec<NoteSummary>>(notes).into_response(),
    }
}
/// GET /api/notes/{id} — full note payload for the editor.
async fn get_note(Path(id): Path<String>) -> impl IntoResponse {
    match filesystem::read_note_by_id(&id) {
        Ok(note) => Json::<Note>(note).into_response(),
        Err(err) => {
            if err.starts_with("Note not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to read note: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// POST /api/notes — create a new (blank) note.
pub async fn create_note() -> impl IntoResponse {
    let created = filesystem::create_note();
    match created {
        Err(err) => {
            let msg = format!("Failed to create note: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(note) => (StatusCode::CREATED, Json::<Note>(note)).into_response(),
    }
}
/// PUT /api/notes/{id} — replace a note's content with the raw request body.
async fn update_note(
    Path(id): Path<String>,
    body: String,
) -> impl IntoResponse {
    match filesystem::update_note(&id, &body) {
        Ok(note) => Json::<Note>(note).into_response(),
        Err(err) => {
            if err.starts_with("Note not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to update note: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// DELETE /api/notes/{id} — soft delete: the note is archived, not removed.
async fn delete_note(Path(id): Path<String>) -> impl IntoResponse {
    let archived = filesystem::archive_note(&id);
    match archived {
        Err(err) => {
            if err.starts_with("Note not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to archive note: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
        Ok(_) => StatusCode::NO_CONTENT.into_response(),
    }
}

View File

@@ -0,0 +1,860 @@
use axum::{
extract::Path,
http::StatusCode,
response::IntoResponse,
routing::{get, put},
Json, Router,
};
use serde::{Deserialize, Serialize};
use std::fs;
use crate::routes::tasks::{
CreateTaskRequest, UpdateTaskMetaRequest,
list_project_tasks_handler, create_task_handler, get_task_handler,
update_task_content_handler, toggle_task_handler, update_task_meta_handler,
delete_task_handler,
};
use crate::services::filesystem;
use crate::config;
use crate::services::frontmatter;
/// Project metadata, derived from `projects/<id>/index.md` frontmatter.
#[derive(Debug, Serialize)]
pub struct Project {
    // Directory name under projects/ (slug).
    pub id: String,
    // Frontmatter title, falling back to the id.
    pub name: String,
    // Path relative to the data directory ("projects/<id>").
    pub path: String,
    // Frontmatter `created` timestamp (empty string when absent).
    pub created: String,
}
/// Project metadata plus its index.md body.
#[derive(Debug, Serialize)]
pub struct ProjectWithContent {
    pub id: String,
    pub name: String,
    pub path: String,
    pub created: String,
    pub content: String,
}
/// JSON body for PUT /{id}/content.
#[derive(Debug, Deserialize)]
pub struct UpdateProjectContentRequest {
    pub content: String,
}
/// JSON body for POST / (create project).
#[derive(Debug, Deserialize)]
pub struct CreateProjectRequest {
    pub name: String,
}
/// Summary of a note that belongs to a project.
#[derive(Debug, Serialize)]
pub struct ProjectNote {
    pub id: String,
    pub title: String,
    pub path: String,
    pub project_id: String,
    pub created: String,
    pub updated: String,
}
/// Project note summary plus its markdown body.
#[derive(Debug, Serialize)]
pub struct ProjectNoteWithContent {
    pub id: String,
    pub title: String,
    pub path: String,
    pub project_id: String,
    pub created: String,
    pub updated: String,
    pub content: String,
}
/// JSON body for POST /{id}/notes.
#[derive(Debug, Deserialize)]
pub struct CreateNoteRequest {
    pub title: Option<String>,
}
/// Build the /api/projects sub-router: project CRUD, project content,
/// per-project tasks (delegated to routes/tasks.rs handlers) and notes.
pub fn router() -> Router {
    Router::new()
        .route("/", get(list_projects).post(create_project))
        .route("/{id}", get(get_project))
        .route("/{id}/content", get(get_project_content).put(update_project_content))
        // Task routes (file-based)
        .route("/{id}/tasks", get(get_project_tasks).post(create_project_task))
        .route("/{id}/tasks/{task_id}", get(get_project_task).put(update_project_task).delete(delete_project_task))
        .route("/{id}/tasks/{task_id}/toggle", put(toggle_project_task))
        .route("/{id}/tasks/{task_id}/meta", put(update_project_task_meta))
        // Note routes
        .route("/{id}/notes", get(list_project_notes).post(create_project_note))
        .route("/{id}/notes/{note_id}", get(get_project_note).put(update_project_note).delete(delete_project_note))
}
// ============ Task Handlers ============
// Thin adapters: unpack the axum path/body extractors and delegate to the
// shared task handlers implemented in routes/tasks.rs.
async fn get_project_tasks(Path(id): Path<String>) -> impl IntoResponse {
    list_project_tasks_handler(id).await
}
async fn create_project_task(
    Path(id): Path<String>,
    Json(payload): Json<CreateTaskRequest>,
) -> impl IntoResponse {
    create_task_handler(id, payload).await
}
async fn get_project_task(Path((id, task_id)): Path<(String, String)>) -> impl IntoResponse {
    get_task_handler(id, task_id).await
}
// Raw request body becomes the new task content.
async fn update_project_task(
    Path((id, task_id)): Path<(String, String)>,
    body: String,
) -> impl IntoResponse {
    update_task_content_handler(id, task_id, body).await
}
async fn toggle_project_task(Path((id, task_id)): Path<(String, String)>) -> impl IntoResponse {
    toggle_task_handler(id, task_id).await
}
async fn update_project_task_meta(
    Path((id, task_id)): Path<(String, String)>,
    Json(payload): Json<UpdateTaskMetaRequest>,
) -> impl IntoResponse {
    update_task_meta_handler(id, task_id, payload).await
}
async fn delete_project_task(Path((id, task_id)): Path<(String, String)>) -> impl IntoResponse {
    delete_task_handler(id, task_id).await
}
/// GET /api/projects — list all projects.
async fn list_projects() -> impl IntoResponse {
    let listing = list_projects_impl();
    match listing {
        Err(err) => {
            let msg = format!("Failed to list projects: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
        Ok(projects) => Json(projects).into_response(),
    }
}
/// Scan `<data>/projects/` for directories containing an `index.md` and
/// build `Project` entries from their frontmatter. Directories without an
/// index.md are silently skipped.
fn list_projects_impl() -> Result<Vec<Project>, String> {
    let projects_dir = config::data_dir().join("projects");
    if !projects_dir.exists() {
        return Ok(Vec::new());
    }
    let mut projects = Vec::new();
    for entry in fs::read_dir(&projects_dir).map_err(|e| e.to_string())? {
        let entry = entry.map_err(|e| e.to_string())?;
        let path = entry.path();
        if !path.is_dir() {
            continue;
        }
        let index_path = path.join("index.md");
        if !index_path.exists() {
            continue;
        }
        let content = fs::read_to_string(&index_path).map_err(|e| e.to_string())?;
        let (fm, _, _) = frontmatter::parse_frontmatter(&content);
        // The directory name is the project id (slug).
        let id = path
            .file_name()
            .and_then(|s| s.to_str())
            .unwrap_or("")
            .to_string();
        // Frontmatter title, falling back to the id.
        let name = fm
            .get(&serde_yaml::Value::from("title"))
            .and_then(|v| v.as_str())
            .map(String::from)
            .unwrap_or_else(|| id.clone());
        let created = fm
            .get(&serde_yaml::Value::from("created"))
            .and_then(|v| v.as_str())
            .map(String::from)
            .unwrap_or_default();
        projects.push(Project {
            id: id.clone(),
            name,
            path: format!("projects/{}", id),
            created,
        });
    }
    Ok(projects)
}
/// Handler: project metadata read from its `index.md` frontmatter.
/// 404 when the project directory has no index.md.
async fn get_project(Path(id): Path<String>) -> impl IntoResponse {
    let index_path = config::data_dir()
        .join("projects")
        .join(&id)
        .join("index.md");
    if !index_path.exists() {
        return (StatusCode::NOT_FOUND, "Project not found").into_response();
    }
    match fs::read_to_string(&index_path) {
        Ok(content) => {
            let (fm, _, _) = frontmatter::parse_frontmatter(&content);
            // Shared frontmatter helpers keep fallbacks consistent with the
            // task/note parsers: title -> id, created -> "".
            Json(Project {
                id: id.clone(),
                name: frontmatter::get_str_or(&fm, "title", &id),
                path: format!("projects/{}", id),
                created: frontmatter::get_str_or(&fm, "created", ""),
            })
            .into_response()
        }
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to read project: {}", err),
        )
            .into_response(),
    }
}
/// Handler: create a new project from the request payload; 201 on success.
async fn create_project(Json(request): Json<CreateProjectRequest>) -> impl IntoResponse {
    match create_project_impl(&request.name) {
        Ok(project) => (StatusCode::CREATED, Json(project)).into_response(),
        Err(err) => {
            let msg = format!("Failed to create project: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
    }
}
/// Create a new project directory tree and its `index.md`.
///
/// The slug is derived from `name`: lowercased, alphanumerics kept, and
/// every run of other characters collapsed to a single '-', then trimmed.
/// Fix: the previous version mapped each separator character to its own
/// '-', producing slugs like `my--project` for "my  project".
fn create_project_impl(name: &str) -> Result<Project, String> {
    use chrono::Utc;
    // Build the slug, collapsing consecutive separators into one dash.
    let mut slug = String::with_capacity(name.len());
    for c in name.to_lowercase().chars() {
        if c.is_alphanumeric() {
            slug.push(c);
        } else if !slug.ends_with('-') {
            slug.push('-');
        }
    }
    let slug = slug.trim_matches('-').to_string();
    if slug.is_empty() {
        return Err("Invalid project name".to_string());
    }
    let projects_dir = config::data_dir().join("projects");
    let project_dir = projects_dir.join(&slug);
    if project_dir.exists() {
        return Err("Project already exists".to_string());
    }
    // Create the full project layout up front: assets/, notes/, tasks/.
    fs::create_dir_all(&project_dir).map_err(|e| e.to_string())?;
    fs::create_dir_all(project_dir.join("assets")).map_err(|e| e.to_string())?;
    fs::create_dir_all(project_dir.join("notes")).map_err(|e| e.to_string())?;
    fs::create_dir_all(project_dir.join("tasks")).map_err(|e| e.to_string())?;
    // Write index.md with the standard frontmatter fields.
    let index_path = project_dir.join("index.md");
    let now = Utc::now().to_rfc3339();
    let mut fm = serde_yaml::Mapping::new();
    fm.insert(
        serde_yaml::Value::from("id"),
        serde_yaml::Value::from(format!("{}-index", slug)),
    );
    fm.insert(
        serde_yaml::Value::from("type"),
        serde_yaml::Value::from("project"),
    );
    fm.insert(
        serde_yaml::Value::from("title"),
        serde_yaml::Value::from(name),
    );
    fm.insert(
        serde_yaml::Value::from("created"),
        serde_yaml::Value::from(now.clone()),
    );
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(now.clone()),
    );
    let content = frontmatter::serialize_frontmatter(&fm, &format!("# {}\n\n", name))?;
    // Atomic write so a crash cannot leave a half-written index.md.
    filesystem::atomic_write(&index_path, content.as_bytes())?;
    Ok(Project {
        id: slug.clone(),
        name: name.to_string(),
        path: format!("projects/{}", slug),
        created: now,
    })
}
/// Handler: project metadata plus the markdown body of its index.md
/// (frontmatter stripped). 404 when the project has no index.md.
async fn get_project_content(Path(id): Path<String>) -> impl IntoResponse {
    let index_path = config::data_dir()
        .join("projects")
        .join(&id)
        .join("index.md");
    if !index_path.exists() {
        return (StatusCode::NOT_FOUND, "Project not found").into_response();
    }
    match fs::read_to_string(&index_path) {
        Ok(content) => {
            let (fm, body, _) = frontmatter::parse_frontmatter(&content);
            // Shared frontmatter helpers keep fallbacks consistent with the
            // task/note parsers: title -> id, created -> "".
            Json(ProjectWithContent {
                id: id.clone(),
                name: frontmatter::get_str_or(&fm, "title", &id),
                path: format!("projects/{}", id),
                created: frontmatter::get_str_or(&fm, "created", ""),
                content: body,
            })
            .into_response()
        }
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to read project: {}", err),
        )
            .into_response(),
    }
}
/// Handler: replace the markdown body of a project's index.md, preserving
/// its frontmatter and bumping the `updated` timestamp.
async fn update_project_content(
    Path(id): Path<String>,
    body: String,
) -> impl IntoResponse {
    let index_path = config::data_dir()
        .join("projects")
        .join(&id)
        .join("index.md");
    if !index_path.exists() {
        return (StatusCode::NOT_FOUND, "Project not found").into_response();
    }
    // Read the existing file so the frontmatter carries over unchanged.
    let existing = match fs::read_to_string(&index_path) {
        Ok(content) => content,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to read project: {}", err),
            )
                .into_response();
        }
    };
    let (mut fm, _, _) = frontmatter::parse_frontmatter(&existing);
    // Bump the modification timestamp.
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(chrono::Utc::now().to_rfc3339()),
    );
    let new_content = match frontmatter::serialize_frontmatter(&fm, &body) {
        Ok(c) => c,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to serialize: {}", err),
            )
                .into_response();
        }
    };
    // Atomic write prevents a half-written index.md on crash.
    if let Err(err) = filesystem::atomic_write(&index_path, new_content.as_bytes()) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to write file: {}", err),
        )
            .into_response();
    }
    // Consistency: shared frontmatter helpers instead of fm.get chains.
    Json(ProjectWithContent {
        id: id.clone(),
        name: frontmatter::get_str_or(&fm, "title", &id),
        path: format!("projects/{}", id),
        created: frontmatter::get_str_or(&fm, "created", ""),
        content: body,
    })
    .into_response()
}
// ============ Project Notes Handlers ============
/// Handler: list all notes for a project, newest first by `created`.
///
/// The notes directory is created on demand so a fresh project returns an
/// empty list instead of an error. Non-markdown and unreadable files are
/// skipped. Sorting by `created` is deliberate: the order stays stable
/// when a note is merely viewed or edited. (The previous version carried
/// two contradictory sort comments; the code sorts by `created`.)
async fn list_project_notes(Path(project_id): Path<String>) -> impl IntoResponse {
    let notes_dir = config::data_dir()
        .join("projects")
        .join(&project_id)
        .join("notes");
    // create_dir_all is idempotent — calling it unconditionally avoids the
    // exists()/create race the previous check-then-create had.
    if let Err(e) = fs::create_dir_all(&notes_dir) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to create notes directory: {}", e),
        )
            .into_response();
    }
    let entries = match fs::read_dir(&notes_dir) {
        Ok(e) => e,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to read notes directory: {}", err),
            )
                .into_response();
        }
    };
    let mut notes = Vec::new();
    for entry in entries.flatten() {
        let path = entry.path();
        if path.extension().and_then(|s| s.to_str()) != Some("md") {
            continue;
        }
        let content = match fs::read_to_string(&path) {
            Ok(c) => c,
            Err(_) => continue,
        };
        let (fm, _, _) = frontmatter::parse_frontmatter(&content);
        // The file stem is the fallback for both id and title.
        let filename = path
            .file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or("")
            .to_string();
        notes.push(ProjectNote {
            id: frontmatter::get_str_or(&fm, "id", &filename),
            title: frontmatter::get_str_or(&fm, "title", &filename),
            path: format!("projects/{}/notes/{}.md", project_id, filename),
            project_id: project_id.clone(),
            created: frontmatter::get_str_or(&fm, "created", ""),
            updated: frontmatter::get_str_or(&fm, "updated", ""),
        });
    }
    // Stable ordering: creation date, newest first.
    notes.sort_by(|a, b| b.created.cmp(&a.created));
    Json(notes).into_response()
}
/// Handler: create a new note file for a project. The creation timestamp
/// doubles as the filename (e.g. `20240101-120000.md`). Returns 201 with
/// the full note on success.
async fn create_project_note(
    Path(project_id): Path<String>,
    Json(payload): Json<CreateNoteRequest>,
) -> impl IntoResponse {
    use chrono::Utc;
    // Local helper: insert a frontmatter key/value without the
    // serde_yaml::Value::from boilerplate at every call site.
    fn set(m: &mut serde_yaml::Mapping, key: &str, value: serde_yaml::Value) {
        m.insert(serde_yaml::Value::from(key), value);
    }
    let notes_dir = config::data_dir()
        .join("projects")
        .join(&project_id)
        .join("notes");
    // Make sure the notes directory is present before writing into it.
    if let Err(e) = fs::create_dir_all(&notes_dir) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to create notes directory: {}", e),
        )
            .into_response();
    }
    let now = Utc::now();
    let filename = now.format("%Y%m%d-%H%M%S").to_string();
    let note_path = notes_dir.join(format!("{}.md", filename));
    let title = payload.title.unwrap_or_else(|| "Untitled".to_string());
    let now_str = now.to_rfc3339();
    let mut fm = serde_yaml::Mapping::new();
    set(&mut fm, "id", serde_yaml::Value::from(format!("{}-{}", project_id, filename)));
    set(&mut fm, "type", serde_yaml::Value::from("note"));
    set(&mut fm, "title", serde_yaml::Value::from(title.clone()));
    set(&mut fm, "project_id", serde_yaml::Value::from(project_id.clone()));
    set(&mut fm, "created", serde_yaml::Value::from(now_str.clone()));
    set(&mut fm, "updated", serde_yaml::Value::from(now_str.clone()));
    // Seed the body with a top-level heading matching the title.
    let body = format!("# {}\n\n", title);
    let content = match frontmatter::serialize_frontmatter(&fm, &body) {
        Ok(c) => c,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to serialize frontmatter: {}", err),
            )
                .into_response();
        }
    };
    if let Err(err) = filesystem::atomic_write(&note_path, content.as_bytes()) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to write note file: {}", err),
        )
            .into_response();
    }
    (
        StatusCode::CREATED,
        Json(ProjectNoteWithContent {
            id: format!("{}-{}", project_id, filename),
            title,
            path: format!("projects/{}/notes/{}.md", project_id, filename),
            project_id,
            created: now_str.clone(),
            updated: now_str,
            content: body,
        }),
    )
        .into_response()
}
/// Handler: fetch one note with its content.
///
/// Resolution is two-phase: first try `note_id` as the filename stem
/// (the common case), then fall back to scanning every note and matching
/// the frontmatter `id` field. 404 when neither matches.
async fn get_project_note(Path((project_id, note_id)): Path<(String, String)>) -> impl IntoResponse {
    let notes_dir = config::data_dir()
        .join("projects")
        .join(&project_id)
        .join("notes");
    // Fast path: the note id is usually the filename stem.
    let note_path = notes_dir.join(format!("{}.md", note_id));
    if !note_path.exists() {
        // Slow path: scan all notes and compare the frontmatter `id`.
        if let Ok(entries) = fs::read_dir(&notes_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.extension().and_then(|s| s.to_str()) != Some("md") {
                    continue;
                }
                if let Ok(content) = fs::read_to_string(&path) {
                    let (fm, body, _) = frontmatter::parse_frontmatter(&content);
                    let file_id = fm
                        .get(&serde_yaml::Value::from("id"))
                        .and_then(|v| v.as_str())
                        .map(String::from);
                    if file_id.as_deref() == Some(&note_id) {
                        let filename = path
                            .file_stem()
                            .and_then(|s| s.to_str())
                            .unwrap_or("")
                            .to_string();
                        return Json(ProjectNoteWithContent {
                            id: note_id,
                            title: frontmatter::get_str_or(&fm, "title", ""),
                            path: format!("projects/{}/notes/{}.md", project_id, filename),
                            project_id,
                            created: frontmatter::get_str_or(&fm, "created", ""),
                            updated: frontmatter::get_str_or(&fm, "updated", ""),
                            content: body,
                        })
                        .into_response();
                    }
                }
            }
        }
        return (StatusCode::NOT_FOUND, "Note not found").into_response();
    }
    let content = match fs::read_to_string(&note_path) {
        Ok(c) => c,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to read note: {}", err),
            )
                .into_response();
        }
    };
    let (fm, body, _) = frontmatter::parse_frontmatter(&content);
    // Shared helpers keep the fallback rules consistent across readers.
    Json(ProjectNoteWithContent {
        id: frontmatter::get_str_or(&fm, "id", &note_id),
        title: frontmatter::get_str_or(&fm, "title", ""),
        path: format!("projects/{}/notes/{}.md", project_id, note_id),
        project_id,
        created: frontmatter::get_str_or(&fm, "created", ""),
        updated: frontmatter::get_str_or(&fm, "updated", ""),
        content: body,
    })
    .into_response()
}
/// Handler: replace a note's markdown body, preserving frontmatter and
/// bumping the `updated` timestamp.
///
/// Fix: the note is now also resolvable by its frontmatter `id`, matching
/// get_project_note. Previously only filename-stem ids worked here, so a
/// note fetched via the fallback path could never be saved (404).
async fn update_project_note(
    Path((project_id, note_id)): Path<(String, String)>,
    body: String,
) -> impl IntoResponse {
    let notes_dir = config::data_dir()
        .join("projects")
        .join(&project_id)
        .join("notes");
    // Fast path: note_id is the filename stem.
    let mut note_path = notes_dir.join(format!("{}.md", note_id));
    if !note_path.exists() {
        // Slow path: scan for a file whose frontmatter `id` matches.
        let mut found = None;
        if let Ok(entries) = fs::read_dir(&notes_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.extension().and_then(|s| s.to_str()) != Some("md") {
                    continue;
                }
                if let Ok(content) = fs::read_to_string(&path) {
                    let (fm, _, _) = frontmatter::parse_frontmatter(&content);
                    let matches = fm
                        .get(&serde_yaml::Value::from("id"))
                        .and_then(|v| v.as_str())
                        == Some(note_id.as_str());
                    if matches {
                        found = Some(path);
                        break;
                    }
                }
            }
        }
        match found {
            Some(p) => note_path = p,
            None => return (StatusCode::NOT_FOUND, "Note not found").into_response(),
        }
    }
    // Read the existing file so the frontmatter carries over.
    let existing = match fs::read_to_string(&note_path) {
        Ok(c) => c,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to read note: {}", err),
            )
                .into_response();
        }
    };
    let (mut fm, _, _) = frontmatter::parse_frontmatter(&existing);
    let now = chrono::Utc::now().to_rfc3339();
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(now.clone()),
    );
    let new_content = match frontmatter::serialize_frontmatter(&fm, &body) {
        Ok(c) => c,
        Err(err) => {
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Failed to serialize: {}", err),
            )
                .into_response();
        }
    };
    // Atomic write so a crash cannot leave a half-written note.
    if let Err(err) = filesystem::atomic_write(&note_path, new_content.as_bytes()) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to write file: {}", err),
        )
            .into_response();
    }
    // Report the real filename stem (it can differ from note_id when the
    // note was resolved via the frontmatter id).
    let filename = note_path
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or(&note_id)
        .to_string();
    Json(ProjectNoteWithContent {
        id: frontmatter::get_str_or(&fm, "id", &note_id),
        title: frontmatter::get_str_or(&fm, "title", ""),
        path: format!("projects/{}/notes/{}.md", project_id, filename),
        project_id,
        created: frontmatter::get_str_or(&fm, "created", ""),
        updated: now,
        content: body,
    })
    .into_response()
}
/// Handler: soft-delete a note — the file is moved into `<data>/archive/`
/// rather than removed from disk. 204 on success.
async fn delete_project_note(
    Path((project_id, note_id)): Path<(String, String)>,
) -> impl IntoResponse {
    let note_path = config::data_dir()
        .join("projects")
        .join(&project_id)
        .join("notes")
        .join(format!("{}.md", note_id));
    if !note_path.exists() {
        return (StatusCode::NOT_FOUND, "Note not found").into_response();
    }
    let archive_dir = config::data_dir().join("archive");
    if let Err(e) = fs::create_dir_all(&archive_dir) {
        return (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to create archive directory: {}", e),
        )
            .into_response();
    }
    // NOTE(review): archiving the same project/note id twice reuses this
    // path — confirm the overwrite semantics of rename are acceptable.
    let archive_path = archive_dir.join(format!("{}-{}.md", project_id, note_id));
    match fs::rename(&note_path, &archive_path) {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(err) => (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Failed to archive note: {}", err),
        )
            .into_response(),
    }
}

View File

@@ -0,0 +1,30 @@
use axum::{
extract::Query,
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use serde::Deserialize;
use crate::services::search;
/// Query parameters for the search route (`?q=<term>`).
#[derive(Debug, Deserialize)]
pub struct SearchQuery {
    // The raw search term, passed through to the search service verbatim.
    q: String,
}
/// Build the search sub-router: GET on the root path runs a note search.
pub fn router() -> Router {
    let routes = Router::new();
    routes.route("/", get(search_notes))
}
/// Handler: run the note search for `q` and return the results as JSON.
async fn search_notes(Query(params): Query<SearchQuery>) -> impl IntoResponse {
    match search::search_notes(&params.q) {
        Ok(results) => Json(results).into_response(),
        Err(err) => {
            let msg = format!("Search failed: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
    }
}

835
backend/src/routes/tasks.rs Normal file
View File

@@ -0,0 +1,835 @@
use axum::{
http::StatusCode,
response::IntoResponse,
routing::get,
Json, Router,
};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path as StdPath;
use crate::services::filesystem;
use crate::config;
use crate::services::frontmatter;
/// Task summary for list views
///
/// Mirrors the YAML frontmatter of a task file under
/// `projects/<project_id>/tasks/*.md`; defaults for missing keys are
/// applied in `extract_task_fields`.
#[derive(Debug, Clone, Serialize)]
pub struct Task {
    // Frontmatter `id`; falls back to the filename stem when absent.
    pub id: String,
    // Human-readable title; defaults to "Untitled".
    pub title: String,
    // Completion flag; toggling it also moves the task between sections.
    pub completed: bool,
    // Section name; "Active" or "Completed" are set by the toggle logic.
    pub section: String,
    // Optional priority label; new tasks are created with "normal".
    pub priority: Option<String>,
    // Optional due date in YYYY-MM-DD format (see calculate_next_due_date).
    pub due_date: Option<String>,
    // Active flag; new tasks are created with true.
    pub is_active: bool,
    // Free-form tag strings from the frontmatter `tags` sequence.
    pub tags: Vec<String>,
    // Optional parent task id, set at creation for subtasks.
    pub parent_id: Option<String>,
    // Recurrence kind: "daily" | "weekly" | "monthly" | "yearly".
    pub recurrence: Option<String>,
    // Recurrence step count (e.g. every 2 weeks); defaults to 1 when unset.
    pub recurrence_interval: Option<u32>,
    // Owning project id (directory name under projects/).
    pub project_id: String,
    // Path relative to the data dir: projects/<id>/tasks/<stem>.md.
    pub path: String,
    // RFC 3339 creation timestamp (empty string when missing).
    pub created: String,
    // RFC 3339 last-modified timestamp (empty string when missing).
    pub updated: String,
}
/// Task with full content for detail view
///
/// Same fields as `Task` plus `content` — the markdown body with the
/// frontmatter stripped. Kept as a separate struct so list responses do
/// not carry file bodies.
#[derive(Debug, Clone, Serialize)]
pub struct TaskWithContent {
    pub id: String,
    pub title: String,
    pub completed: bool,
    pub section: String,
    pub priority: Option<String>,
    pub due_date: Option<String>,
    pub is_active: bool,
    pub tags: Vec<String>,
    pub parent_id: Option<String>,
    pub recurrence: Option<String>,
    pub recurrence_interval: Option<u32>,
    pub project_id: String,
    pub path: String,
    pub created: String,
    pub updated: String,
    // Markdown body of the task file, without the YAML frontmatter.
    pub content: String,
}
/// Request body for creating a task.
#[derive(Debug, Deserialize)]
pub struct CreateTaskRequest {
    // Task title; also becomes the initial markdown heading.
    pub title: String,
    // Target section; defaults to "Active" when omitted.
    pub section: Option<String>,
    // Optional parent task id, for creating subtasks.
    pub parent_id: Option<String>,
}
/// Partial-update request for task metadata: only the fields that are
/// present are written to the task's frontmatter (see update_task_meta_impl).
#[derive(Debug, Deserialize)]
pub struct UpdateTaskMetaRequest {
    pub title: Option<String>,
    pub section: Option<String>,
    pub priority: Option<String>,
    pub due_date: Option<String>,
    pub is_active: Option<bool>,
    pub tags: Option<Vec<String>>,
    pub recurrence: Option<String>,
    pub recurrence_interval: Option<u32>,
}
/// Build the tasks sub-router. Only the flat "list all tasks" route lives
/// here; the project-scoped task routes delegate through projects.rs.
pub fn router() -> Router {
    let routes = Router::new();
    routes.route("/", get(list_all_tasks_handler))
}
// ============ Handler Functions (called from projects.rs) ============
/// List all tasks for a project, serialized as JSON.
pub async fn list_project_tasks_handler(project_id: String) -> impl IntoResponse {
    match list_project_tasks_impl(&project_id) {
        Ok(tasks) => Json(tasks).into_response(),
        Err(err) => {
            let msg = format!("Failed to list tasks: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
    }
}
/// Create a new task; responds 201 with the full task on success.
pub async fn create_task_handler(
    project_id: String,
    payload: CreateTaskRequest,
) -> impl IntoResponse {
    let section = payload.section.as_deref();
    let parent = payload.parent_id.as_deref();
    match create_task_impl(&project_id, &payload.title, section, parent) {
        Ok(task) => (StatusCode::CREATED, Json(task)).into_response(),
        Err(err) => {
            let msg = format!("Failed to create task: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
        }
    }
}
/// Get a task with its content; "not found" errors map to 404.
pub async fn get_task_handler(project_id: String, task_id: String) -> impl IntoResponse {
    match get_task_impl(&project_id, &task_id) {
        Ok(task) => Json(task).into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to get task: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Update a task's markdown body; "not found" errors map to 404.
pub async fn update_task_content_handler(
    project_id: String,
    task_id: String,
    body: String,
) -> impl IntoResponse {
    match update_task_content_impl(&project_id, &task_id, &body) {
        Ok(task) => Json(task).into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to update task: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Toggle a task's completion flag; "not found" errors map to 404.
pub async fn toggle_task_handler(project_id: String, task_id: String) -> impl IntoResponse {
    match toggle_task_impl(&project_id, &task_id) {
        Ok(task) => Json(task).into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to toggle task: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Update task metadata (title, section, priority, …); 404 on "not found".
pub async fn update_task_meta_handler(
    project_id: String,
    task_id: String,
    payload: UpdateTaskMetaRequest,
) -> impl IntoResponse {
    match update_task_meta_impl(&project_id, &task_id, payload) {
        Ok(task) => Json(task).into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to update task metadata: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
/// Delete (archive) a task; responds 204 on success, 404 on "not found".
pub async fn delete_task_handler(project_id: String, task_id: String) -> impl IntoResponse {
    match delete_task_impl(&project_id, &task_id) {
        Ok(()) => StatusCode::NO_CONTENT.into_response(),
        Err(err) => {
            if err.contains("not found") {
                (StatusCode::NOT_FOUND, err).into_response()
            } else {
                let msg = format!("Failed to delete task: {}", err);
                (StatusCode::INTERNAL_SERVER_ERROR, msg).into_response()
            }
        }
    }
}
// ============ Implementation Functions ============
/// Absolute path of a project's tasks directory:
/// `<data>/projects/<project_id>/tasks`.
fn get_tasks_dir(project_id: &str) -> std::path::PathBuf {
    let mut dir = config::data_dir();
    dir.push("projects");
    dir.push(project_id);
    dir.push("tasks");
    dir
}
/// Ensure `<data>/projects/<project_id>/tasks` exists and return its path.
///
/// `create_dir_all` is a no-op on an existing directory, so calling it
/// unconditionally avoids the exists()/create race the previous
/// check-then-create sequence had.
fn ensure_tasks_dir(project_id: &str) -> Result<std::path::PathBuf, String> {
    let tasks_dir = get_tasks_dir(project_id);
    fs::create_dir_all(&tasks_dir).map_err(|e| e.to_string())?;
    Ok(tasks_dir)
}
/// Read every `*.md` file in the project's tasks directory and parse each
/// one into a `Task` summary. Unreadable or non-markdown entries are
/// skipped silently.
fn list_project_tasks_impl(project_id: &str) -> Result<Vec<Task>, String> {
    let tasks_dir = ensure_tasks_dir(project_id)?;
    let entries = match fs::read_dir(&tasks_dir) {
        Ok(e) => e,
        Err(_) => return Ok(Vec::new()),
    };
    let mut tasks = Vec::new();
    for entry in entries.flatten() {
        let path = entry.path();
        if path.extension().and_then(|s| s.to_str()) != Some("md") {
            continue;
        }
        if let Ok(content) = fs::read_to_string(&path) {
            if let Some(task) = parse_task_file(&content, &path, project_id) {
                tasks.push(task);
            }
        }
    }
    // Stable ordering: sort by creation date, newest first, so the list
    // does not reshuffle when a task is merely viewed or edited.
    tasks.sort_by(|a, b| b.created.cmp(&a.created));
    Ok(tasks)
}
/// Shared helper: extract common task fields from frontmatter.
/// Eliminates duplication between parse_task_file and parse_task_with_content.
///
/// Missing keys fall back to defaults: `id` -> file stem, `title` ->
/// "Untitled", `section` -> "Active", `completed` -> false, `is_active`
/// -> true, timestamps -> "".
fn extract_task_fields(fm: &serde_yaml::Mapping, path: &StdPath, project_id: &str) -> Task {
    // The filename stem doubles as the fallback id and feeds the
    // data-dir-relative path reported to clients.
    let filename = path
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("")
        .to_string();
    Task {
        id: frontmatter::get_str_or(fm, "id", &filename),
        title: frontmatter::get_str_or(fm, "title", "Untitled"),
        completed: frontmatter::get_bool_or(fm, "completed", false),
        section: frontmatter::get_str_or(fm, "section", "Active"),
        priority: frontmatter::get_str(fm, "priority"),
        due_date: frontmatter::get_str(fm, "due_date"),
        is_active: frontmatter::get_bool_or(fm, "is_active", true),
        tags: frontmatter::get_string_seq(fm, "tags"),
        parent_id: frontmatter::get_str(fm, "parent_id"),
        recurrence: frontmatter::get_str(fm, "recurrence"),
        // NOTE(review): u64 -> u32 cast silently truncates very large
        // intervals — confirm that range is acceptable.
        recurrence_interval: frontmatter::get_u64(fm, "recurrence_interval").map(|v| v as u32),
        project_id: project_id.to_string(),
        path: format!("projects/{}/tasks/{}.md", project_id, filename),
        created: frontmatter::get_str_or(fm, "created", ""),
        updated: frontmatter::get_str_or(fm, "updated", ""),
    }
}
/// Parse a task file's full text into a `Task` summary (frontmatter only;
/// the markdown body is discarded). Always yields `Some` today; the
/// `Option` return is kept for the callers' convenience.
fn parse_task_file(content: &str, path: &StdPath, project_id: &str) -> Option<Task> {
    let (fm, _body, _) = frontmatter::parse_frontmatter(content);
    let task = extract_task_fields(&fm, path, project_id);
    Some(task)
}
fn create_task_impl(
project_id: &str,
title: &str,
section: Option<&str>,
parent_id: Option<&str>,
) -> Result<TaskWithContent, String> {
use chrono::Utc;
let tasks_dir = ensure_tasks_dir(project_id)?;
// Generate filename from timestamp
let now = Utc::now();
let filename = format!("task-{}", now.format("%Y%m%d-%H%M%S"));
let task_path = tasks_dir.join(format!("{}.md", filename));
let section = section.unwrap_or("Active").to_string();
let now_str = now.to_rfc3339();
let id = format!("{}-{}", project_id, filename);
let mut fm = serde_yaml::Mapping::new();
fm.insert(
serde_yaml::Value::from("id"),
serde_yaml::Value::from(id.clone()),
);
fm.insert(
serde_yaml::Value::from("type"),
serde_yaml::Value::from("task"),
);
fm.insert(
serde_yaml::Value::from("title"),
serde_yaml::Value::from(title),
);
fm.insert(
serde_yaml::Value::from("completed"),
serde_yaml::Value::from(false),
);
fm.insert(
serde_yaml::Value::from("section"),
serde_yaml::Value::from(section.clone()),
);
fm.insert(
serde_yaml::Value::from("priority"),
serde_yaml::Value::from("normal"),
);
fm.insert(
serde_yaml::Value::from("is_active"),
serde_yaml::Value::from(true),
);
fm.insert(
serde_yaml::Value::from("project_id"),
serde_yaml::Value::from(project_id),
);
if let Some(pid) = parent_id {
fm.insert(
serde_yaml::Value::from("parent_id"),
serde_yaml::Value::from(pid),
);
}
fm.insert(
serde_yaml::Value::from("created"),
serde_yaml::Value::from(now_str.clone()),
);
fm.insert(
serde_yaml::Value::from("updated"),
serde_yaml::Value::from(now_str.clone()),
);
let body = format!("# {}\n\n", title);
let content = frontmatter::serialize_frontmatter(&fm, &body)?;
filesystem::atomic_write(&task_path, content.as_bytes())?;
Ok(TaskWithContent {
id,
title: title.to_string(),
completed: false,
section,
priority: Some("normal".to_string()),
due_date: None,
is_active: true,
tags: Vec::new(),
parent_id: parent_id.map(String::from),
recurrence: None,
recurrence_interval: None,
project_id: project_id.to_string(),
path: format!("projects/{}/tasks/{}.md", project_id, filename),
created: now_str.clone(),
updated: now_str,
content: body,
})
}
/// Load one task with its content. Resolution is two-phase: first try
/// `task_id` as the filename stem, then fall back to scanning every task
/// file and matching the frontmatter `id`.
fn get_task_impl(project_id: &str, task_id: &str) -> Result<TaskWithContent, String> {
    let tasks_dir = get_tasks_dir(project_id);
    // Fast path: the id matches the filename stem.
    let direct = tasks_dir.join(format!("{}.md", task_id));
    if direct.exists() {
        return read_task_with_content(&direct, project_id);
    }
    // Slow path: compare the frontmatter `id` of every markdown file.
    if let Ok(entries) = fs::read_dir(&tasks_dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("md") {
                continue;
            }
            let content = match fs::read_to_string(&path) {
                Ok(c) => c,
                Err(_) => continue,
            };
            let (fm, body, _) = frontmatter::parse_frontmatter(&content);
            let matches_id = fm
                .get(&serde_yaml::Value::from("id"))
                .and_then(|v| v.as_str())
                == Some(task_id);
            if matches_id {
                return parse_task_with_content(&fm, &body, &path, project_id);
            }
        }
    }
    Err("Task not found".to_string())
}
/// Load a task file from disk and convert it into a `TaskWithContent`.
fn read_task_with_content(path: &StdPath, project_id: &str) -> Result<TaskWithContent, String> {
    let raw = fs::read_to_string(path).map_err(|e| e.to_string())?;
    let (fm, body, _) = frontmatter::parse_frontmatter(&raw);
    parse_task_with_content(&fm, &body, path, project_id)
}
fn parse_task_with_content(
fm: &serde_yaml::Mapping,
body: &str,
path: &StdPath,
project_id: &str,
) -> Result<TaskWithContent, String> {
let task = extract_task_fields(fm, path, project_id);
Ok(TaskWithContent {
id: task.id,
title: task.title,
completed: task.completed,
section: task.section,
priority: task.priority,
due_date: task.due_date,
is_active: task.is_active,
tags: task.tags,
parent_id: task.parent_id,
recurrence: task.recurrence,
recurrence_interval: task.recurrence_interval,
project_id: task.project_id,
path: task.path,
created: task.created,
updated: task.updated,
content: body.to_string(),
})
}
/// Replace a task's markdown body, preserving its frontmatter and bumping
/// the `updated` timestamp.
fn update_task_content_impl(
    project_id: &str,
    task_id: &str,
    new_body: &str,
) -> Result<TaskWithContent, String> {
    let task_path = find_task_path(project_id, task_id)?;
    // Re-read the file so the existing frontmatter carries over.
    let existing = fs::read_to_string(&task_path).map_err(|e| e.to_string())?;
    let (mut fm, _old_body, _) = frontmatter::parse_frontmatter(&existing);
    // Bump the modification timestamp.
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(chrono::Utc::now().to_rfc3339()),
    );
    // Atomic write so a crash mid-write cannot corrupt the file.
    let serialized = frontmatter::serialize_frontmatter(&fm, new_body)?;
    filesystem::atomic_write(&task_path, serialized.as_bytes())?;
    parse_task_with_content(&fm, new_body, &task_path, project_id)
}
/// Flip a task's `completed` flag, keep its section in sync
/// (Active <-> Completed), and — when completing a recurring task —
/// create the next occurrence.
///
/// Fix: the final re-read used `.unwrap()` on I/O, which would panic the
/// handler if the just-written file could not be read back; the error is
/// now propagated instead.
fn toggle_task_impl(project_id: &str, task_id: &str) -> Result<Task, String> {
    let task_path = find_task_path(project_id, task_id)?;
    let existing = fs::read_to_string(&task_path).map_err(|e| e.to_string())?;
    let (mut fm, body, _) = frontmatter::parse_frontmatter(&existing);
    // Toggle the completion flag (missing flag counts as not completed).
    let current_completed = fm
        .get(&serde_yaml::Value::from("completed"))
        .and_then(|v| v.as_bool())
        .unwrap_or(false);
    let new_completed = !current_completed;
    fm.insert(
        serde_yaml::Value::from("completed"),
        serde_yaml::Value::from(new_completed),
    );
    // Keep the section in sync with the completion state.
    let new_section = if new_completed { "Completed" } else { "Active" };
    fm.insert(
        serde_yaml::Value::from("section"),
        serde_yaml::Value::from(new_section),
    );
    // Bump the modification timestamp.
    let now = chrono::Utc::now().to_rfc3339();
    fm.insert(
        serde_yaml::Value::from("updated"),
        serde_yaml::Value::from(now),
    );
    // Atomic write so a crash cannot leave a half-written task file.
    let new_content = frontmatter::serialize_frontmatter(&fm, &body)?;
    filesystem::atomic_write(&task_path, new_content.as_bytes())?;
    // If a recurring task was just completed, spawn its next instance.
    if new_completed {
        let recurrence = fm
            .get(&serde_yaml::Value::from("recurrence"))
            .and_then(|v| v.as_str())
            .map(String::from);
        if let Some(rec) = recurrence {
            let interval = fm
                .get(&serde_yaml::Value::from("recurrence_interval"))
                .and_then(|v| v.as_u64())
                .unwrap_or(1) as i64;
            let title = fm
                .get(&serde_yaml::Value::from("title"))
                .and_then(|v| v.as_str())
                .unwrap_or("Untitled")
                .to_string();
            let due_date = fm
                .get(&serde_yaml::Value::from("due_date"))
                .and_then(|v| v.as_str())
                .map(String::from);
            let tags = fm
                .get(&serde_yaml::Value::from("tags"))
                .and_then(|v| v.as_sequence())
                .map(|seq| {
                    seq.iter()
                        .filter_map(|v| v.as_str().map(String::from))
                        .collect::<Vec<_>>()
                })
                .unwrap_or_default();
            let next_due = calculate_next_due_date(due_date.as_deref(), &rec, interval);
            // Best effort: failing to create the next occurrence should not
            // fail the toggle that has already been persisted.
            let _ = create_recurring_task_impl(
                project_id,
                &title,
                next_due.as_deref(),
                &rec,
                interval as u32,
                &tags,
            );
        }
    }
    // Re-read from disk and return the updated task, propagating I/O errors.
    let reread = fs::read_to_string(&task_path).map_err(|e| e.to_string())?;
    parse_task_file(&reread, &task_path, project_id)
        .ok_or_else(|| "Failed to parse updated task".to_string())
}
/// Compute the next due date (YYYY-MM-DD) for a recurring task.
///
/// The base is the current due date; when it is missing or unparseable,
/// today's UTC date is used. Returns `None` for an unknown recurrence
/// kind, or when the date arithmetic leaves chrono's representable range.
/// Fix: the previous version used unchecked `+ Duration`, which panics on
/// overflow — reachable with an absurd `recurrence_interval` read from
/// frontmatter; `checked_add_signed` returns `None` instead.
fn calculate_next_due_date(current_due: Option<&str>, recurrence: &str, interval: i64) -> Option<String> {
    use chrono::{Duration, Months, NaiveDate, Utc};
    let base_date = current_due
        .and_then(|s| NaiveDate::parse_from_str(s, "%Y-%m-%d").ok())
        .unwrap_or_else(|| Utc::now().date_naive());
    // NOTE(review): `interval as u32` truncates very large values — confirm
    // that range is acceptable for monthly/yearly recurrences.
    let next = match recurrence {
        "daily" => base_date.checked_add_signed(Duration::days(interval)),
        "weekly" => base_date.checked_add_signed(Duration::weeks(interval)),
        "monthly" => base_date.checked_add_months(Months::new(interval as u32)),
        "yearly" => base_date.checked_add_months(Months::new((interval * 12) as u32)),
        _ => None,
    };
    next.map(|d| d.format("%Y-%m-%d").to_string())
}
fn create_recurring_task_impl(
project_id: &str,
title: &str,
due_date: Option<&str>,
recurrence: &str,
interval: u32,
tags: &[String],
) -> Result<TaskWithContent, String> {
use chrono::Utc;
let tasks_dir = ensure_tasks_dir(project_id)?;
let now = Utc::now();
// Add a small suffix to avoid filename collision with completed task
let filename = format!("task-{}-r", now.format("%Y%m%d-%H%M%S"));
let task_path = tasks_dir.join(format!("{}.md", filename));
let now_str = now.to_rfc3339();
let id = format!("{}-{}", project_id, filename);
let mut fm = serde_yaml::Mapping::new();
fm.insert(serde_yaml::Value::from("id"), serde_yaml::Value::from(id.clone()));
fm.insert(serde_yaml::Value::from("type"), serde_yaml::Value::from("task"));
fm.insert(serde_yaml::Value::from("title"), serde_yaml::Value::from(title));
fm.insert(serde_yaml::Value::from("completed"), serde_yaml::Value::from(false));
fm.insert(serde_yaml::Value::from("section"), serde_yaml::Value::from("Active"));
fm.insert(serde_yaml::Value::from("priority"), serde_yaml::Value::from("normal"));
fm.insert(serde_yaml::Value::from("is_active"), serde_yaml::Value::from(true));
fm.insert(serde_yaml::Value::from("project_id"), serde_yaml::Value::from(project_id));
fm.insert(serde_yaml::Value::from("recurrence"), serde_yaml::Value::from(recurrence));
fm.insert(serde_yaml::Value::from("recurrence_interval"), serde_yaml::Value::from(interval as u64));
if let Some(due) = due_date {
fm.insert(serde_yaml::Value::from("due_date"), serde_yaml::Value::from(due));
}
if !tags.is_empty() {
let yaml_tags: Vec<serde_yaml::Value> = tags.iter().map(|t| serde_yaml::Value::from(t.as_str())).collect();
fm.insert(serde_yaml::Value::from("tags"), serde_yaml::Value::Sequence(yaml_tags));
}
fm.insert(serde_yaml::Value::from("created"), serde_yaml::Value::from(now_str.clone()));
fm.insert(serde_yaml::Value::from("updated"), serde_yaml::Value::from(now_str.clone()));
let body = format!("# {}\n\n", title);
let content = frontmatter::serialize_frontmatter(&fm, &body)?;
filesystem::atomic_write(&task_path, content.as_bytes())?;
Ok(TaskWithContent {
id,
title: title.to_string(),
completed: false,
section: "Active".to_string(),
priority: Some("normal".to_string()),
due_date: due_date.map(String::from),
is_active: true,
tags: tags.to_vec(),
parent_id: None,
recurrence: Some(recurrence.to_string()),
recurrence_interval: Some(interval),
project_id: project_id.to_string(),
path: format!("projects/{}/tasks/{}.md", project_id, filename),
created: now_str.clone(),
updated: now_str,
content: body,
})
}
fn update_task_meta_impl(
project_id: &str,
task_id: &str,
meta: UpdateTaskMetaRequest,
) -> Result<Task, String> {
let task_path = find_task_path(project_id, task_id)?;
// Read existing content
let existing = fs::read_to_string(&task_path).map_err(|e| e.to_string())?;
let (mut fm, body, _) = frontmatter::parse_frontmatter(&existing);
// Update fields if provided
if let Some(title) = meta.title {
fm.insert(
serde_yaml::Value::from("title"),
serde_yaml::Value::from(title),
);
}
if let Some(section) = meta.section {
fm.insert(
serde_yaml::Value::from("section"),
serde_yaml::Value::from(section),
);
}
if let Some(priority) = meta.priority {
fm.insert(
serde_yaml::Value::from("priority"),
serde_yaml::Value::from(priority),
);
}
if let Some(due_date) = meta.due_date {
fm.insert(
serde_yaml::Value::from("due_date"),
serde_yaml::Value::from(due_date),
);
}
if let Some(is_active) = meta.is_active {
fm.insert(
serde_yaml::Value::from("is_active"),
serde_yaml::Value::from(is_active),
);
}
if let Some(tags) = meta.tags {
let yaml_tags: Vec<serde_yaml::Value> =
tags.into_iter().map(serde_yaml::Value::from).collect();
fm.insert(
serde_yaml::Value::from("tags"),
serde_yaml::Value::Sequence(yaml_tags),
);
}
if let Some(recurrence) = meta.recurrence {
if recurrence.is_empty() {
fm.remove(&serde_yaml::Value::from("recurrence"));
fm.remove(&serde_yaml::Value::from("recurrence_interval"));
} else {
fm.insert(
serde_yaml::Value::from("recurrence"),
serde_yaml::Value::from(recurrence),
);
}
}
if let Some(interval) = meta.recurrence_interval {
fm.insert(
serde_yaml::Value::from("recurrence_interval"),
serde_yaml::Value::from(interval as u64),
);
}
// Update timestamp
let now = chrono::Utc::now().to_rfc3339();
fm.insert(
serde_yaml::Value::from("updated"),
serde_yaml::Value::from(now),
);
// Serialize and write (atomic to prevent corruption)
let new_content = frontmatter::serialize_frontmatter(&fm, &body)?;
filesystem::atomic_write(&task_path, new_content.as_bytes())?;
// Return updated task
let task = parse_task_file(&fs::read_to_string(&task_path).unwrap(), &task_path, project_id)
.ok_or_else(|| "Failed to parse updated task".to_string())?;
Ok(task)
}
/// Soft-delete a task: move its file into the global `archive` directory.
///
/// The archived filename is prefixed with the project id to reduce the
/// chance of collisions between projects.
fn delete_task_impl(project_id: &str, task_id: &str) -> Result<(), String> {
    let task_path = find_task_path(project_id, task_id)?;

    let archive_dir = config::data_dir().join("archive");
    fs::create_dir_all(&archive_dir).map_err(|e| e.to_string())?;

    let filename = task_path
        .file_name()
        .and_then(|s| s.to_str())
        .unwrap_or("task.md");
    // NOTE(review): if an archived file with the same prefixed name already
    // exists, this rename replaces it — confirm that is acceptable.
    let target = archive_dir.join(format!("{}-{}", project_id, filename));
    fs::rename(&task_path, &target).map_err(|e| e.to_string())?;
    Ok(())
}
/// Locate a task file by id within a project's tasks directory.
///
/// Resolution order: first a direct `<task_id>.md` filename match, then a
/// scan of every markdown file comparing the frontmatter `id` field.
fn find_task_path(project_id: &str, task_id: &str) -> Result<std::path::PathBuf, String> {
    let tasks_dir = get_tasks_dir(project_id);

    // Fast path: the id doubles as the filename.
    let candidate = tasks_dir.join(format!("{}.md", task_id));
    if candidate.exists() {
        return Ok(candidate);
    }

    // Slow path: compare the frontmatter `id` of every markdown file.
    if let Ok(entries) = fs::read_dir(&tasks_dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            let is_md = path.extension().and_then(|s| s.to_str()) == Some("md");
            if !is_md {
                continue;
            }
            let content = match fs::read_to_string(&path) {
                Ok(c) => c,
                Err(_) => continue,
            };
            let (fm, _, _) = frontmatter::parse_frontmatter(&content);
            let matches = fm
                .get(&serde_yaml::Value::from("id"))
                .and_then(|v| v.as_str())
                == Some(task_id);
            if matches {
                return Ok(path);
            }
        }
    }
    Err("Task not found".to_string())
}
// ============ Legacy/Global Task Listing ============
/// GET handler: return every task across all projects as JSON, or a 500
/// with a plain-text error on failure.
async fn list_all_tasks_handler() -> impl IntoResponse {
    match list_all_tasks_impl() {
        Ok(tasks) => Json(tasks).into_response(),
        Err(err) => {
            let body = format!("Failed to list tasks: {}", err);
            (StatusCode::INTERNAL_SERVER_ERROR, body).into_response()
        }
    }
}
/// Collect tasks from every project directory under `data/projects`.
///
/// Projects whose task listing fails are skipped silently (best-effort).
/// Returns `Ok(empty)` when the projects directory does not exist yet.
fn list_all_tasks_impl() -> Result<Vec<Task>, String> {
    let projects_dir = config::data_dir().join("projects");
    if !projects_dir.exists() {
        return Ok(Vec::new());
    }
    let mut all_tasks = Vec::new();
    for entry in fs::read_dir(&projects_dir).map_err(|e| e.to_string())? {
        let entry = entry.map_err(|e| e.to_string())?;
        let project_path = entry.path();
        if !project_path.is_dir() {
            continue;
        }
        // Directory name doubles as the project id.
        let project_id = project_path
            .file_name()
            .and_then(|s| s.to_str())
            .unwrap_or("")
            .to_string();
        if let Ok(tasks) = list_project_tasks_impl(&project_id) {
            all_tasks.extend(tasks);
        }
    }
    // Sort by created date, newest first (stable ordering across calls).
    all_tasks.sort_by(|a, b| b.created.cmp(&a.created));
    Ok(all_tasks)
}

View File

@@ -0,0 +1,349 @@
use std::fs;
use std::io::Write;
use std::path::Path;
use serde_yaml::Value;
use walkdir::WalkDir;
use crate::models::note::{Note, NoteSummary};
use crate::services::frontmatter;
use crate::config;
/// List all notes in the filesystem (read-only).
///
/// Files that fail to parse are logged and skipped rather than failing the
/// whole listing.
pub fn list_notes() -> Result<Vec<NoteSummary>, String> {
    let walker = WalkDir::new(config::data_dir())
        .into_iter()
        .filter_entry(|e| !is_ignored(e.path()))
        .filter_map(Result::ok);

    let mut notes = Vec::new();
    for entry in walker {
        let path = entry.path();
        let is_md = path.extension().and_then(|s| s.to_str()) == Some("md");
        // Only markdown files that qualify as notes or project index files.
        if !is_md || !is_note_file(path) {
            continue;
        }
        match parse_note_summary(path) {
            Ok(summary) => notes.push(summary),
            Err(err) => {
                tracing::warn!("Skipping file {:?}: {}", path, err);
            }
        }
    }
    Ok(notes)
}
/// True when `path` lives inside a directory tree we never treat as notes:
/// git metadata, binary assets, or archived files.
fn is_ignored(path: &Path) -> bool {
    const IGNORED: [&str; 3] = [".git", "assets", "archive"];
    path.components()
        .filter_map(|c| c.as_os_str().to_str())
        .any(|name| IGNORED.contains(&name))
}
/// Decide whether a markdown file should be surfaced as a note.
///
/// Notes live in `notes/**`, project index files at `projects/*/index.md`,
/// and loose root-level files (e.g. `index.md`, `inbox.md`) directly in the
/// data directory. Matching is done on whole path components rather than
/// substrings, so e.g. `keynotes.md` or a project whose name merely contains
/// "notes" is no longer picked up by accident. (Path components split on the
/// platform separator, so this works on Windows too.)
fn is_note_file(path: &Path) -> bool {
    let has_component = |needle: &str| {
        path.components()
            .filter_map(|c| c.as_os_str().to_str())
            .any(|name| name == needle)
    };

    // data/notes/**/*.md, excluding anything under an archive directory.
    if has_component("notes") && !has_component("archive") {
        return true;
    }
    // data/projects/*/index.md
    if has_component("projects")
        && path.file_name().and_then(|s| s.to_str()) == Some("index.md")
    {
        return true;
    }
    // Root-level files (index.md, inbox.md) - parent is the data dir
    if let Some(parent) = path.parent() {
        if parent == config::data_dir() {
            return true;
        }
    }
    false
}
/// Build a `NoteSummary` from a markdown file's frontmatter.
///
/// Falls back to a path-derived id, the file stem (or "Untitled") for the
/// title, and "note" for the type when the fields are absent.
fn parse_note_summary(path: &Path) -> Result<NoteSummary, String> {
    let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
    let (fm, _body, _has_fm) = frontmatter::parse_frontmatter(&content);

    // Shared accessor for optional string fields.
    let str_field = |key: &str| -> Option<String> {
        fm.get(&Value::from(key))
            .and_then(|v| v.as_str())
            .map(String::from)
    };

    let id = str_field("id").unwrap_or_else(|| frontmatter::derive_id_from_path(path));
    let title = str_field("title").unwrap_or_else(|| {
        path.file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or("Untitled")
            .to_string()
    });
    let note_type = str_field("type").unwrap_or_else(|| "note".to_string());
    let updated = str_field("updated");

    Ok(NoteSummary {
        id,
        title,
        path: normalize_path(path),
        note_type,
        updated,
    })
}
/// Convert a file path into a repo-relative, forward-slash path.
///
/// Everything up to and including the first `data` segment (plus its
/// trailing separator) is stripped; backslashes are normalized to `/`.
///
/// Uses a checked slice: the original indexed `&path_str[idx + 5..]`
/// unchecked, which panics when fewer than five bytes follow the `data`
/// match (e.g. a path ending exactly at `data`).
pub fn normalize_path(path: &Path) -> String {
    let path_str = path.to_string_lossy();
    // Skip "data" plus the following separator; degrade to an empty
    // remainder when nothing (or a non-boundary byte) follows it.
    let stripped = match path_str.find("data") {
        Some(idx) => path_str.get(idx + 5..).unwrap_or(""),
        None => &path_str,
    };
    stripped.replace('\\', "/").trim_start_matches('/').to_string()
}
/// Read a full note by deterministic ID.
pub fn read_note_by_id(note_id: &str) -> Result<Note, String> {
let root = config::data_dir();
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
if !is_note_file(path) {
continue;
}
let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
let (fm, body, _has_fm) = frontmatter::parse_frontmatter(&content);
let derived_id = fm
.get(&Value::from("id"))
.and_then(|v| v.as_str())
.map(String::from)
.unwrap_or_else(|| frontmatter::derive_id_from_path(path));
if derived_id != note_id {
continue;
}
let note_type = fm
.get(&Value::from("type"))
.and_then(|v| v.as_str())
.unwrap_or("note")
.to_string();
return Ok(Note {
id: derived_id,
path: normalize_path(path),
note_type,
frontmatter: fm,
content: body.trim_start().to_string(),
});
}
Err(format!("Note not found: {}", note_id))
}
/// Create a new empty note in data/notes/.
///
/// The filename is a UTC timestamp, frontmatter is generated with
/// backend-owned fields, and the file is written atomically.
pub fn create_note() -> Result<Note, String> {
    use chrono::Utc;

    let notes_dir = config::data_dir().join("notes");
    fs::create_dir_all(&notes_dir).map_err(|e| e.to_string())?;

    let path = notes_dir.join(format!("{}.md", Utc::now().format("%Y%m%d-%H%M%S")));
    let fm = frontmatter::generate_frontmatter(&path, "note");
    let content = frontmatter::serialize_frontmatter(&fm, "")?;
    // Write via temp-file + rename so a crash cannot leave a half-written note.
    atomic_write(&path, content.as_bytes())?;

    let id = fm
        .get(&Value::from("id"))
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();

    Ok(Note {
        id,
        path: normalize_path(&path),
        note_type: "note".to_string(),
        frontmatter: fm,
        content: String::new(),
    })
}
/// Update an existing note by ID with full markdown payload.
/// Handles notes with or without existing frontmatter.
/// Preserves user-defined fields, updates backend-owned fields.
pub fn update_note(note_id: &str, new_content: &str) -> Result<Note, String> {
let root = config::data_dir();
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
if !is_note_file(path) {
continue;
}
let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
let (mut fm, _old_body, has_fm) = frontmatter::parse_frontmatter(&content);
let derived_id = fm
.get(&Value::from("id"))
.and_then(|v| v.as_str())
.map(String::from)
.unwrap_or_else(|| frontmatter::derive_id_from_path(path));
if derived_id != note_id {
continue;
}
// Ensure frontmatter has all required fields
// This handles files without frontmatter or with incomplete frontmatter
if !has_fm || !frontmatter::is_frontmatter_complete(&fm) {
frontmatter::ensure_frontmatter(&mut fm, path);
} else {
// Just update the timestamp
frontmatter::update_frontmatter(&mut fm);
}
// Rebuild file content
let rebuilt = frontmatter::serialize_frontmatter(&fm, new_content.trim_start())?;
// Atomic write
atomic_write(path, rebuilt.as_bytes())?;
let note_type = fm
.get(&Value::from("type"))
.and_then(|v| v.as_str())
.unwrap_or("note")
.to_string();
return Ok(Note {
id: derived_id,
path: normalize_path(path),
note_type,
frontmatter: fm,
content: new_content.to_string(),
});
}
Err(format!("Note not found: {}", note_id))
}
/// Archive a note by ID (move to data/archive/).
pub fn archive_note(note_id: &str) -> Result<(), String> {
let root = config::data_dir();
let archive_dir = config::data_dir().join("archive");
fs::create_dir_all(&archive_dir).map_err(|e| e.to_string())?;
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
if !is_note_file(path) {
continue;
}
let content = fs::read_to_string(path).map_err(|e| e.to_string())?;
let (fm, _, _) = frontmatter::parse_frontmatter(&content);
let derived_id = fm
.get(&Value::from("id"))
.and_then(|v| v.as_str())
.map(String::from)
.unwrap_or_else(|| frontmatter::derive_id_from_path(path));
if derived_id != note_id {
continue;
}
let filename = path.file_name().ok_or("Invalid filename")?;
let target = archive_dir.join(filename);
fs::rename(path, target).map_err(|e| e.to_string())?;
return Ok(());
}
Err(format!("Note not found: {}", note_id))
}
/// Atomic write: write to a hidden temp file in the same directory, fsync,
/// then rename over the target.
///
/// This prevents data loss on crash or power failure, and the file is
/// pre-registered with the watcher so our own save does not surface as an
/// "external edit" notification. On any failure the temp file is removed,
/// so aborted saves no longer leave `.*.tmp` litter behind (previously an
/// early error return leaked the temp file).
pub fn atomic_write(path: &Path, contents: &[u8]) -> Result<(), String> {
    let parent = path.parent().ok_or("Invalid path")?;
    let temp_name = format!(
        ".{}.tmp",
        path.file_name()
            .and_then(|s| s.to_str())
            .unwrap_or("file")
    );
    let temp_path = parent.join(temp_name);

    // Mark this file as being saved by us (to avoid triggering external edit notification)
    let normalized = normalize_path(path);
    crate::watcher::mark_file_saved(&normalized);

    // Write + flush the temp file, then rename it over the target
    // (atomic on most filesystems). The file handle drops before the rename.
    let result = fs::File::create(&temp_path)
        .and_then(|mut file| {
            file.write_all(contents)?;
            file.sync_all()
        })
        .map_err(|e| e.to_string())
        .and_then(|_| fs::rename(&temp_path, path).map_err(|e| e.to_string()));

    // Best-effort cleanup: if anything above failed, the temp file may still
    // exist — remove it so it cannot accumulate.
    if result.is_err() {
        let _ = fs::remove_file(&temp_path);
    }
    result
}

View File

@@ -0,0 +1,191 @@
use std::path::Path;
use chrono::Utc;
use serde_yaml::{Mapping, Value};
/// Derive deterministic ID from file path.
///
/// Mirrors the filesystem ID logic: strip everything up to and including the
/// `data` directory, drop the top-level folder (notes, projects, ...),
/// remove the `.md` extension, and join the remaining segments with `-`.
///
/// Fixes over the original:
/// - uses a checked slice, so a path ending right at `data` cannot panic
///   (the old `&path_str[idx + 5..]` indexed unchecked);
/// - strips `.md` only as a filename suffix instead of replacing the
///   substring anywhere in a segment (e.g. `a.md.md` now yields `a.md`).
pub fn derive_id_from_path(path: &Path) -> String {
    let path_str = path.to_string_lossy();
    // Skip "data" plus its trailing separator; empty remainder when nothing
    // (or a non-boundary byte) follows.
    let rel_str = match path_str.find("data") {
        Some(idx) => path_str.get(idx + 5..).unwrap_or(""),
        None => &path_str,
    };
    // Split by both forward and back slashes so Windows paths work everywhere.
    let mut parts: Vec<String> = rel_str
        .split(['/', '\\'])
        .filter(|s| !s.is_empty())
        .map(|s| s.strip_suffix(".md").unwrap_or(s).to_string())
        .collect();
    // Drop top-level folder name (notes, projects, etc.) if we have multiple parts
    if parts.len() > 1 {
        parts.remove(0);
    }
    parts.join("-")
}
/// Parse YAML frontmatter from file content.
///
/// Returns `(frontmatter mapping, body content, has_frontmatter flag)`.
/// Content without a leading `---` fence yields an empty mapping and the
/// untouched text; malformed YAML degrades to an empty mapping.
pub fn parse_frontmatter(content: &str) -> (Mapping, String, bool) {
    if !content.starts_with("---") {
        return (Mapping::new(), content.to_string(), false);
    }
    let mut sections = content.splitn(3, "---");
    sections.next(); // empty prefix before the opening fence
    let yaml = sections.next().unwrap_or_default();
    let body = sections.next().unwrap_or_default();
    let map = serde_yaml::from_str::<Value>(yaml)
        .ok()
        .and_then(|v| v.as_mapping().cloned())
        .unwrap_or_default();
    (map, body.to_string(), true)
}
/// Serialize frontmatter and body back into a single markdown string,
/// fenced by `---` lines with a blank line before the (left-trimmed) body.
pub fn serialize_frontmatter(frontmatter: &Mapping, body: &str) -> Result<String, String> {
    let yaml = serde_yaml::to_string(frontmatter).map_err(|e| e.to_string())?;
    Ok(format!("---\n{}---\n\n{}", yaml, body.trim_start()))
}
/// Generate initial frontmatter for a newly created file.
///
/// Sets only backend-owned fields: `id` (derived from the path), `type`,
/// and matching `created`/`updated` timestamps.
pub fn generate_frontmatter(path: &Path, note_type: &str) -> Mapping {
    let now = Utc::now().to_rfc3339();
    let mut map = Mapping::new();
    map.insert(Value::from("id"), Value::from(derive_id_from_path(path)));
    map.insert(Value::from("type"), Value::from(note_type));
    map.insert(Value::from("created"), Value::from(now.clone()));
    map.insert(Value::from("updated"), Value::from(now));
    map
}
/// Ensure frontmatter has all required backend-owned fields.
///
/// - missing `id` is derived from the path
/// - missing `created` is set to now (set once, never overwritten)
/// - `updated` is always refreshed
/// - user-defined fields (title, tags, status, etc.) are left untouched
pub fn ensure_frontmatter(existing: &mut Mapping, path: &Path) {
    let now = Utc::now().to_rfc3339();

    let id_key = Value::from("id");
    if !existing.contains_key(&id_key) {
        existing.insert(id_key, Value::from(derive_id_from_path(path)));
    }

    let created_key = Value::from("created");
    if !existing.contains_key(&created_key) {
        existing.insert(created_key, Value::from(now.clone()));
    }

    existing.insert(Value::from("updated"), Value::from(now));
}
/// Update frontmatter on save: refresh only the `updated` timestamp,
/// leaving every other field intact.
pub fn update_frontmatter(existing: &mut Mapping) {
    existing.insert(Value::from("updated"), Value::from(Utc::now().to_rfc3339()));
}
/// True when all required backend-owned fields (`id`, `created`, `updated`)
/// are present in the frontmatter.
pub fn is_frontmatter_complete(frontmatter: &Mapping) -> bool {
    ["id", "created", "updated"]
        .iter()
        .all(|key| frontmatter.contains_key(&Value::from(*key)))
}
// ============ Helper functions for cleaner frontmatter field access ============

/// Get a string value from frontmatter by key.
pub fn get_str(fm: &Mapping, key: &str) -> Option<String> {
    fm.get(&Value::from(key))
        .and_then(Value::as_str)
        .map(str::to_string)
}

/// Get a string value from frontmatter, falling back to `default`.
pub fn get_str_or(fm: &Mapping, key: &str, default: &str) -> String {
    match get_str(fm, key) {
        Some(value) => value,
        None => default.to_string(),
    }
}

/// Get a bool value from frontmatter by key.
pub fn get_bool(fm: &Mapping, key: &str) -> Option<bool> {
    fm.get(&Value::from(key)).and_then(Value::as_bool)
}

/// Get a bool value from frontmatter, falling back to `default`.
pub fn get_bool_or(fm: &Mapping, key: &str, default: bool) -> bool {
    get_bool(fm, key).unwrap_or(default)
}

/// Get a u64 value from frontmatter by key.
pub fn get_u64(fm: &Mapping, key: &str) -> Option<u64> {
    fm.get(&Value::from(key)).and_then(Value::as_u64)
}

/// Get a string sequence (tags, etc.) from frontmatter by key.
/// Missing or non-sequence values yield an empty vec; non-string items
/// within a sequence are silently skipped.
pub fn get_string_seq(fm: &Mapping, key: &str) -> Vec<String> {
    match fm.get(&Value::from(key)).and_then(Value::as_sequence) {
        Some(seq) => seq
            .iter()
            .filter_map(|item| item.as_str().map(String::from))
            .collect(),
        None => Vec::new(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Fenced content: mapping is parsed, body preserved, has_fm == true.
    #[test]
    fn test_parse_frontmatter_with_frontmatter() {
        let content = "---\nid: test\ntitle: Test Note\n---\n\nBody content";
        let (fm, body, has_fm) = parse_frontmatter(content);
        assert!(has_fm);
        assert_eq!(fm.get(&Value::from("id")).unwrap().as_str().unwrap(), "test");
        assert_eq!(fm.get(&Value::from("title")).unwrap().as_str().unwrap(), "Test Note");
        assert!(body.contains("Body content"));
    }
    // Plain content passes through untouched with an empty mapping.
    #[test]
    fn test_parse_frontmatter_without_frontmatter() {
        let content = "Just some content without frontmatter";
        let (fm, body, has_fm) = parse_frontmatter(content);
        assert!(!has_fm);
        assert!(fm.is_empty());
        assert_eq!(body, content);
    }
    // IDs drop the top-level folder and the .md extension, joining the
    // remaining segments with '-'.
    #[test]
    fn test_derive_id_from_path() {
        let path = Path::new("data/notes/my-note.md");
        assert_eq!(derive_id_from_path(path), "my-note");
        let path = Path::new("data/projects/myproject/index.md");
        assert_eq!(derive_id_from_path(path), "myproject-index");
    }
}

655
backend/src/services/git.rs Normal file
View File

@@ -0,0 +1,655 @@
use std::time::Duration;
use chrono::Utc;
use git2::{Repository, Signature, StatusOptions};
use serde::Serialize;
use tokio::time::interval;
use crate::config;
/// Git status for a single changed file.
#[derive(Debug, Clone, Serialize)]
pub struct FileStatus {
    pub path: String,
    // One of "new", "modified", "deleted", "renamed".
    // Untracked files are reported as "new" (see get_status).
    pub status: String,
}
/// Overall repository status as reported by `get_status`.
#[derive(Debug, Serialize)]
pub struct RepoStatus {
    pub is_repo: bool, // false when the data dir is not a git repository
    pub branch: Option<String>, // current branch short name, if HEAD resolves
    pub files: Vec<FileStatus>,
    pub has_changes: bool, // convenience flag: `files` is non-empty
    pub last_commit: Option<CommitInfo>,
}
/// Abbreviated commit information for status/commit responses.
#[derive(Debug, Clone, Serialize)]
pub struct CommitInfo {
    pub id: String, // shortened commit id (first 8 hex chars)
    pub message: String,
    pub timestamp: String, // RFC 3339
}
/// Extended commit info for history listings.
#[derive(Debug, Serialize)]
pub struct CommitDetail {
    pub id: String, // full commit id
    pub short_id: String, // first 8 hex chars
    pub message: String,
    pub author: String,
    pub timestamp: String, // RFC 3339, or "Unknown" if unrepresentable
    pub files_changed: usize,
}
/// Diff information for one set of changes.
#[derive(Debug, Serialize)]
pub struct DiffInfo {
    pub files: Vec<FileDiff>,
    pub stats: DiffStats,
}
/// Per-file diff with its hunks and line counts.
#[derive(Debug, Serialize)]
pub struct FileDiff {
    pub path: String,
    // "added", "deleted", "modified", "renamed", "copied" or "unknown"
    // (see parse_diff).
    pub status: String,
    pub additions: usize,
    pub deletions: usize,
    pub hunks: Vec<DiffHunk>,
}
/// Diff hunk (contiguous section of changes).
#[derive(Debug, Serialize)]
pub struct DiffHunk {
    pub header: String, // trimmed raw hunk header line from libgit2
    pub lines: Vec<DiffLine>,
}
/// Single diff line.
#[derive(Debug, Serialize)]
pub struct DiffLine {
    pub origin: char, // '+', '-', or context marker as reported by libgit2
    pub content: String,
}
/// Aggregate diff statistics.
#[derive(Debug, Serialize)]
pub struct DiffStats {
    pub files_changed: usize,
    pub insertions: usize,
    pub deletions: usize,
}
/// Remote repository information.
#[derive(Debug, Serialize)]
pub struct RemoteInfo {
    pub name: String,
    pub url: String,
    pub has_upstream: bool,
    pub ahead: usize, // commits ahead of upstream (0 when unknown)
    pub behind: usize, // commits behind upstream (0 when unknown)
}
/// Auto-commit is enabled by default.
/// The background task simply tries to commit every interval;
/// commit_all() already handles "no changes" gracefully.
/// Get repository status.
///
/// A missing repository is not an error: it is reported as `is_repo: false`
/// with empty fields.
pub fn get_status() -> Result<RepoStatus, String> {
    // Try to open the data directory as a git repo.
    let repo = match Repository::open(config::data_dir()) {
        Ok(r) => r,
        Err(_) => {
            return Ok(RepoStatus {
                is_repo: false,
                branch: None,
                files: Vec::new(),
                has_changes: false,
                last_commit: None,
            });
        }
    };

    // Current branch short name, if HEAD resolves.
    let branch = repo
        .head()
        .ok()
        .and_then(|h| h.shorthand().map(String::from));

    // Collect per-file statuses, including untracked files.
    let mut opts = StatusOptions::new();
    opts.include_untracked(true)
        .recurse_untracked_dirs(true)
        .exclude_submodules(true);
    let statuses = repo.statuses(Some(&mut opts)).map_err(|e| e.to_string())?;

    let mut files = Vec::new();
    for entry in statuses.iter() {
        let path = match entry.path() {
            Some(p) => p.to_string(),
            None => continue,
        };
        let status = entry.status();
        let label = if status.is_index_new() || status.is_wt_new() {
            "new"
        } else if status.is_index_modified() || status.is_wt_modified() {
            "modified"
        } else if status.is_index_deleted() || status.is_wt_deleted() {
            "deleted"
        } else if status.is_index_renamed() || status.is_wt_renamed() {
            "renamed"
        } else {
            continue;
        };
        files.push(FileStatus {
            path,
            status: label.to_string(),
        });
    }
    let has_changes = !files.is_empty();

    // Summarize the latest commit, if any.
    let last_commit = repo.head().ok().and_then(|head| {
        let commit = head.peel_to_commit().ok()?;
        Some(CommitInfo {
            id: commit.id().to_string()[..8].to_string(),
            message: commit.message()?.trim().to_string(),
            timestamp: chrono::DateTime::from_timestamp(commit.time().seconds(), 0)?
                .to_rfc3339(),
        })
    });

    Ok(RepoStatus {
        is_repo: true,
        branch,
        files,
        has_changes,
        last_commit,
    })
}
/// Stage all changes and create a commit.
///
/// Returns `Err("No changes to commit")` when the staged tree is identical
/// to HEAD's tree, so periodic callers can treat that as a no-op. The commit
/// message is `message` (default "Auto-save") with a UTC timestamp appended.
pub fn commit_all(message: Option<&str>) -> Result<CommitInfo, String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;

    // Stage all changes (respects .gitignore).
    let mut index = repo.index().map_err(|e| e.to_string())?;
    index
        .add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)
        .map_err(|e| e.to_string())?;
    index.write().map_err(|e| e.to_string())?;

    let tree_id = index.write_tree().map_err(|e| e.to_string())?;
    let tree = repo.find_tree(tree_id).map_err(|e| e.to_string())?;

    // Get parent commit (if any) and bail out when nothing actually changed.
    let parent = repo.head().ok().and_then(|h| h.peel_to_commit().ok());
    if let Some(ref p) = parent {
        if p.tree().map(|t| t.id()) == Ok(tree_id) {
            return Err("No changes to commit".to_string());
        }
    }

    // Create signature
    let sig = Signature::now("Ironpad", "ironpad@local").map_err(|e| e.to_string())?;

    // Literal default: plain `unwrap_or` (no closure needed for a zero-cost
    // default; was `unwrap_or_else(|| "Auto-save")`).
    let msg = message.unwrap_or("Auto-save");
    let timestamp = Utc::now().format("%Y-%m-%d %H:%M");
    let full_message = format!("{} ({})", msg, timestamp);

    // Create commit
    let parents: Vec<&git2::Commit> = parent.as_ref().map(|p| vec![p]).unwrap_or_default();
    let commit_id = repo
        .commit(Some("HEAD"), &sig, &sig, &full_message, &tree, &parents)
        .map_err(|e| e.to_string())?;

    Ok(CommitInfo {
        id: commit_id.to_string()[..8].to_string(),
        message: full_message,
        timestamp: Utc::now().to_rfc3339(),
    })
}
/// Initialize the data directory as a git repository if it is not one yet.
///
/// Also seeds a default `.gitignore` and creates an initial commit.
/// A "No changes to commit" result from the initial commit (e.g. an empty
/// data dir whose only file is an already-existing `.gitignore`) is treated
/// as success rather than failing the whole init.
pub fn init_repo() -> Result<(), String> {
    let data_path = config::data_dir();
    if Repository::open(data_path).is_ok() {
        return Ok(()); // Already a repo
    }
    Repository::init(data_path).map_err(|e| format!("Failed to init repo: {}", e))?;

    // Create initial .gitignore
    let gitignore_path = data_path.join(".gitignore");
    if !gitignore_path.exists() {
        std::fs::write(&gitignore_path, "*.tmp\n.DS_Store\n")
            .map_err(|e| format!("Failed to create .gitignore: {}", e))?;
    }

    // Initial commit; an effectively-empty worktree is not an error here.
    match commit_all(Some("Initial commit")) {
        Ok(_) => Ok(()),
        Err(e) if e.contains("No changes") => Ok(()),
        Err(e) => Err(e),
    }
}
/// Check for merge conflicts.
///
/// Combines the status flags with the index's conflict entries and returns
/// each conflicted path at most once.
pub fn check_conflicts() -> Result<Vec<String>, String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;
    let mut conflicts: Vec<String> = Vec::new();

    // An index.lock is not a conflict per se, but it indicates another git
    // operation may be in progress; surface it in the logs.
    if data_path.join(".git").join("index.lock").exists() {
        tracing::warn!("Git index.lock exists - another operation may be in progress");
    }

    // Conflicted entries according to status flags.
    let mut opts = StatusOptions::new();
    opts.include_untracked(false);
    let statuses = repo.statuses(Some(&mut opts)).map_err(|e| e.to_string())?;
    for entry in statuses.iter() {
        if !entry.status().is_conflicted() {
            continue;
        }
        if let Some(path) = entry.path() {
            conflicts.push(path.to_string());
        }
    }

    // Conflicted entries recorded directly in the index.
    let index = repo.index().map_err(|e| e.to_string())?;
    if index.has_conflicts() {
        for conflict in index.conflicts().map_err(|e| e.to_string())?.flatten() {
            let ancestor = match conflict.ancestor {
                Some(a) => a,
                None => continue,
            };
            if let Ok(path) = std::str::from_utf8(&ancestor.path) {
                if !conflicts.contains(&path.to_string()) {
                    conflicts.push(path.to_string());
                }
            }
        }
    }
    Ok(conflicts)
}
/// Push the current branch to the `origin` remote.
///
/// Authentication is attempted via the local SSH agent only; HTTPS
/// password/token flows are not handled here.
///
/// # Errors
/// Fails when the data dir is not a repository, HEAD has no branch name,
/// `origin` is missing or has an empty URL, or the push itself fails.
pub fn push_to_remote() -> Result<(), String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;
    // Get the current branch
    let head = repo.head().map_err(|e| e.to_string())?;
    let branch_name = head
        .shorthand()
        .ok_or_else(|| "Could not get branch name".to_string())?;
    // Find the remote (default to "origin")
    let mut remote = repo
        .find_remote("origin")
        .map_err(|e| format!("Remote 'origin' not found: {}", e))?;
    // Check if remote URL is configured
    let remote_url = remote.url().ok_or_else(|| "No remote URL configured".to_string())?;
    if remote_url.is_empty() {
        return Err("No remote URL configured".to_string());
    }
    // Create callbacks for authentication
    let mut callbacks = git2::RemoteCallbacks::new();
    // Ask the SSH agent for a key; "git" is the conventional user when the
    // remote URL does not specify one.
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        git2::Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
    });
    // Set up push options
    let mut push_options = git2::PushOptions::new();
    push_options.remote_callbacks(callbacks);
    // Push the current branch (refs/heads/<branch> on both sides).
    let refspec = format!("refs/heads/{}:refs/heads/{}", branch_name, branch_name);
    remote
        .push(&[&refspec], Some(&mut push_options))
        .map_err(|e| format!("Push failed: {}. Make sure SSH keys are configured.", e))?;
    tracing::info!("Successfully pushed to origin/{}", branch_name);
    Ok(())
}
/// True when an `origin` remote with a URL is configured.
pub fn has_remote() -> bool {
    let repo = match Repository::open(config::data_dir()) {
        Ok(r) => r,
        Err(_) => return false,
    };
    match repo.find_remote("origin") {
        Ok(remote) => remote.url().is_some(),
        Err(_) => false,
    }
}
/// Start the auto-commit background task.
///
/// Every 60 seconds it attempts a commit; `commit_all` returning
/// "No changes to commit" is expected and intentionally not logged.
pub fn start_auto_commit() {
    tokio::spawn(async move {
        let mut ticker = interval(Duration::from_secs(60));
        loop {
            ticker.tick().await;
            match commit_all(Some("Auto-save")) {
                Ok(info) => tracing::info!("Auto-commit: {} - {}", info.id, info.message),
                Err(e) if e.contains("No changes") => {}
                Err(e) => tracing::warn!("Auto-commit failed: {}", e),
            }
        }
    });
}
/// Get commit history, most recent first (default limit: 50).
pub fn get_log(limit: Option<usize>) -> Result<Vec<CommitDetail>, String> {
    let data_path = config::data_dir();
    let repo = Repository::open(data_path).map_err(|e| format!("Not a git repository: {}", e))?;

    let mut revwalk = repo.revwalk().map_err(|e| e.to_string())?;
    revwalk.push_head().map_err(|e| e.to_string())?;
    revwalk
        .set_sorting(git2::Sort::TIME)
        .map_err(|e| e.to_string())?;

    let mut commits = Vec::new();
    for oid_result in revwalk.take(limit.unwrap_or(50)) {
        let oid = oid_result.map_err(|e| e.to_string())?;
        let commit = repo.find_commit(oid).map_err(|e| e.to_string())?;

        // Files touched: diff against the first parent, or count the tree's
        // blobs for a parentless (initial) commit.
        let files_changed = if commit.parent_count() == 0 {
            commit
                .tree()
                .ok()
                .map(|t| count_tree_entries(&t))
                .unwrap_or(0)
        } else {
            match (
                commit.parent(0).ok().and_then(|p| p.tree().ok()),
                commit.tree().ok(),
            ) {
                (Some(pt), Some(ct)) => repo
                    .diff_tree_to_tree(Some(&pt), Some(&ct), None)
                    .map(|d| d.deltas().count())
                    .unwrap_or(0),
                _ => 0,
            }
        };

        let timestamp = chrono::DateTime::from_timestamp(commit.time().seconds(), 0)
            .map(|dt| dt.to_rfc3339())
            .unwrap_or_else(|| "Unknown".to_string());

        commits.push(CommitDetail {
            id: oid.to_string(),
            short_id: oid.to_string()[..8].to_string(),
            message: commit.message().unwrap_or("").trim().to_string(),
            author: commit.author().name().unwrap_or("Unknown").to_string(),
            timestamp,
            files_changed,
        });
    }
    Ok(commits)
}
/// Count the blob (file) entries at the top level of a tree.
///
/// NOTE: this is NOT recursive — `tree.iter()` only yields this tree's own
/// entries, so files inside subdirectories are not counted. The
/// "files changed" figure for an initial commit therefore only reflects
/// root-level files.
fn count_tree_entries(tree: &git2::Tree) -> usize {
    tree.iter()
        .filter(|entry| entry.kind() == Some(git2::ObjectType::Blob))
        .count()
}
/// Get the working-directory diff (uncommitted changes), including the index.
///
/// When the repository has no commits yet, the diff is taken against an
/// absent tree, so everything appears as added.
pub fn get_working_diff() -> Result<DiffInfo, String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    let head_tree = repo.head().ok().and_then(|h| h.peel_to_tree().ok());
    let diff = repo
        .diff_tree_to_workdir_with_index(head_tree.as_ref(), None)
        .map_err(|e| e.to_string())?;
    parse_diff(&diff)
}
/// Get the diff introduced by a specific commit (against its first parent,
/// or against nothing for the initial commit).
pub fn get_commit_diff(commit_id: &str) -> Result<DiffInfo, String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    let oid = git2::Oid::from_str(commit_id).map_err(|e| format!("Invalid commit ID: {}", e))?;
    let commit = repo
        .find_commit(oid)
        .map_err(|e| format!("Commit not found: {}", e))?;
    let commit_tree = commit.tree().map_err(|e| e.to_string())?;
    let parent_tree = match commit.parent_count() {
        0 => None,
        _ => commit.parent(0).ok().and_then(|p| p.tree().ok()),
    };
    let diff = repo
        .diff_tree_to_tree(parent_tree.as_ref(), Some(&commit_tree), None)
        .map_err(|e| e.to_string())?;
    parse_diff(&diff)
}
/// Parse a `git2::Diff` into our serializable `DiffInfo` structure.
///
/// For each delta we collect hunks and their lines; additions/deletions are
/// counted from the per-line origin markers ('+' / '-'). Deltas without a
/// patch (`Patch::from_diff` yields `Ok(None)`, e.g. binary files) simply
/// produce no hunks.
fn parse_diff(diff: &git2::Diff) -> Result<DiffInfo, String> {
    let stats = diff.stats().map_err(|e| e.to_string())?;
    let mut files = Vec::new();
    for delta_idx in 0..diff.deltas().count() {
        let delta = diff.get_delta(delta_idx).ok_or("Missing delta")?;
        // Prefer the post-image path; fall back to the pre-image for deletes.
        let path = delta
            .new_file()
            .path()
            .or_else(|| delta.old_file().path())
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_else(|| "Unknown".to_string());
        let status = match delta.status() {
            git2::Delta::Added => "added",
            git2::Delta::Deleted => "deleted",
            git2::Delta::Modified => "modified",
            git2::Delta::Renamed => "renamed",
            git2::Delta::Copied => "copied",
            _ => "unknown",
        };
        let mut hunks = Vec::new();
        let mut additions = 0;
        let mut deletions = 0;
        // Collapse the nested `if let Ok(patch)` + `if let Some(p)` into a
        // single `Ok(Some(..))` pattern.
        if let Ok(Some(p)) = git2::Patch::from_diff(diff, delta_idx) {
            for hunk_idx in 0..p.num_hunks() {
                if let Ok((hunk, _)) = p.hunk(hunk_idx) {
                    let mut lines = Vec::new();
                    for line_idx in 0..p.num_lines_in_hunk(hunk_idx).unwrap_or(0) {
                        if let Ok(line) = p.line_in_hunk(hunk_idx, line_idx) {
                            let origin = line.origin();
                            let content = std::str::from_utf8(line.content())
                                .unwrap_or("")
                                .to_string();
                            match origin {
                                '+' => additions += 1,
                                '-' => deletions += 1,
                                _ => {}
                            }
                            lines.push(DiffLine { origin, content });
                        }
                    }
                    hunks.push(DiffHunk {
                        header: std::str::from_utf8(hunk.header())
                            .unwrap_or("")
                            .trim()
                            .to_string(),
                        lines,
                    });
                }
            }
        }
        files.push(FileDiff {
            path,
            status: status.to_string(),
            additions,
            deletions,
            hunks,
        });
    }
    Ok(DiffInfo {
        files,
        stats: DiffStats {
            files_changed: stats.files_changed(),
            insertions: stats.insertions(),
            deletions: stats.deletions(),
        },
    })
}
/// Inspect the "origin" remote: URL, upstream presence, and ahead/behind
/// commit counts for the current branch.
///
/// Returns `Ok(None)` when no "origin" remote exists or it has no URL.
/// When HEAD cannot be resolved (e.g. empty repository), the remote is
/// still reported but without upstream information.
pub fn get_remote_info() -> Result<Option<RemoteInfo>, String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;

    let remote = match repo.find_remote("origin") {
        Ok(r) => r,
        Err(_) => return Ok(None),
    };
    let url = remote.url().unwrap_or("").to_string();
    if url.is_empty() {
        return Ok(None);
    }

    let head = match repo.head() {
        Ok(h) => h,
        Err(_) => {
            // No resolvable HEAD: report the remote without tracking info.
            return Ok(Some(RemoteInfo {
                name: "origin".to_string(),
                url,
                has_upstream: false,
                ahead: 0,
                behind: 0,
            }));
        }
    };

    let branch_name = head.shorthand().unwrap_or("HEAD");
    let upstream = repo
        .find_branch(branch_name, git2::BranchType::Local)
        .ok()
        .and_then(|branch| branch.upstream().ok());

    let (ahead, behind) = match upstream {
        Some(ref up) => {
            let local_tip = head.target().unwrap_or_else(git2::Oid::zero);
            let remote_tip = up.get().target().unwrap_or_else(git2::Oid::zero);
            // Best-effort: any graph-walk error collapses to (0, 0).
            repo.graph_ahead_behind(local_tip, remote_tip).unwrap_or((0, 0))
        }
        None => (0, 0),
    };

    Ok(Some(RemoteInfo {
        name: "origin".to_string(),
        url,
        has_upstream: upstream.is_some(),
        ahead,
        behind,
    }))
}
/// Fetch from the "origin" remote using its configured refspecs,
/// authenticating via whatever SSH identity the local ssh-agent offers.
pub fn fetch_from_remote() -> Result<(), String> {
    let repo = Repository::open(config::data_dir())
        .map_err(|e| format!("Not a git repository: {}", e))?;
    let mut remote = repo
        .find_remote("origin")
        .map_err(|e| format!("Remote 'origin' not found: {}", e))?;

    // SSH-agent credentials; "git" is the conventional user when the URL
    // does not specify one.
    let mut callbacks = git2::RemoteCallbacks::new();
    callbacks.credentials(|_url, username, _allowed| {
        git2::Cred::ssh_key_from_agent(username.unwrap_or("git"))
    });

    let mut options = git2::FetchOptions::new();
    options.remote_callbacks(callbacks);

    // An empty refspec slice means "use the refspecs configured for origin".
    let no_refspecs: &[&str] = &[];
    remote
        .fetch(no_refspecs, Some(&mut options), None)
        .map_err(|e| format!("Fetch failed: {}", e))?;
    Ok(())
}

View File

@@ -0,0 +1,149 @@
use std::collections::HashMap;
use std::sync::Arc;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;
/// Type of lock held on a file.
///
/// Serialized in snake_case ("editor" / "task_view") on the wire.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LockType {
    /// Exclusive lock taken while a client edits the file's content.
    Editor,
    /// Lock taken while a client works with the file through the task view.
    TaskView,
}
/// Information about a file lock.
#[derive(Debug, Clone, Serialize)]
pub struct LockInfo {
    /// Path of the locked file, as supplied by the locking client.
    pub path: String,
    /// Identifier of the client holding the lock.
    pub client_id: String,
    /// What kind of activity the lock protects.
    pub lock_type: LockType,
    /// UTC timestamp at which the lock was granted.
    pub acquired_at: DateTime<Utc>,
}
/// Error type for lock operations.
#[derive(Debug, Clone, Serialize)]
pub enum LockError {
    /// The file is already locked by another client (`holder`).
    AlreadyLocked { holder: String, lock_type: LockType },
    /// Release was requested for a file that holds no lock.
    NotLocked,
    /// The requesting client is not the owner of the existing lock.
    NotOwner,
}
impl std::fmt::Display for LockError {
    /// Human-readable description, used in log/warn messages.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let text = match self {
            LockError::AlreadyLocked { holder, lock_type } => {
                return write!(f, "File already locked by {} ({:?})", holder, lock_type);
            }
            LockError::NotLocked => "File is not locked",
            LockError::NotOwner => "You do not own this lock",
        };
        f.write_str(text)
    }
}
/// Manages file locks across the application.
///
/// Cloning is cheap: all clones share the same lock table behind an `Arc`.
#[derive(Debug, Clone)]
pub struct FileLockManager {
    /// Map from file path to the lock currently held on it.
    locks: Arc<RwLock<HashMap<String, LockInfo>>>,
}
impl FileLockManager {
    /// Create a manager with an empty lock table.
    pub fn new() -> Self {
        Self {
            locks: Arc::new(RwLock::new(HashMap::new())),
        }
    }

    /// Attempt to acquire (or re-acquire) a lock on `path` for `client_id`.
    ///
    /// Fails with [`LockError::AlreadyLocked`] when a different client holds
    /// the lock. The same client may re-acquire, which refreshes the lock
    /// type and timestamp.
    pub async fn acquire(
        &self,
        path: &str,
        client_id: &str,
        lock_type: LockType,
    ) -> Result<LockInfo, LockError> {
        let mut table = self.locks.write().await;

        // Reject only when a *different* client holds the lock; the same
        // client falling through simply replaces its own entry below.
        if let Some(held) = table.get(path) {
            if held.client_id != client_id {
                return Err(LockError::AlreadyLocked {
                    holder: held.client_id.clone(),
                    lock_type: held.lock_type,
                });
            }
        }

        let granted = LockInfo {
            path: path.to_string(),
            client_id: client_id.to_string(),
            lock_type,
            acquired_at: Utc::now(),
        };
        table.insert(path.to_string(), granted.clone());
        Ok(granted)
    }

    /// Release the lock on `path`, verifying that `client_id` owns it.
    pub async fn release(&self, path: &str, client_id: &str) -> Result<(), LockError> {
        let mut table = self.locks.write().await;
        let held = table.get(path).ok_or(LockError::NotLocked)?;
        if held.client_id != client_id {
            return Err(LockError::NotOwner);
        }
        table.remove(path);
        Ok(())
    }

    /// Return the current lock on `path`, if any.
    pub async fn is_locked(&self, path: &str) -> Option<LockInfo> {
        self.locks.read().await.get(path).cloned()
    }

    /// Return the lock on `path` only when it is held by a client other
    /// than `client_id`.
    pub async fn is_locked_by_other(&self, path: &str, client_id: &str) -> Option<LockInfo> {
        self.locks
            .read()
            .await
            .get(path)
            .filter(|held| held.client_id != client_id)
            .cloned()
    }

    /// Drop every lock held by `client_id` (used on disconnect) and return
    /// the affected paths so callers can notify other clients.
    pub async fn release_all_for_client(&self, client_id: &str) -> Vec<String> {
        let mut table = self.locks.write().await;
        let released: Vec<String> = table
            .iter()
            .filter_map(|(path, held)| (held.client_id == client_id).then(|| path.clone()))
            .collect();
        for path in &released {
            table.remove(path);
        }
        released
    }

    /// Snapshot of all current locks (debugging/monitoring).
    pub async fn get_all_locks(&self) -> Vec<LockInfo> {
        self.locks.read().await.values().cloned().collect()
    }
}
impl Default for FileLockManager {
fn default() -> Self {
Self::new()
}
}

View File

View File

@@ -0,0 +1,6 @@
// Service layer modules, one per backend capability.
pub mod filesystem;
pub mod frontmatter;
pub mod git; // commit history, diffs, remote info and fetch
pub mod locks; // in-memory async file lock manager
pub mod markdown;
pub mod search; // ripgrep-backed note search with manual fallback

View File

@@ -0,0 +1,188 @@
use std::fs;
use std::path::Path;
use std::process::{Command, Stdio};
use serde::Serialize;
use walkdir::WalkDir;
use crate::config;
/// Search result item: one matching file plus its matched lines.
#[derive(Debug, Serialize)]
pub struct SearchResult {
    /// Path normalized relative to the data directory, `/`-separated.
    pub path: String,
    /// File-stem of the path, used as a display title.
    pub title: String,
    /// Matching lines within the file (capped at 5 per file).
    pub matches: Vec<SearchMatch>,
}
/// Individual match within a file.
#[derive(Debug, Serialize)]
pub struct SearchMatch {
    /// 1-based line number of the match.
    pub line_number: u32,
    /// The matching line, trimmed of surrounding whitespace.
    pub line_content: String,
}
/// Search notes for `query`, case-insensitively.
///
/// Tries the external `rg` binary first for speed, and falls back to an
/// in-process scan when it is unavailable. An empty or whitespace-only
/// query yields no results.
pub fn search_notes(query: &str) -> Result<Vec<SearchResult>, String> {
    if query.trim().is_empty() {
        return Ok(Vec::new());
    }

    match search_with_ripgrep(query) {
        Ok(results) => Ok(results),
        Err(err) => {
            tracing::debug!("ripgrep not available, falling back to manual search: {}", err);
            search_manual(query)
        }
    }
}
/// Run the external `rg` binary over the data directory and parse its JSON
/// event stream into search results.
///
/// Errors only when the binary cannot be spawned; a non-zero exit with no
/// output (ripgrep's "no matches" signal) is treated as an empty result.
fn search_with_ripgrep(query: &str) -> Result<Vec<SearchResult>, String> {
    let root = config::data_dir().to_string_lossy().to_string();

    let output = Command::new("rg")
        .args([
            "--json",           // machine-readable event stream
            "--ignore-case",    // case-insensitive matching
            "--type", "md",     // markdown files only
            "--max-count", "5", // cap matches per file
            query,
            root.as_str(),
        ])
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .output()
        .map_err(|e| format!("Failed to run ripgrep: {}", e))?;

    if !output.status.success() && output.stdout.is_empty() {
        return Ok(Vec::new());
    }
    parse_ripgrep_output(&output.stdout)
}
/// Decode ripgrep's `--json` line stream, grouping "match" events by file.
fn parse_ripgrep_output(output: &[u8]) -> Result<Vec<SearchResult>, String> {
    use std::collections::HashMap;

    let text = String::from_utf8_lossy(output);
    let mut by_file: HashMap<String, SearchResult> = HashMap::new();

    for line in text.lines() {
        // Non-JSON lines and non-"match" events are skipped.
        let Ok(event) = serde_json::from_str::<serde_json::Value>(line) else {
            continue;
        };
        if event["type"] != "match" {
            continue;
        }
        let data = &event["data"];

        let raw_path = data["path"]["text"].as_str().unwrap_or("");
        let line_number = data["line_number"].as_u64().unwrap_or(0) as u32;
        let line_content = data["lines"]["text"]
            .as_str()
            .unwrap_or("")
            .trim()
            .to_string();

        let normalized = normalize_path(raw_path);
        let entry = by_file.entry(normalized.clone()).or_insert_with(|| SearchResult {
            title: extract_title_from_path(&normalized),
            path: normalized,
            matches: Vec::new(),
        });
        entry.matches.push(SearchMatch {
            line_number,
            line_content,
        });
    }

    Ok(by_file.into_values().collect())
}
/// Manual search fallback (no external dependencies)
fn search_manual(query: &str) -> Result<Vec<SearchResult>, String> {
let query_lower = query.to_lowercase();
let root = config::data_dir();
let mut results = Vec::new();
for entry in WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_ignored(e.path()))
.filter_map(Result::ok)
{
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("md") {
continue;
}
let content = match fs::read_to_string(path) {
Ok(c) => c,
Err(_) => continue,
};
let mut matches = Vec::new();
for (i, line) in content.lines().enumerate() {
if line.to_lowercase().contains(&query_lower) {
matches.push(SearchMatch {
line_number: (i + 1) as u32,
line_content: line.trim().to_string(),
});
// Limit matches per file
if matches.len() >= 5 {
break;
}
}
}
if !matches.is_empty() {
let normalized_path = normalize_path(&path.to_string_lossy());
let title = extract_title_from_path(&normalized_path);
results.push(SearchResult {
path: normalized_path,
title,
matches,
});
}
}
Ok(results)
}
/// True when any path component names a directory we never search
/// (`.git`, `assets`, `archive`).
fn is_ignored(path: &Path) -> bool {
    const SKIP: [&str; 3] = [".git", "assets", "archive"];
    path.components()
        .filter_map(|c| c.as_os_str().to_str())
        .any(|name| SKIP.contains(&name))
}
/// Strip everything up to and including the `data` directory from `path`
/// and normalize separators to `/`.
///
/// Fix: the original sliced `&path[idx + 5..]` unconditionally, which
/// panics when the path ends right at "data" (e.g. the data directory
/// itself); a checked slice returns "" instead.
fn normalize_path(path: &str) -> String {
    // NOTE(review): this matches the first occurrence of the substring
    // "data" anywhere in the path, not just a component named "data" —
    // a path containing e.g. "mydata" would be mangled. Kept as-is for
    // compatibility with the watcher's normalization; confirm intent.
    if let Some(idx) = path.find("data") {
        // Skip "data" (4 bytes) plus the following separator, without
        // panicking when the path ends at the match.
        let stripped = path.get(idx + 5..).unwrap_or("");
        return stripped
            .replace('\\', "/")
            .trim_start_matches('/')
            .to_string();
    }
    path.replace('\\', "/")
}
/// Derive a display title from a path: the file name without its extension,
/// or "Untitled" when no stem can be extracted.
fn extract_title_from_path(path: &str) -> String {
    match Path::new(path).file_stem().and_then(|s| s.to_str()) {
        Some(stem) => stem.to_string(),
        None => "Untitled".to_string(),
    }
}

161
backend/src/watcher.rs Normal file
View File

@@ -0,0 +1,161 @@
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use notify::{RecursiveMode, Watcher};
use notify_debouncer_full::{new_debouncer, DebouncedEvent};
use tokio::sync::mpsc;
use crate::config;
use crate::websocket::{WsMessage, WsState};
/// Start the file watcher in a background task.
///
/// Watches the data directory recursively and forwards debounced filesystem
/// events to `process_event`, which broadcasts change notifications through
/// `ws_state`. Returns an error when the watcher cannot be created, the
/// data directory is missing, or the watch cannot be registered.
pub async fn start_watcher(ws_state: Arc<WsState>) -> Result<(), String> {
    let (tx, mut rx) = mpsc::channel::<Vec<DebouncedEvent>>(100);
    // Create debouncer with 500ms debounce time; the callback runs on the
    // watcher's own thread, so blocking_send (not send().await) is correct.
    let debouncer = new_debouncer(
        Duration::from_millis(500),
        None,
        move |result: Result<Vec<DebouncedEvent>, Vec<notify::Error>>| {
            if let Ok(events) = result {
                let _ = tx.blocking_send(events);
            }
        },
    )
    .map_err(|e| format!("Failed to create file watcher: {}", e))?;
    // Watch the data directory
    let data_path = config::data_dir();
    if !data_path.exists() {
        return Err(format!("Data directory does not exist: {}", data_path.display()));
    }
    // The debouncer stops watching when dropped, so it must outlive this
    // function; it is moved into the spawned task below to keep it alive.
    let debouncer = Arc::new(tokio::sync::Mutex::new(debouncer));
    {
        let mut d = debouncer.lock().await;
        d.watcher().watch(data_path, RecursiveMode::Recursive)
            .map_err(|e| format!("Failed to watch directory: {}", e))?;
    }
    tracing::info!("File watcher started for: {}", data_path.display());
    // Spawn task to process file events for as long as the channel is open.
    let ws_state_clone = ws_state.clone();
    tokio::spawn(async move {
        // Keep debouncer alive for the lifetime of the event loop.
        let _debouncer = debouncer;
        while let Some(events) = rx.recv().await {
            for event in events {
                process_event(&event, &ws_state_clone);
            }
        }
    });
    Ok(())
}
/// Track recent saves to avoid notifying about our own changes
use std::sync::Mutex;
use std::collections::HashMap;
use std::time::Instant;
lazy_static::lazy_static! {
    // Maps normalized file path -> time of the last save issued by this
    // process; consulted by `process_event` to suppress self-notifications.
    static ref RECENT_SAVES: Mutex<HashMap<String, Instant>> = Mutex::new(HashMap::new());
}
/// Record that `path` was just saved by this process (call before saving),
/// so the watcher can ignore the resulting filesystem event.
/// Best-effort: a poisoned mutex is silently skipped.
pub fn mark_file_saved(path: &str) {
    if let Ok(mut recent) = RECENT_SAVES.lock() {
        recent.insert(path.to_owned(), Instant::now());
    }
}
/// Process a single debounced file event and broadcast a change
/// notification to connected WebSocket clients.
///
/// Events are dropped when they concern non-markdown files, atomic-write
/// temp files, anything under a `.git`/`archive` directory, or a file that
/// this process itself saved within the last 2 seconds.
fn process_event(event: &DebouncedEvent, ws_state: &WsState) {
    use notify::EventKind;
    // Only markdown files are interesting to clients.
    let paths: Vec<_> = event
        .paths
        .iter()
        .filter(|p| {
            p.extension()
                .and_then(|e| e.to_str())
                .map(|e| e == "md")
                .unwrap_or(false)
        })
        .collect();
    if paths.is_empty() {
        return;
    }
    // Skip temporary files used for atomic writes (".name.tmp").
    if paths.iter().any(|p| {
        p.file_name()
            .and_then(|n| n.to_str())
            .map(|n| n.starts_with('.') && n.ends_with(".tmp"))
            .unwrap_or(false)
    }) {
        return;
    }
    // Skip anything inside `.git` or `archive` directories.
    // Fix: match whole path components instead of substrings so files like
    // "my-archives.md" are no longer silently ignored (consistent with
    // `is_ignored` in the search service).
    if paths.iter().any(|p| {
        p.components().any(|c| {
            matches!(c.as_os_str().to_str(), Some(".git") | Some("archive"))
        })
    }) {
        return;
    }
    // NOTE(review): only the first path of a multi-path event (e.g. a
    // rename pair) is reported; confirm whether both sides are needed.
    let path_str = normalize_path(&paths[0]);
    // Suppress events caused by our own saves (within the last 2 seconds).
    if let Ok(mut saves) = RECENT_SAVES.lock() {
        // Garbage-collect stale entries so the map stays small.
        saves.retain(|_, t| t.elapsed().as_secs() < 5);
        if let Some(saved_at) = saves.get(&path_str) {
            if saved_at.elapsed().as_secs() < 2 {
                return; // Skip - this was our own save
            }
        }
    }
    let msg = match &event.kind {
        EventKind::Create(_) => {
            tracing::info!("External file created: {}", path_str);
            Some(WsMessage::FileCreated { path: path_str })
        }
        EventKind::Modify(_) => {
            tracing::info!("External file modified: {}", path_str);
            Some(WsMessage::FileModified { path: path_str })
        }
        EventKind::Remove(_) => {
            tracing::info!("External file deleted: {}", path_str);
            Some(WsMessage::FileDeleted { path: path_str })
        }
        _ => None,
    };
    if let Some(msg) = msg {
        ws_state.broadcast(msg);
    }
}
/// Normalize a filesystem path for client consumption: strip everything up
/// to and including the `data` directory and use `/` separators.
///
/// Fix: the original sliced `&path_str[idx + 5..]` unconditionally, which
/// panics when the path ends right at "data"; a checked slice returns ""
/// instead.
fn normalize_path(path: &Path) -> String {
    let path_str = path.to_string_lossy();
    // NOTE(review): matches the first substring "data", not a component
    // named "data"; a path containing e.g. "mydata" would be mangled.
    if let Some(idx) = path_str.find("data") {
        // Skip "data" (4 bytes) plus the following separator, safely.
        let stripped = path_str.get(idx + 5..).unwrap_or("");
        return stripped.replace('\\', "/").trim_start_matches('/').to_string();
    }
    path_str.replace('\\', "/")
}

230
backend/src/websocket.rs Normal file
View File

@@ -0,0 +1,230 @@
use std::collections::HashSet;
use std::sync::Arc;
use axum::{
extract::{
ws::{Message, WebSocket},
State, WebSocketUpgrade,
},
response::IntoResponse,
};
use futures_util::{SinkExt, StreamExt};
use serde::{Deserialize, Serialize};
use tokio::sync::{broadcast, RwLock};
use crate::services::locks::{FileLockManager, LockType};
/// WebSocket message types sent to clients.
///
/// Serialized as an adjacently-tagged `{"type": ..., "payload": ...}`
/// envelope via serde.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", content = "payload")]
pub enum WsMessage {
    /// A file was created
    FileCreated { path: String },
    /// A file was modified
    FileModified { path: String },
    /// A file was deleted
    FileDeleted { path: String },
    /// A file was renamed
    FileRenamed { from: String, to: String },
    /// A file was locked; `lock_type` is "editor" or "task_view"
    FileLocked {
        path: String,
        client_id: String,
        lock_type: String,
    },
    /// A file was unlocked
    FileUnlocked { path: String },
    /// Git conflict detected
    GitConflict { files: Vec<String> },
    /// Server is sending a ping
    Ping,
    /// Client connection confirmed; carries the server-assigned client id
    Connected { client_id: String },
    /// Error message
    Error { message: String },
}
/// Client message types received from clients.
///
/// Internally tagged (`{"type": ...}`); variants are renamed to the
/// snake_case wire strings below.
#[derive(Debug, Clone, Deserialize)]
#[serde(tag = "type")]
pub enum ClientMessage {
    /// Request to lock a file; `lock_type` must be "editor" or "task_view"
    #[serde(rename = "lock_file")]
    LockFile { path: String, lock_type: String },
    /// Request to unlock a file
    #[serde(rename = "unlock_file")]
    UnlockFile { path: String },
    /// Ping response (heartbeat)
    #[serde(rename = "pong")]
    Pong,
}
/// Shared state for WebSocket connections.
///
/// Cheap to clone: every field is reference-counted internally.
#[derive(Debug, Clone)]
pub struct WsState {
    /// Broadcast channel for sending messages to all clients
    pub tx: broadcast::Sender<WsMessage>,
    /// Set of connected client IDs
    pub clients: Arc<RwLock<HashSet<String>>>,
    /// File lock manager
    pub lock_manager: FileLockManager,
}
impl WsState {
    /// Create fresh state: a 100-slot broadcast channel, no connected
    /// clients, and an empty lock table.
    pub fn new() -> Self {
        let (tx, _initial_rx) = broadcast::channel(100);
        Self {
            tx,
            clients: Arc::new(RwLock::new(HashSet::new())),
            lock_manager: FileLockManager::new(),
        }
    }

    /// Broadcast a message to every connected client. Send errors (no
    /// active receivers) are deliberately ignored.
    pub fn broadcast(&self, msg: WsMessage) {
        let _ = self.tx.send(msg);
    }
}
impl Default for WsState {
fn default() -> Self {
Self::new()
}
}
/// WebSocket upgrade handler.
///
/// Accepts the HTTP upgrade request and hands the resulting socket to
/// [`handle_socket`] together with the shared state.
pub async fn ws_handler(
    ws: WebSocketUpgrade,
    State(state): State<Arc<WsState>>,
) -> impl IntoResponse {
    ws.on_upgrade(move |socket| handle_socket(socket, state))
}
/// Handle the full lifecycle of one WebSocket connection: register the
/// client, forward broadcast messages to it, process its requests, and on
/// disconnect release its locks and unregister it.
async fn handle_socket(socket: WebSocket, state: Arc<WsState>) {
    let client_id = uuid::Uuid::new_v4().to_string();
    // Register the client.
    {
        let mut clients = state.clients.write().await;
        clients.insert(client_id.clone());
    }
    tracing::info!("WebSocket client connected: {}", client_id);
    let (mut sender, mut receiver) = socket.split();
    // Subscribe to broadcast channel before sending anything so no
    // notification is missed.
    let mut rx = state.tx.subscribe();
    // Tell the client its assigned id.
    let connected_msg = WsMessage::Connected {
        client_id: client_id.clone(),
    };
    if let Ok(json) = serde_json::to_string(&connected_msg) {
        let _ = sender.send(Message::Text(json.into())).await;
    }
    // Forward broadcast messages to this client until the socket dies.
    let mut send_task = tokio::spawn(async move {
        while let Ok(msg) = rx.recv().await {
            if let Ok(json) = serde_json::to_string(&msg) {
                if sender.send(Message::Text(json.into())).await.is_err() {
                    break;
                }
            }
        }
    });
    // Process incoming messages from the client.
    let state_clone = state.clone();
    let client_id_clone = client_id.clone();
    let mut recv_task = tokio::spawn(async move {
        while let Some(Ok(msg)) = receiver.next().await {
            match msg {
                Message::Text(text) => {
                    if let Ok(client_msg) = serde_json::from_str::<ClientMessage>(&text) {
                        handle_client_message(&state_clone, &client_id_clone, client_msg).await;
                    } else {
                        tracing::debug!(
                            "Unknown message from {}: {}",
                            client_id_clone,
                            text
                        );
                    }
                }
                Message::Close(_) => break,
                _ => {}
            }
        }
    });
    // When either direction finishes, abort the other so neither task
    // lingers after disconnect. (Fix: the original left the surviving task
    // running until its next send/recv happened to fail.)
    tokio::select! {
        _ = &mut send_task => recv_task.abort(),
        _ = &mut recv_task => send_task.abort(),
    }
    // Release every lock this client held and tell other clients.
    let released_paths = state.lock_manager.release_all_for_client(&client_id).await;
    for path in released_paths {
        state.broadcast(WsMessage::FileUnlocked { path });
    }
    // Unregister the client.
    {
        let mut clients = state.clients.write().await;
        clients.remove(&client_id);
    }
    tracing::info!("WebSocket client disconnected: {}", client_id);
}
/// Dispatch a single parsed message from a client.
async fn handle_client_message(state: &Arc<WsState>, client_id: &str, msg: ClientMessage) {
    match msg {
        ClientMessage::LockFile { path, lock_type } => {
            // Translate the wire string into a LockType; unknown strings
            // are logged and ignored.
            let parsed = match lock_type.as_str() {
                "editor" => LockType::Editor,
                "task_view" => LockType::TaskView,
                other => {
                    tracing::warn!("Unknown lock type: {}", other);
                    return;
                }
            };
            match state.lock_manager.acquire(&path, client_id, parsed).await {
                Ok(info) => {
                    let type_name = match info.lock_type {
                        LockType::Editor => "editor",
                        LockType::TaskView => "task_view",
                    };
                    state.broadcast(WsMessage::FileLocked {
                        path: info.path,
                        client_id: info.client_id,
                        lock_type: type_name.to_string(),
                    });
                }
                Err(e) => {
                    // Lock contention is expected; log and move on. A
                    // targeted error reply to this client could be added.
                    tracing::warn!("Failed to acquire lock: {}", e);
                }
            }
        }
        ClientMessage::UnlockFile { path } => {
            match state.lock_manager.release(&path, client_id).await {
                Ok(()) => state.broadcast(WsMessage::FileUnlocked { path }),
                Err(e) => tracing::warn!("Failed to release lock: {}", e),
            }
        }
        ClientMessage::Pong => {} // heartbeat response; nothing to do
    }
}