feature/library-patch-endpoint #94
470
src/libraries.rs
470
src/libraries.rs
@@ -1,8 +1,9 @@
|
||||
use actix_web::{HttpResponse, Responder, get, web::Data};
|
||||
use actix_web::{HttpResponse, Responder, get, patch, web, web::Data};
|
||||
use chrono::Utc;
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use log::{info, warn};
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, RwLock};
|
||||
@@ -79,14 +80,21 @@ impl Library {
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse an excluded_dirs string into a Vec of entries, skipping empty
/// tokens. `None` (a NULL column) yields an empty Vec. Duplicates pass
/// through untouched — `PathExcluder` tolerates repeats, and the
/// storage-side normaliser is where dedup lives.
///
/// Both `,` and newlines (`\n` / `\r\n`) act as separators so the UI's
/// textarea can submit one-entry-per-line input without requiring
/// commas. The DB keeps the canonical comma-joined form (see
/// `normalize_excluded_dirs_input`); honouring newlines here keeps the
/// read direction round-trip safe with the frontend submit path.
pub fn parse_excluded_dirs_column(raw: Option<&str>) -> Vec<String> {
    raw.map_or_else(Vec::new, |s| {
        // A char-array pattern splits on any of the listed separators.
        // The empty token a `\r\n` pair produces is trimmed to "" and
        // dropped by the filter below.
        s.split([',', '\n', '\r'])
            .map(str::trim)
            .filter(|entry| !entry.is_empty())
            .map(String::from)
            .collect()
    })
}
|
||||
|
||||
/// Validate one excluded_dirs entry, normalising trivial cosmetic
/// differences and rejecting forms that `PathExcluder` would silently
/// drop. Returns the canonical entry to store, or a message explaining
/// what is wrong with it.
///
/// Rules:
/// - Backslashes are rejected — PathExcluder strips only a leading `/`,
///   so a Windows-typed `\photos` or `photos\2024` falls into the
///   component-pattern bucket and never matches anything. The error
///   suggests the forward-slash form.
/// - A Windows drive-letter prefix (`Z:` etc.) is rejected — excluded
///   entries are *relative to the library root*, never absolute system
///   paths.
/// - A slash-containing entry without a leading slash is rejected —
///   component patterns match a single segment only; the user almost
///   certainly meant the leading-slash form.
/// - `..` segments in a path entry are rejected — `base.join("../x")`
///   doesn't canonicalise, so the resulting prefix never matches and
///   the exclude silently fails.
/// - Trailing slashes on path entries are silently stripped
///   (`/photos/` → `/photos`) — purely cosmetic.
pub fn validate_excluded_dirs_entry(entry: &str) -> Result<String, String> {
    let candidate = entry.trim();
    if candidate.is_empty() {
        return Err("empty entry".to_string());
    }
    if candidate.contains('\\') {
        return Err(format!(
            "'{}': use forward slashes — backslash paths never match on the watcher's component-by-component compare",
            candidate
        ));
    }
    // Windows drive-letter prefix like `Z:` or `Z:/something`. An ASCII
    // letter followed by `:` is the canonical form; longer Windows
    // drive-equivalents (`\\?\Volume{…}`) are already caught by the
    // backslash check above. A non-ASCII first char fails
    // `is_ascii_alphabetic`, matching the byte-level check this replaces.
    let mut head = candidate.chars();
    if matches!(
        (head.next(), head.next()),
        (Some(letter), Some(':')) if letter.is_ascii_alphabetic()
    ) {
        return Err(format!(
            "'{}': excluded entries are relative to the library root, not absolute system paths — drop the drive letter",
            candidate
        ));
    }
    match candidate.strip_prefix('/') {
        Some(rel) => {
            // Path form. `..` traversal never normalises —
            // `base.join("../x")` stays literal, so the prefix-match
            // downstream can never fire.
            if rel.split('/').any(|segment| segment == "..") {
                return Err(format!(
                    "'{}': '..' segments don't normalise — the prefix-match never fires",
                    candidate
                ));
            }
            // Drop any trailing slashes (`/photos/` → `/photos`) —
            // purely cosmetic; PathBuf::starts_with treats both forms
            // the same.
            let without_tail = rel.trim_end_matches('/');
            let canonical = if without_tail.len() == rel.len() {
                candidate.to_string()
            } else {
                format!("/{}", without_tail)
            };
            // A bare "/" (after stripping) would exclude the library
            // root — certainly a typo.
            if canonical == "/" {
                return Err("'/': excluding the library root is almost certainly a typo".to_string());
            }
            Ok(canonical)
        }
        None => {
            // Component-pattern form: a single segment only. A `/`
            // here is the common "I forgot the leading slash" typo —
            // reject so the user fixes it instead of staring at an
            // exclude that does nothing.
            if candidate.contains('/') {
                return Err(format!(
                    "'{}': multi-segment names only match with a leading slash — try '/{}'",
                    candidate, candidate
                ));
            }
            Ok(candidate.to_string())
        }
    }
}
|
||||
|
||||
/// Canonicalise an excluded_dirs string for storage: validate each
|
||||
/// entry, then parse → trim → dedupe (preserving insertion order) →
|
||||
/// comma-join with no inner whitespace. Empty / whitespace-only input
|
||||
/// → `Ok(None)` (writes NULL). Any entry that fails validation aborts
|
||||
/// the whole patch with a descriptive error so the operator can fix
|
||||
/// the typo before retrying.
|
||||
///
|
||||
/// Used by `PATCH /libraries/{id}` so two users typing the same entries
|
||||
/// in different orders / casings / whitespace land on the same stored
|
||||
/// form, and a typo'd duplicate (`@eaDir, @eaDir`) collapses on save.
|
||||
/// Round-trip stable: writing the output back through this function
|
||||
/// yields the same string.
|
||||
pub fn normalize_excluded_dirs_input(raw: &str) -> Result<Option<String>, String> {
|
||||
let parsed = parse_excluded_dirs_column(Some(raw));
|
||||
if parsed.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
let mut deduped: Vec<String> = Vec::with_capacity(parsed.len());
|
||||
for entry in parsed {
|
||||
let validated = validate_excluded_dirs_entry(&entry)?;
|
||||
if seen.insert(validated.clone()) {
|
||||
deduped.push(validated);
|
||||
}
|
||||
}
|
||||
if deduped.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(deduped.join(",")))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LibraryRow> for Library {
|
||||
fn from(row: LibraryRow) -> Self {
|
||||
Library {
|
||||
@@ -334,16 +457,30 @@ pub struct LibraryStatus {
|
||||
/// Response body for `GET /libraries`: per-library status rows plus the
/// read-only global exclusion list.
#[derive(serde::Serialize)]
pub struct LibrariesResponse {
    // One status entry per configured library (library + health).
    pub libraries: Vec<LibraryStatus>,
    /// Globally-excluded paths/patterns from the `EXCLUDED_DIRS` env var.
    /// Applied **in union** with each library's own `excluded_dirs`. Surfaced
    /// here so an admin UI can show the operator "you already skip these
    /// everywhere" before they add per-library entries that would duplicate
    /// the global list. Read-only — globals live in `.env` and aren't
    /// mutable via the API today.
    pub global_excluded_dirs: Vec<String>,
}
|
||||
|
||||
#[get("/libraries")]
|
||||
pub async fn list_libraries(_claims: Claims, app_state: Data<AppState>) -> impl Responder {
|
||||
// Read from the live view so a recent PATCH /libraries/{id} that
|
||||
// flipped `enabled` or rewrote `excluded_dirs` surfaces immediately
|
||||
// — the immutable `app_state.libraries` snapshot is stale once the
|
||||
// first mutation lands.
|
||||
let live_guard = app_state
|
||||
.live_libraries
|
||||
.read()
|
||||
.unwrap_or_else(|e| e.into_inner());
|
||||
let health_guard = app_state
|
||||
.library_health
|
||||
.read()
|
||||
.unwrap_or_else(|e| e.into_inner());
|
||||
let libraries = app_state
|
||||
.libraries
|
||||
let libraries = live_guard
|
||||
.iter()
|
||||
.map(|lib| LibraryStatus {
|
||||
library: lib.clone(),
|
||||
@@ -353,7 +490,118 @@ pub async fn list_libraries(_claims: Claims, app_state: Data<AppState>) -> impl
|
||||
.unwrap_or(LibraryHealth::Online),
|
||||
})
|
||||
.collect();
|
||||
HttpResponse::Ok().json(LibrariesResponse { libraries })
|
||||
HttpResponse::Ok().json(LibrariesResponse {
|
||||
libraries,
|
||||
global_excluded_dirs: app_state.excluded_dirs.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Body for PATCH /libraries/{id}. Both fields are optional — omitting
/// one leaves it untouched. `excluded_dirs` is the same comma-separated
/// shape as the DB column; an empty string clears (writes NULL).
#[derive(Deserialize, Debug)]
pub struct PatchLibraryBody {
    // New `enabled` flag; absent (`None`) leaves the stored value alone.
    pub enabled: Option<bool>,
    // Replacement exclusion list, comma- (or newline-) separated as
    // accepted by `normalize_excluded_dirs_input`. Absent → untouched;
    // `Some("")` → cleared to NULL.
    pub excluded_dirs: Option<String>,
}
|
||||
|
||||
/// Mutate one library row. The watcher reads `app_state.live_libraries`
/// at the top of each tick, so a successful PATCH is picked up within
/// one WATCH_QUICK_INTERVAL_SECONDS without restart — no separate
/// `apply_now` signal. Returns the updated `Library` so the caller can
/// render the new state without a follow-up GET.
///
/// Despite CLAUDE.md noting "Toggle via SQL; there is intentionally no
/// HTTP endpoint for library mutation", we now expose this for Apollo's
/// Settings panel. The single-user trust model hasn't changed; the
/// endpoint just removes the SSH-and-sqlite3 step.
///
/// Responses: 200 + updated `Library` JSON on success; 404 when no row
/// matches the id; 422 for an empty patch body or an invalid
/// `excluded_dirs` entry; 500 when a DB update fails.
#[patch("/libraries/{id}")]
pub async fn patch_library(
    _claims: Claims,
    path: web::Path<i32>,
    body: web::Json<PatchLibraryBody>,
    app_state: Data<AppState>,
) -> impl Responder {
    let lib_id = path.into_inner();
    let body = body.into_inner();

    // A patch with neither field is a no-op; reject it explicitly
    // rather than returning a misleading 200.
    if body.enabled.is_none() && body.excluded_dirs.is_none() {
        return HttpResponse::UnprocessableEntity().body("empty patch body");
    }

    let mut conn = crate::database::connect();

    // Build the SET clause. Diesel's set() takes a tuple of assignments;
    // we apply each field independently so an absent field doesn't get
    // forced to NULL / its default.
    let mut affected = 0usize;
    if let Some(enabled) = body.enabled {
        match diesel::update(libraries::table.filter(libraries::id.eq(lib_id)))
            .set(libraries::enabled.eq(enabled))
            .execute(&mut conn)
        {
            // max() rather than += : either update returning 1 row is
            // enough to prove the id exists.
            Ok(n) => affected = affected.max(n),
            Err(e) => {
                warn!("PATCH /libraries/{}: enabled update failed: {:?}", lib_id, e);
                return HttpResponse::InternalServerError().body(format!("{}", e));
            }
        }
    }
    if let Some(raw) = body.excluded_dirs.as_deref() {
        // Canonicalise on write — trim, dedupe, validate, drop empties —
        // so the DB stores a round-trip-stable form regardless of how
        // messy the user typed it. Empty / whitespace-only → NULL
        // (matches a never-set library). Validation failures (Windows
        // backslash paths, drive letters, `..` traversal, etc.) bounce
        // back as 422 so the operator can fix the typo.
        let normalised = match normalize_excluded_dirs_input(raw) {
            Ok(v) => v,
            Err(msg) => return HttpResponse::UnprocessableEntity().body(msg),
        };
        let stored: Option<&str> = normalised.as_deref();
        match diesel::update(libraries::table.filter(libraries::id.eq(lib_id)))
            .set(libraries::excluded_dirs.eq(stored))
            .execute(&mut conn)
        {
            Ok(n) => affected = affected.max(n),
            Err(e) => {
                warn!(
                    "PATCH /libraries/{}: excluded_dirs update failed: {:?}",
                    lib_id, e
                );
                return HttpResponse::InternalServerError().body(format!("{}", e));
            }
        }
    }

    // Zero rows touched by every attempted update → the id doesn't exist.
    if affected == 0 {
        return HttpResponse::NotFound().body(format!("library id {} not found", lib_id));
    }

    // Refresh the live view from the canonical DB state. Reloading the
    // whole table (rather than mutating one entry in place) is cheap
    // (handful of rows) and keeps the in-memory and DB views trivially
    // consistent.
    let fresh = load_all(&mut conn);
    let updated = fresh.iter().find(|l| l.id == lib_id).cloned();
    {
        // Scoped so the write guard drops before we build the response.
        let mut live = app_state
            .live_libraries
            .write()
            .unwrap_or_else(|e| e.into_inner());
        *live = fresh;
    }

    match updated {
        Some(lib) => {
            info!(
                "PATCH /libraries/{}: enabled={:?} excluded_dirs={:?} → applied",
                lib_id, body.enabled, body.excluded_dirs
            );
            HttpResponse::Ok().json(lib)
        }
        // Row vanished between update and reload — NOTE(review): only
        // reachable if something else deleted the row concurrently.
        None => HttpResponse::NotFound().body(format!("library id {} not found after update", lib_id)),
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -496,6 +744,40 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
fn parse_excluded_dirs_column_splits_on_newlines_too() {
    // One-entry-per-line textarea input is the recommended UX: the
    // "I forgot the comma" footgun (`.thumbnails .thumbnails2`) would
    // otherwise silently become a single never-matching pattern.
    let expect = |items: &[&str]| -> Vec<String> { items.iter().map(|s| s.to_string()).collect() };
    assert_eq!(
        parse_excluded_dirs_column(Some("@eaDir\n.thumbnails\n/private")),
        expect(&["@eaDir", ".thumbnails", "/private"])
    );
    // Windows CRLF line endings: `\r` is its own separator, so the
    // empty token between `\r` and `\n` trims away and is dropped.
    assert_eq!(
        parse_excluded_dirs_column(Some("a\r\nb\r\nc")),
        expect(&["a", "b", "c"])
    );
    // Commas and newlines mix freely — pasted list plus inline edits.
    assert_eq!(
        parse_excluded_dirs_column(Some("a, b\nc,d")),
        expect(&["a", "b", "c", "d"])
    );
}
|
||||
|
||||
#[test]
|
||||
fn effective_excluded_dirs_unions_global_and_per_library() {
|
||||
let lib_no_extras = Library {
|
||||
@@ -523,6 +805,178 @@ mod tests {
|
||||
assert_eq!(combined.len(), 3);
|
||||
}
|
||||
|
||||
#[test]
fn effective_excluded_dirs_keeps_overlap_between_global_and_per_library() {
    // It's legal for both sources to exclude `@eaDir` — `PathExcluder`
    // tolerates repeats, so nothing dedupes here. This pins that design
    // choice: a future refactor that tightens it must update code AND
    // this test together.
    let global_excludes = vec!["@eaDir".to_string()];
    let library = Library {
        id: 1,
        name: "main".into(),
        root_path: "/x".into(),
        enabled: true,
        excluded_dirs: vec!["@eaDir".to_string(), "/private".to_string()],
    };
    let union = library.effective_excluded_dirs(&global_excludes);
    // Two occurrences of @eaDir plus /private → three entries total.
    assert_eq!(union, vec!["@eaDir", "@eaDir", "/private"]);
}
|
||||
|
||||
#[test]
fn normalize_excluded_dirs_input_handles_empty_and_whitespace() {
    // Every blank-ish shape — empty, spaces, bare separators — maps to
    // Ok(None), i.e. a NULL column.
    for raw in ["", " ", ",,,", " , , "] {
        assert_eq!(normalize_excluded_dirs_input(raw), Ok(None));
    }
}
|
||||
|
||||
#[test]
fn normalize_excluded_dirs_input_trims_per_entry() {
    // Every item loses its surrounding whitespace and the stored form is
    // comma-joined with no spaces — exactly the shape
    // parse_excluded_dirs_column reads back.
    let stored = normalize_excluded_dirs_input(" @eaDir , /private , .thumbnails ");
    assert_eq!(stored, Ok(Some("@eaDir,/private,.thumbnails".to_string())));
}
|
||||
|
||||
#[test]
fn normalize_excluded_dirs_input_dedupes_preserving_first_occurrence() {
    // Exact-string duplicates collapse to the first occurrence, keeping
    // the operator's typed order so the round-tripped value still looks
    // like what they entered.
    let repeated = normalize_excluded_dirs_input("@eaDir, /private, @eaDir, /private");
    assert_eq!(repeated, Ok(Some("@eaDir,/private".to_string())));
    // Whitespace-distinct spellings collapse to one canonical form, but
    // case is preserved: `Foo` and `foo` stay distinct keys (filesystem
    // case-sensitivity is platform-dependent; no call is made here).
    let cased = normalize_excluded_dirs_input(" Foo,foo, Foo ");
    assert_eq!(cased, Ok(Some("Foo,foo".to_string())));
}
|
||||
|
||||
#[test]
fn normalize_excluded_dirs_input_is_round_trip_stable() {
    // Feeding the normaliser its own output must yield the same string;
    // PATCH-clearing edits round-trip cleanly through
    // parse_excluded_dirs_column as well.
    let messy = " /a/b ,, /a/b , c ";
    let first_pass = normalize_excluded_dirs_input(messy)
        .expect("validation passes")
        .expect("not empty");
    let second_pass = normalize_excluded_dirs_input(&first_pass)
        .expect("validation passes")
        .expect("not empty");
    assert_eq!(first_pass, second_pass);
    // The stored form parses back into the deduped Vec.
    assert_eq!(
        parse_excluded_dirs_column(Some(&first_pass)),
        vec!["/a/b".to_string(), "c".to_string()]
    );
}
|
||||
|
||||
#[test]
fn validate_rejects_backslash_paths() {
    // Windows-typed entries would fall into the component-pattern bucket
    // and never match — rejecting gives the user feedback instead of a
    // silent no-op.
    for bad in [r"\photos", r"photos\2024", r"\\server\share"] {
        assert!(validate_excluded_dirs_entry(bad).is_err());
    }
    // The message names the offending entry and points at the fix.
    let err = validate_excluded_dirs_entry(r"\photos").unwrap_err();
    assert!(err.contains("forward slashes"), "{}", err);
}
|
||||
|
||||
#[test]
fn validate_rejects_windows_drive_letters() {
    for bad in ["Z:/photos", "z:photos"] {
        assert!(validate_excluded_dirs_entry(bad).is_err());
    }
    // A single ASCII letter plus colon is the canonical drive prefix;
    // the message should steer the user toward the relative form.
    let err = validate_excluded_dirs_entry("Z:/foo").unwrap_err();
    assert!(err.contains("relative to the library root"), "{}", err);
}
|
||||
|
||||
#[test]
fn validate_rejects_multi_segment_name_without_leading_slash() {
    // The classic "I forgot the slash" typo used to store a
    // never-matching component pattern; now it's caught at save time.
    let err = validate_excluded_dirs_entry("photos/2024").unwrap_err();
    assert!(err.contains("multi-segment"), "{}", err);
    // The suggestion embeds the corrected, leading-slash form.
    assert!(err.contains("/photos/2024"), "{}", err);
}
|
||||
|
||||
#[test]
fn validate_rejects_parent_dir_traversal_in_path_entries() {
    // base.join("../sensitive") never canonicalises, so the resulting
    // prefix can't starts_with anything the walker produces.
    for bad in ["/../secret", "/photos/../keys"] {
        assert!(validate_excluded_dirs_entry(bad).is_err());
    }
    // A bare `..` as a component pattern is allowed: it can only fail to
    // match (a directory literally named `..` doesn't exist on any
    // filesystem we care about), and that failure mode isn't the silent
    // footgun the path-prefix case is.
    assert!(validate_excluded_dirs_entry("..").is_ok());
}
|
||||
|
||||
#[test]
fn validate_strips_trailing_slash_on_path_entries() {
    // Trailing slashes (one or many) are cosmetic and get stripped.
    assert_eq!(validate_excluded_dirs_entry("/photos/").unwrap(), "/photos");
    assert_eq!(validate_excluded_dirs_entry("/photos//").unwrap(), "/photos");
    // But a bare "/" — however many slashes — would exclude the library
    // root, which is almost certainly a typo, so it's rejected.
    assert!(validate_excluded_dirs_entry("/").is_err());
    assert!(validate_excluded_dirs_entry("///").is_err());
}
|
||||
|
||||
#[test]
fn validate_passes_valid_entries() {
    // Representative good inputs: absolute-from-root paths (one or more
    // segments) and single-segment component patterns.
    let valid = [
        "/photos",
        "/photos/2024",
        "/media/raw/private",
        "@eaDir",
        ".thumbnails",
        ".DS_Store",
        "node_modules",
    ];
    for entry in valid {
        assert!(
            validate_excluded_dirs_entry(entry).is_ok(),
            "expected {} to pass",
            entry
        );
    }
}
|
||||
|
||||
#[test]
fn normalize_aborts_on_invalid_entry() {
    // A single bad entry fails the whole patch — surfacing the problem
    // beats silently applying N-1 of N changes.
    let err = normalize_excluded_dirs_input("/photos, photos/2024").unwrap_err();
    assert!(err.contains("photos/2024"), "{}", err);
    // An all-valid mix still succeeds, proving the rejection above isn't
    // accidentally triggered by the shared "photos" prefix.
    let ok = normalize_excluded_dirs_input("/photos, @eaDir, /private/");
    assert_eq!(ok, Ok(Some("/photos,@eaDir,/private".to_string())));
}
|
||||
|
||||
fn probe_lib(id: i32, root: String) -> Library {
|
||||
Library {
|
||||
id,
|
||||
|
||||
@@ -128,8 +128,12 @@ fn main() -> std::io::Result<()> {
|
||||
// Start file watcher with playlist manager and preview generator
|
||||
let playlist_mgr_for_watcher = app_state.playlist_manager.as_ref().clone();
|
||||
let preview_gen_for_watcher = app_state.preview_clip_generator.as_ref().clone();
|
||||
// Both background jobs read from the shared `live_libraries` lock
|
||||
// so a PATCH /libraries/{id} that flips `enabled` or edits
|
||||
// `excluded_dirs` takes effect on the next watcher tick / cleanup
|
||||
// tick without an ImageApi restart.
|
||||
watcher::watch_files(
|
||||
app_state.libraries.clone(),
|
||||
app_state.live_libraries.clone(),
|
||||
playlist_mgr_for_watcher,
|
||||
preview_gen_for_watcher,
|
||||
app_state.face_client.clone(),
|
||||
@@ -142,7 +146,7 @@ fn main() -> std::io::Result<()> {
|
||||
// skips the whole cycle while any library is stale (a missing
|
||||
// source is indistinguishable from a transiently-unmounted share).
|
||||
watcher::cleanup_orphaned_playlists(
|
||||
app_state.libraries.clone(),
|
||||
app_state.live_libraries.clone(),
|
||||
app_state.excluded_dirs.clone(),
|
||||
app_state.library_health.clone(),
|
||||
);
|
||||
@@ -280,6 +284,7 @@ fn main() -> std::io::Result<()> {
|
||||
.service(ai::rate_insight_handler)
|
||||
.service(ai::export_training_data_handler)
|
||||
.service(libraries::list_libraries)
|
||||
.service(libraries::patch_library)
|
||||
.add_feature(add_tag_services::<_, SqliteTagDao>)
|
||||
.add_feature(knowledge::add_knowledge_services::<_, SqliteKnowledgeDao>)
|
||||
.add_feature(personas::add_persona_services)
|
||||
|
||||
16
src/state.rs
16
src/state.rs
@@ -18,15 +18,25 @@ use crate::video::actors::{
|
||||
};
|
||||
use actix::{Actor, Addr};
|
||||
use std::env;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::{Arc, Mutex, RwLock};
|
||||
|
||||
pub struct AppState {
|
||||
pub stream_manager: Arc<Addr<StreamActor>>,
|
||||
pub playlist_manager: Arc<Addr<VideoPlaylistManager>>,
|
||||
pub preview_clip_generator: Arc<Addr<PreviewClipGenerator>>,
|
||||
/// All configured media libraries. Ordered by `id` ascending; the first
|
||||
/// entry is the primary library.
|
||||
/// entry is the primary library. Frozen at startup — handlers that
|
||||
/// only need stable lookup (id → name / root_path) read this. Mutable
|
||||
/// flags (`enabled`, `excluded_dirs`) reflect their startup values;
|
||||
/// for live state see [`AppState::live_libraries`].
|
||||
pub libraries: Vec<Library>,
|
||||
/// Live view of the libraries table, shared mutably between the
|
||||
/// watcher (which reads it at the top of each tick to honour the
|
||||
/// latest `enabled` / `excluded_dirs`) and the PATCH /libraries/{id}
|
||||
/// handler (which writes it on a successful mutation). The split
|
||||
/// from [`AppState::libraries`] is deliberate: handlers that only
|
||||
/// look up by id don't need to take a lock per request.
|
||||
pub live_libraries: Arc<RwLock<Vec<Library>>>,
|
||||
/// Per-library availability snapshot. Updated by the file watcher at
|
||||
/// the top of each tick via `libraries::refresh_health`. HTTP handlers
|
||||
/// read it (e.g. `/libraries` surfacing). See "Library availability
|
||||
@@ -112,11 +122,13 @@ impl AppState {
|
||||
);
|
||||
|
||||
let library_health = libraries::new_health_map(&libraries_vec);
|
||||
let live_libraries = Arc::new(RwLock::new(libraries_vec.clone()));
|
||||
Self {
|
||||
stream_manager,
|
||||
playlist_manager: Arc::new(video_playlist_manager.start()),
|
||||
preview_clip_generator: Arc::new(preview_clip_generator.start()),
|
||||
libraries: libraries_vec,
|
||||
live_libraries,
|
||||
library_health,
|
||||
base_path,
|
||||
thumbnail_path,
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::{Arc, Mutex, RwLock};
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
use actix::Addr;
|
||||
@@ -42,9 +42,13 @@ use crate::thumbnails;
|
||||
use crate::video;
|
||||
use crate::video::actors::{GeneratePreviewClipMessage, QueueVideosMessage, VideoPlaylistManager};
|
||||
|
||||
/// Clean up orphaned HLS playlists and segments whose source videos no longer exist
|
||||
/// Clean up orphaned HLS playlists and segments whose source videos no longer exist.
|
||||
///
|
||||
/// `libs_lock` is the shared live view of the libraries table — read at the
|
||||
/// top of each cleanup pass so a PATCH /libraries/{id} that disables or
|
||||
/// re-mounts a library is picked up without a restart.
|
||||
pub fn cleanup_orphaned_playlists(
|
||||
libs: Vec<libraries::Library>,
|
||||
libs_lock: Arc<RwLock<Vec<libraries::Library>>>,
|
||||
excluded_dirs: Vec<String>,
|
||||
library_health: libraries::LibraryHealthMap,
|
||||
) {
|
||||
@@ -60,16 +64,25 @@ pub fn cleanup_orphaned_playlists(
|
||||
info!("Starting orphaned playlist cleanup job");
|
||||
info!(" Cleanup interval: {} seconds", cleanup_interval_secs);
|
||||
info!(" Playlist directory: {}", video_path);
|
||||
for lib in &libs {
|
||||
{
|
||||
let libs = libs_lock.read().unwrap_or_else(|e| e.into_inner());
|
||||
for lib in libs.iter() {
|
||||
info!(
|
||||
" Checking sources under '{}' at {}",
|
||||
lib.name, lib.root_path
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
std::thread::sleep(Duration::from_secs(cleanup_interval_secs));
|
||||
|
||||
// Fresh snapshot per tick so a PATCH /libraries/{id} that
|
||||
// disabled a library (or rewrote its excluded_dirs) is
|
||||
// honoured immediately.
|
||||
let libs: Vec<libraries::Library> =
|
||||
libs_lock.read().unwrap_or_else(|e| e.into_inner()).clone();
|
||||
|
||||
// Safety gate: skip the cleanup cycle if any library is
|
||||
// stale. A missing source video on a stale library is
|
||||
// indistinguishable from a transient unmount, and the
|
||||
@@ -211,7 +224,7 @@ pub fn cleanup_orphaned_playlists(
|
||||
}
|
||||
|
||||
pub fn watch_files(
|
||||
libs: Vec<libraries::Library>,
|
||||
libs_lock: Arc<RwLock<Vec<libraries::Library>>>,
|
||||
playlist_manager: Addr<VideoPlaylistManager>,
|
||||
preview_generator: Addr<video::actors::PreviewClipGenerator>,
|
||||
face_client: crate::ai::face_client::FaceClient,
|
||||
@@ -247,12 +260,15 @@ pub fn watch_files(
|
||||
or APOLLO_API_BASE_URL to enable)"
|
||||
);
|
||||
}
|
||||
for lib in &libs {
|
||||
{
|
||||
let libs = libs_lock.read().unwrap_or_else(|e| e.into_inner());
|
||||
for lib in libs.iter() {
|
||||
info!(
|
||||
" Watching library '{}' (id={}) at {}",
|
||||
lib.name, lib.id, lib.root_path
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create DAOs for tracking processed files
|
||||
let exif_dao = Arc::new(Mutex::new(
|
||||
@@ -306,7 +322,9 @@ pub fn watch_files(
|
||||
// below; no ingest runs here, just the health update + log.
|
||||
// Disabled libraries skip the probe entirely — they should
|
||||
// never enter the health map (treated as out-of-scope).
|
||||
for lib in &libs {
|
||||
{
|
||||
let libs = libs_lock.read().unwrap_or_else(|e| e.into_inner());
|
||||
for lib in libs.iter() {
|
||||
if !lib.enabled {
|
||||
continue;
|
||||
}
|
||||
@@ -319,6 +337,7 @@ pub fn watch_files(
|
||||
.unwrap_or(false);
|
||||
libraries::refresh_health(&library_health, lib, had_data);
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
std::thread::sleep(Duration::from_secs(quick_interval_secs));
|
||||
@@ -330,6 +349,11 @@ pub fn watch_files(
|
||||
|
||||
let is_full_scan = since_last_full.as_secs() >= full_interval_secs;
|
||||
|
||||
// Fresh snapshot per tick — picks up PATCH /libraries/{id}
|
||||
// mutations to `enabled` / `excluded_dirs` without restart.
|
||||
let libs: Vec<libraries::Library> =
|
||||
libs_lock.read().unwrap_or_else(|e| e.into_inner()).clone();
|
||||
|
||||
for lib in &libs {
|
||||
// Operator kill switch: a disabled library is invisible
|
||||
// to the watcher entirely. No probe, no ingest, no
|
||||
|
||||
Reference in New Issue
Block a user