Files
ImageApi/src/files.rs
Cameron Cordes 7621282419 Thumb orientation + library filter on /photos/exif
Two follow-ups on the same feature branch:

1. Bake EXIF orientation into generated thumbnails. The `image` crate
   doesn't apply Orientation on load, and `save_with_format(..Jpeg)`
   drops EXIF — so portrait phone shots ended up sideways in any client
   that displays the cached thumb directly (no EXIF tag is left for the
   browser to compensate with). New `exif::read_orientation` reads the tag
   cheaply (no full EXIF parse) and `exif::apply_orientation` does the
   rotate/flip via image's existing `rotate90/180/270` + `fliph/flipv`.
   Applied in both branches of `generate_image_thumbnail` (RAW embedded-
   JPEG path and the regular `image::open` path). Existing thumbnails
   in the cache are still wrong-orientation; wipe the thumb dir or run
   a one-off backfill once this lands.

2. Optional `library` query param on `/photos/exif`. Accepts numeric id
   or name (same shape as `/image?library=...`), resolved via the
   existing `resolve_library_param` helper so a bad value 400s before
   we touch the DAO. Filter is applied post-query in the handler
   rather than pushed into `query_by_exif` to keep the DAO trait
   (and its test mocks) unchanged. Cheap enough at typical library
   counts; can be moved into SQL later if it ever isn't.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-27 17:29:36 -04:00

1994 lines
70 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
use ::anyhow;
use actix::{Handler, Message};
use anyhow::Context;
use std::collections::HashSet;
use std::fmt::Debug;
use std::fs::read_dir;
use std::io;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::time::SystemTime;
use crate::data::{
Claims, ExifBatchRequest, ExifBatchResponse, ExifSummary, FilesRequest, FilterMode, MediaType,
PhotosResponse, SortType,
};
use crate::database::ExifDao;
use crate::file_types;
use crate::geo::{gps_bounding_box, haversine_distance};
use crate::memories::extract_date_from_filename;
use crate::utils::earliest_fs_time;
use crate::{AppState, create_thumbnails};
use actix_web::web::Data;
use actix_web::{
HttpRequest, HttpResponse,
web::{self, Query},
};
use chrono::{DateTime, Utc};
use log::{debug, error, info, trace, warn};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use crate::data::SortType::NameAsc;
use crate::error::IntoHttpError;
use crate::otel::{extract_context_from_request, global_tracer};
use crate::tags::{FileWithTagCount, TagDao};
use crate::video::actors::StreamActor;
use path_absolutize::*;
use rand::prelude::SliceRandom;
use rand::thread_rng;
/// File metadata for sorting and filtering
/// Includes tag count and optional date for date-based sorting
pub struct FileWithMetadata {
    // Library-relative path using '/' separators (the key used by tag and
    // EXIF lookups elsewhere in this file).
    pub file_name: String,
    // Number of tags attached to the file; drives the TagCount* sort modes.
    pub tag_count: i64,
    pub date_taken: Option<i64>, // Unix timestamp from EXIF or filename extraction
    // Id of the library whose root currently holds this file.
    pub library_id: i32,
}
use serde::Deserialize;
/// Apply sorting to files with EXIF data support for date-based sorting.
///
/// Date sorts (`DateTakenAsc`/`DateTakenDesc`) delegate to
/// `in_memory_date_sort`, which batch-fetches EXIF dates with
/// filename/filesystem fallbacks. All other sort types use the cheap
/// in-memory `sort` helper. Pagination (`limit` + `offset`) is applied
/// after sorting.
///
/// Returns `(sorted_file_paths, sorted_library_ids, total_count)`, where
/// `total_count` is the pre-pagination number of input files.
fn apply_sorting_with_exif(
    files: Vec<FileWithTagCount>,
    file_libraries: Vec<i32>,
    sort_type: SortType,
    exif_dao: &mut Box<dyn ExifDao>,
    span_context: &opentelemetry::Context,
    libraries: &[crate::libraries::Library],
    limit: Option<i64>,
    offset: i64,
) -> (Vec<String>, Vec<i32>, i64) {
    let total_count = files.len() as i64;
    match sort_type {
        SortType::DateTakenAsc | SortType::DateTakenDesc => {
            info!("Date sorting requested, using in-memory sort with EXIF/filename fallback");
            let (sorted, sorted_libs, _) = in_memory_date_sort(
                files,
                file_libraries,
                sort_type,
                exif_dao,
                span_context,
                libraries,
                limit,
                offset,
            );
            (sorted, sorted_libs, total_count)
        }
        _ => {
            let (sorted, sorted_libs) = sort(files, file_libraries, sort_type);
            let (result, result_libs) = if let Some(limit_val) = limit {
                let skip = offset as usize;
                let take = limit_val as usize;
                // Consume the sorted vectors instead of cloning every
                // surviving String/i32: each vector is moved exactly once.
                (
                    sorted.into_iter().skip(skip).take(take).collect(),
                    sorted_libs.into_iter().skip(skip).take(take).collect(),
                )
            } else {
                (sorted, sorted_libs)
            };
            (result, result_libs, total_count)
        }
    }
}
/// Fallback in-memory date sorting with EXIF/filename extraction.
///
/// Date resolution order per file:
/// 1. `date_taken` from the batch EXIF fetch, keyed by
///    `(rel_path, library_id)` so union-mode duplicates resolve to the
///    requesting row's library;
/// 2. a date parsed out of the filename (`extract_date_from_filename`);
/// 3. the earliest filesystem timestamp of the file under its library root.
///
/// Returns `(sorted_paths, sorted_library_ids, total_count)`; pagination is
/// applied after the sort when `limit` is provided.
fn in_memory_date_sort(
    files: Vec<FileWithTagCount>,
    file_libraries: Vec<i32>,
    sort_type: SortType,
    exif_dao: &mut Box<dyn ExifDao>,
    span_context: &opentelemetry::Context,
    libraries: &[crate::libraries::Library],
    limit: Option<i64>,
    offset: i64,
) -> (Vec<String>, Vec<i32>, i64) {
    let total_count = files.len() as i64;
    let file_paths: Vec<String> = files.iter().map(|f| f.file_name.clone()).collect();
    // Batch fetch EXIF data (keyed by rel_path; in union mode a rel_path may
    // correspond to rows in multiple libraries — pick the date from the one
    // matching the requesting row's library_id when possible).
    let exif_rows = exif_dao
        .get_exif_batch(span_context, &file_paths)
        .unwrap_or_default();
    let exif_map: std::collections::HashMap<(String, i32), i64> = exif_rows
        .into_iter()
        .filter_map(|exif| {
            exif.date_taken
                .map(|dt| ((exif.file_path, exif.library_id), dt))
        })
        .collect();
    let lib_roots: std::collections::HashMap<i32, &str> = libraries
        .iter()
        .map(|l| (l.id, l.root_path.as_str()))
        .collect();
    // Convert to FileWithMetadata with date fallback logic
    let files_with_metadata: Vec<FileWithMetadata> = files
        .into_iter()
        .zip(file_libraries.iter().copied())
        .map(|(f, lib_id)| {
            let date_taken = exif_map
                .get(&(f.file_name.clone(), lib_id))
                .copied()
                .or_else(|| extract_date_from_filename(&f.file_name).map(|dt| dt.timestamp()))
                .or_else(|| {
                    // Last resort: stat the file on disk and use its
                    // earliest filesystem timestamp.
                    lib_roots.get(&lib_id).and_then(|root| {
                        let full_path = Path::new(root).join(&f.file_name);
                        std::fs::metadata(full_path)
                            .ok()
                            .and_then(|md| earliest_fs_time(&md))
                            .map(|system_time| {
                                <SystemTime as Into<DateTime<Utc>>>::into(system_time).timestamp()
                            })
                    })
                });
            FileWithMetadata {
                file_name: f.file_name,
                tag_count: f.tag_count,
                date_taken,
                library_id: lib_id,
            }
        })
        .collect();
    let (sorted, sorted_libs) = sort_with_metadata(files_with_metadata, sort_type);
    let (result, result_libs) = if let Some(limit_val) = limit {
        let skip = offset as usize;
        let take = limit_val as usize;
        // Move the requested page out of the sorted vectors rather than
        // cloning each element (`iter().cloned()` previously copied every
        // surviving String).
        (
            sorted
                .into_iter()
                .skip(skip)
                .take(take)
                .collect::<Vec<String>>(),
            sorted_libs
                .into_iter()
                .skip(skip)
                .take(take)
                .collect::<Vec<i32>>(),
        )
    } else {
        (sorted, sorted_libs)
    };
    (result, result_libs, total_count)
}
/// `GET /photos` handler: lists photos (and, in non-recursive mode,
/// directories) under `req.path`, optionally filtered by tags, EXIF
/// criteria, media type and library, then sorted and paginated.
///
/// Two main flows:
/// - tag search + recursive: answered entirely from the tag DAO (early
///   `return` inside the `if let` below);
/// - everything else: enumerate files (DB-backed when recursive, disk walk
///   when not), then filter/sort/paginate in memory.
pub async fn list_photos<TagD: TagDao, FS: FileSystemAccess>(
    _: Claims,
    request: HttpRequest,
    req: Query<FilesRequest>,
    app_state: Data<AppState>,
    file_system: Data<FS>,
    tag_dao: Data<Mutex<TagD>>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> HttpResponse {
    let search_path = &req.path;
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("list_photos", &context);
    // Record every request parameter on the span up front so traces are
    // self-describing even when the request bails out early.
    span.set_attributes(vec![
        KeyValue::new("path", search_path.to_string()),
        KeyValue::new("recursive", req.recursive.unwrap_or(false).to_string()),
        KeyValue::new(
            "tag_ids",
            req.tag_ids.clone().unwrap_or_default().to_string(),
        ),
        KeyValue::new(
            "tag_filter_mode",
            format!("{:?}", req.tag_filter_mode.unwrap_or(FilterMode::Any)),
        ),
        KeyValue::new(
            "exclude_tag_ids",
            req.exclude_tag_ids.clone().unwrap_or_default().to_string(),
        ),
        KeyValue::new("sort", format!("{:?}", &req.sort.unwrap_or(NameAsc))),
        // EXIF search parameters
        KeyValue::new("camera_make", req.camera_make.clone().unwrap_or_default()),
        KeyValue::new("camera_model", req.camera_model.clone().unwrap_or_default()),
        KeyValue::new("lens_model", req.lens_model.clone().unwrap_or_default()),
        KeyValue::new(
            "gps_lat",
            req.gps_lat.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "gps_lon",
            req.gps_lon.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "gps_radius_km",
            req.gps_radius_km.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "date_from",
            req.date_from.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "date_to",
            req.date_to.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "media_type",
            req.media_type
                .as_ref()
                .map(|mt| format!("{:?}", mt))
                .unwrap_or_default(),
        ),
        // Pagination parameters
        KeyValue::new("pagination.enabled", req.limit.is_some().to_string()),
        KeyValue::new(
            "pagination.limit",
            req.limit.map(|l| l.to_string()).unwrap_or_default(),
        ),
        KeyValue::new("pagination.offset", req.offset.unwrap_or(0).to_string()),
        // Optimization flags
        KeyValue::new("optimization.batch_tags", "true"),
        KeyValue::new(
            "optimization.db_sort",
            matches!(
                req.sort,
                Some(SortType::DateTakenAsc | SortType::DateTakenDesc)
            )
            .to_string(),
        ),
        KeyValue::new("library", req.library.clone().unwrap_or_default()),
    ]);
    // Resolve the optional library filter. Unknown values return 400. A
    // `None` result means "union across all libraries" and downstream
    // walks iterate every configured library root.
    let library = match crate::libraries::resolve_library_param(&app_state, req.library.as_deref())
    {
        Ok(lib) => lib,
        Err(msg) => {
            log::warn!("Rejecting /photos request: {}", msg);
            return HttpResponse::BadRequest().body(msg);
        }
    };
    let span_context = opentelemetry::Context::current_with_span(span);
    // Check if EXIF filtering is requested
    // NOTE(review): only gps_lat participates in this check — a request
    // supplying just gps_lon and/or gps_radius_km never enters the branch
    // below, so it skips the "all three GPS params" validation entirely.
    // Confirm whether that should also trigger the 400.
    let has_exif_filters = req.camera_make.is_some()
        || req.camera_model.is_some()
        || req.lens_model.is_some()
        || req.gps_lat.is_some()
        || req.date_from.is_some()
        || req.date_to.is_some();
    // Apply EXIF-based filtering if requested
    let exif_matched_files: Option<HashSet<String>> = if has_exif_filters {
        // Validate GPS parameters (all 3 must be present together)
        if (req.gps_lat.is_some() || req.gps_lon.is_some() || req.gps_radius_km.is_some())
            && !(req.gps_lat.is_some() && req.gps_lon.is_some() && req.gps_radius_km.is_some())
        {
            warn!("GPS search requires lat, lon, and radius_km to all be specified");
            span_context
                .span()
                .set_status(Status::error("Invalid GPS parameters"));
            return HttpResponse::BadRequest().body("GPS search requires lat, lon, and radius_km");
        }
        // Calculate GPS bounding box if GPS search is requested
        let gps_bounds = if let (Some(lat), Some(lon), Some(radius_km)) =
            (req.gps_lat, req.gps_lon, req.gps_radius_km)
        {
            let (min_lat, max_lat, min_lon, max_lon) = gps_bounding_box(lat, lon, radius_km);
            Some((min_lat, max_lat, min_lon, max_lon))
        } else {
            None
        };
        // Query EXIF database
        let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
        let exif_results = exif_dao_guard
            .query_by_exif(
                &span_context,
                req.camera_make.as_deref(),
                req.camera_model.as_deref(),
                req.lens_model.as_deref(),
                gps_bounds,
                req.date_from,
                req.date_to,
            )
            // A failed EXIF query degrades to "no matches" rather than 500.
            .unwrap_or_else(|e| {
                warn!("EXIF query failed: {:?}", e);
                Vec::new()
            });
        // Apply precise GPS distance filtering if GPS search was requested
        // (the bounding box above is a coarse prefilter; haversine is exact).
        let filtered_results = if let (Some(lat), Some(lon), Some(radius_km)) =
            (req.gps_lat, req.gps_lon, req.gps_radius_km)
        {
            exif_results
                .into_iter()
                .filter(|exif| {
                    if let (Some(photo_lat), Some(photo_lon)) =
                        (exif.gps_latitude, exif.gps_longitude)
                    {
                        let distance =
                            haversine_distance(lat, lon, photo_lat as f64, photo_lon as f64);
                        distance <= radius_km
                    } else {
                        false
                    }
                })
                .map(|exif| exif.file_path)
                .collect::<HashSet<String>>()
        } else {
            exif_results
                .into_iter()
                .map(|exif| exif.file_path)
                .collect::<HashSet<String>>()
        };
        info!("EXIF filtering matched {} files", filtered_results.len());
        Some(filtered_results)
    } else {
        None
    };
    // In scoped mode (`library` is Some) we gate tag-based results (which
    // key on rel_path only) by "does this rel_path actually exist on disk
    // in the selected library's root". In union mode we assign each
    // returned file to the first library it resolves in, and drop files
    // that exist in no configured library.
    let libraries_to_scan: Vec<&crate::libraries::Library> = match library {
        Some(lib) => vec![lib],
        None => app_state.libraries.iter().collect(),
    };
    let search_recursively = req.recursive.unwrap_or(false);
    // Fast path: recursive tag search is answered from the tag DAO and
    // returns directly from inside this block.
    if let Some(tag_ids) = &req.tag_ids
        && search_recursively
    {
        let filter_mode = &req.tag_filter_mode.unwrap_or(FilterMode::Any);
        info!(
            "Searching for tags: {}. With path: '{}' and filter mode: {:?}",
            tag_ids, search_path, filter_mode
        );
        let mut dao = tag_dao.lock().expect("Unable to get TagDao");
        // Non-numeric ids in the CSV are silently dropped.
        let tag_ids = tag_ids
            .split(',')
            .filter_map(|t| t.parse().ok())
            .collect::<Vec<i32>>();
        let exclude_tag_ids = req
            .exclude_tag_ids
            .clone()
            .unwrap_or_default()
            .split(',')
            .filter_map(|t| t.parse().ok())
            .collect::<Vec<i32>>();
        return match filter_mode {
            FilterMode::Any => {
                dao.get_files_with_any_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
            }
            FilterMode::All => {
                dao.get_files_with_all_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
            }
        }
        .context(format!(
            "Failed to get files with tag_ids: {:?} with filter_mode: {:?}",
            tag_ids, filter_mode
        ))
        .inspect(|files| {
            info!(
                "Found {:?} tagged files, filtering down by search path {:?}",
                files.len(),
                search_path
            )
        })
        .map(|tagged_files| {
            tagged_files
                .into_iter()
                .filter(|f| {
                    // When searching at the root, everything matches recursively
                    if search_path.trim() == "" {
                        return true;
                    }
                    f.file_name.starts_with(&format!(
                        "{}/",
                        search_path.strip_suffix('/').unwrap_or_else(|| search_path)
                    ))
                })
                .filter(|f| {
                    // Apply EXIF filtering if present
                    if let Some(ref exif_files) = exif_matched_files {
                        exif_files.contains(&f.file_name)
                    } else {
                        true
                    }
                })
                .filter_map(|f| {
                    // Apply media type filter first (cheap check before disk I/O).
                    if let Some(ref media_type) = req.media_type {
                        let path = PathBuf::from(&f.file_name);
                        if !matches_media_type(&path, media_type) {
                            return None;
                        }
                    }
                    // Resolve the file's library by checking each
                    // candidate library's root on disk. Falls back to
                    // content-hash siblings if the rel_path was
                    // registered under a different path but same content.
                    for lib in &libraries_to_scan {
                        if PathBuf::from(&lib.root_path).join(&f.file_name).exists() {
                            return Some((f, lib.id));
                        }
                    }
                    // Lock scope kept tight: the guard is dropped at the
                    // end of this block, before the per-library exists()
                    // probes below.
                    let siblings = {
                        let mut dao = exif_dao.lock().expect("Unable to get ExifDao");
                        match dao
                            .find_content_hash_anywhere(&span_context, &f.file_name)
                            .unwrap_or(None)
                        {
                            Some(hash) => dao
                                .get_rel_paths_by_hash(&span_context, &hash)
                                .unwrap_or_default(),
                            None => Vec::new(),
                        }
                    };
                    for lib in &libraries_to_scan {
                        if siblings
                            .iter()
                            .any(|p| PathBuf::from(&lib.root_path).join(p).exists())
                        {
                            return Some((f, lib.id));
                        }
                    }
                    // Tags are library-agnostic. If we can't confirm which
                    // library currently holds the file on disk (e.g. the
                    // tagged rel_path is stale or the caller is testing
                    // without real files), keep the tagged row and
                    // attribute it to the primary library so the client
                    // still sees the tag hit.
                    if library.is_none() {
                        Some((f, app_state.primary_library().id))
                    } else {
                        None
                    }
                })
                .collect::<Vec<(FileWithTagCount, i32)>>()
        })
        .map(|paired| {
            // Handle sorting - use helper function that supports EXIF date sorting and pagination
            let sort_type = req.sort.unwrap_or(NameAsc);
            let limit = req.limit;
            let offset = req.offset.unwrap_or(0);
            let (files, file_libs): (Vec<FileWithTagCount>, Vec<i32>) = paired.into_iter().unzip();
            let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
            let result = apply_sorting_with_exif(
                files,
                file_libs,
                sort_type,
                &mut exif_dao_guard,
                &span_context,
                &app_state.libraries,
                limit,
                offset,
            );
            drop(exif_dao_guard);
            result
        })
        .inspect(|(files, _libs, total)| debug!("Found {:?} files (total: {})", files.len(), total))
        .map(|(tagged_files, photo_libraries, total_count)| {
            info!(
                "Found {:?} tagged files: {:?}",
                tagged_files.len(),
                tagged_files
            );
            let returned_count = tagged_files.len() as i64;
            let offset = req.offset.unwrap_or(0);
            // (total_count, has_more, next_offset) — only populated when
            // the caller asked for pagination.
            let pagination_metadata = if req.limit.is_some() {
                (
                    Some(total_count),
                    Some(offset + returned_count < total_count),
                    if offset + returned_count < total_count {
                        Some(offset + returned_count)
                    } else {
                        None
                    },
                )
            } else {
                (None, None, None)
            };
            span_context
                .span()
                .set_attribute(KeyValue::new("file_count", tagged_files.len().to_string()));
            span_context
                .span()
                .set_attribute(KeyValue::new("total_count", total_count.to_string()));
            span_context.span().set_status(Status::Ok);
            HttpResponse::Ok().json(PhotosResponse {
                photos: tagged_files,
                dirs: vec![],
                photo_libraries,
                total_count: pagination_metadata.0,
                has_more: pagination_metadata.1,
                next_offset: pagination_metadata.2,
            })
        })
        .into_http_internal_err()
        .unwrap_or_else(|e| e.error_response());
    }
    // In scoped mode `libraries_to_scan` has one entry (the selected library);
    // in union mode we enumerate every configured library and intermix results.
    //
    // Recursive mode pulls rel_paths from image_exif (kept in parity with disk
    // by the watcher's full-scan reconciliation) instead of walking — a ~10k
    // file library drops from multi-second to ~10ms for the listing itself.
    // Non-recursive mode still walks because we need directory metadata for
    // the `dirs` response and listing a single directory is cheap.
    let mut file_names: Vec<String> = Vec::new();
    let mut file_libraries: Vec<i32> = Vec::new();
    let mut dirs_set: std::collections::HashSet<String> = std::collections::HashSet::new();
    let mut any_library_resolved = false;
    if search_recursively {
        let start_db_list = std::time::Instant::now();
        let lib_ids: Vec<i32> = libraries_to_scan.iter().map(|l| l.id).collect();
        let trimmed = search_path.trim();
        let prefix = if trimmed.is_empty() || trimmed == "/" {
            None
        } else {
            Some(trimmed)
        };
        let rows = {
            let mut dao = exif_dao.lock().expect("Unable to get ExifDao");
            dao.list_rel_paths_for_libraries(&span_context, &lib_ids, prefix)
                .unwrap_or_else(|e| {
                    warn!("list_rel_paths_for_libraries failed: {:?}", e);
                    Vec::new()
                })
        };
        info!(
            "DB-backed recursive listing: {} files across {} libraries in {:?}",
            rows.len(),
            lib_ids.len(),
            start_db_list.elapsed()
        );
        any_library_resolved = true;
        for (lib_id, path) in rows {
            file_libraries.push(lib_id);
            file_names.push(path);
        }
    } else {
        for lib in &libraries_to_scan {
            // The primary library goes through the injected FS abstraction
            // (mockable in tests); other libraries are walked directly.
            let files_result = if lib.id == app_state.primary_library().id {
                file_system.get_files_for_path(search_path)
            } else {
                is_valid_full_path(
                    &PathBuf::from(&lib.root_path),
                    &PathBuf::from(search_path),
                    false,
                )
                .map(|path| {
                    debug!("Valid path for non-recursive search: {:?}", path);
                    list_files(&path).unwrap_or_default()
                })
                .context("Invalid path")
            };
            let files = match files_result {
                Ok(f) => {
                    any_library_resolved = true;
                    f
                }
                Err(e) => {
                    // A path may legitimately exist in only some libraries;
                    // skip the ones where it doesn't resolve.
                    debug!(
                        "Skipping library '{}' for path '{}': {:?}",
                        lib.name, search_path, e
                    );
                    continue;
                }
            };
            info!(
                "Found {:?} files in library '{}' path: {:?} (recursive: {})",
                files.len(),
                lib.name,
                search_path,
                search_recursively
            );
            for path in &files {
                match path.metadata() {
                    Ok(md) => {
                        let relative = path.strip_prefix(&lib.root_path).unwrap_or_else(|_| {
                            panic!(
                                "Unable to strip library root {} from file path {}",
                                &lib.root_path,
                                path.display()
                            )
                        });
                        // Normalize separators to '/' so downstream lookups
                        // (tags, EXIF, insights) that store rel_paths with
                        // forward slashes still match on Windows.
                        let relative_str = relative.to_str().unwrap().replace('\\', "/");
                        if md.is_file() {
                            file_names.push(relative_str);
                            file_libraries.push(lib.id);
                        } else if md.is_dir() {
                            dirs_set.insert(relative_str);
                        }
                    }
                    Err(e) => {
                        error!("Failed getting file metadata: {:?}", e);
                        // Include files without metadata if they have extensions
                        if path.extension().is_some() {
                            let relative = path.strip_prefix(&lib.root_path).unwrap_or_else(|_| {
                                panic!(
                                    "Unable to strip library root {} from file path {}",
                                    &lib.root_path,
                                    path.display()
                                )
                            });
                            file_names.push(relative.to_str().unwrap().replace('\\', "/"));
                            file_libraries.push(lib.id);
                        }
                    }
                }
            }
        }
    }
    if !any_library_resolved {
        error!("Bad photos request: {}", req.path);
        span_context
            .span()
            .set_status(Status::error("Invalid path"));
        return HttpResponse::BadRequest().finish();
    }
    let dirs: Vec<String> = dirs_set.into_iter().collect();
    info!(
        "Starting to filter {} files from filesystem",
        file_names.len()
    );
    let start_filter = std::time::Instant::now();
    info!(
        "File filtering took {:?}, now fetching tag counts for {} files",
        start_filter.elapsed(),
        file_names.len()
    );
    let start_tags = std::time::Instant::now();
    // Batch query for tag counts (tags are library-agnostic / keyed by rel_path).
    let tag_counts = {
        let mut tag_dao_guard = tag_dao.lock().expect("Unable to get TagDao");
        tag_dao_guard
            .get_tag_counts_batch(&span_context, &file_names)
            .unwrap_or_default()
    };
    info!("Batch tag count query took {:?}", start_tags.elapsed());
    let start_tag_filter = std::time::Instant::now();
    // Full per-file tag lists are only needed when tag filtering is active
    // (non-recursive tag search lands here rather than the fast path above).
    let file_tags_map: std::collections::HashMap<String, Vec<crate::tags::Tag>> =
        if req.tag_ids.is_some() || req.exclude_tag_ids.is_some() {
            info!(
                "Tag filtering requested, fetching full tag lists for {} files",
                file_names.len()
            );
            let mut tag_dao_guard = tag_dao.lock().expect("Unable to get TagDao");
            file_names
                .iter()
                .filter_map(|file_name| {
                    tag_dao_guard
                        .get_tags_for_path(&span_context, file_name)
                        .ok()
                        .map(|tags| (file_name.clone(), tags))
                })
                .collect()
        } else {
            std::collections::HashMap::new()
        };
    if req.tag_ids.is_some() || req.exclude_tag_ids.is_some() {
        info!("Full tag list fetch took {:?}", start_tag_filter.elapsed());
    }
    // Filter + pair with the parallel library_id while preserving ordering
    // so the downstream sort can return both arrays in lockstep.
    let photos_with_libs: Vec<(FileWithTagCount, i32)> = file_names
        .into_iter()
        .zip(file_libraries.into_iter())
        .filter_map(|(file_name, lib_id)| {
            let file_tags = file_tags_map.get(&file_name).cloned().unwrap_or_default();
            if let Some(tag_ids_csv) = &req.tag_ids {
                let tag_ids = tag_ids_csv
                    .split(',')
                    .filter_map(|t| t.parse().ok())
                    .collect::<Vec<i32>>();
                let excluded_tag_ids = req
                    .exclude_tag_ids
                    .clone()
                    .unwrap_or_default()
                    .split(',')
                    .filter_map(|t| t.parse().ok())
                    .collect::<Vec<i32>>();
                let filter_mode = req.tag_filter_mode.unwrap_or(FilterMode::Any);
                // Exclusions always win over inclusions.
                let excluded = file_tags.iter().any(|t| excluded_tag_ids.contains(&t.id));
                let keep = !excluded
                    && match filter_mode {
                        FilterMode::Any => file_tags.iter().any(|t| tag_ids.contains(&t.id)),
                        FilterMode::All => tag_ids
                            .iter()
                            .all(|id| file_tags.iter().any(|tag| &tag.id == id)),
                    };
                if !keep {
                    return None;
                }
            }
            if let Some(ref exif_files) = exif_matched_files
                && !exif_files.contains(&file_name)
            {
                return None;
            }
            if let Some(ref media_type) = req.media_type {
                let path = PathBuf::from(&file_name);
                if !matches_media_type(&path, media_type) {
                    return None;
                }
            }
            let tag_count = *tag_counts.get(&file_name).unwrap_or(&0);
            Some((
                FileWithTagCount {
                    file_name,
                    tag_count,
                },
                lib_id,
            ))
        })
        .collect();
    info!(
        "After all filters, {} files remain (filtering took {:?})",
        photos_with_libs.len(),
        start_filter.elapsed()
    );
    // Extract pagination parameters
    let limit = req.limit;
    let offset = req.offset.unwrap_or(0);
    let start_sort = std::time::Instant::now();
    let (photos, file_libs_sorted_input): (Vec<FileWithTagCount>, Vec<i32>) =
        photos_with_libs.into_iter().unzip();
    let (response_files, response_libraries, total_count) = if let Some(sort_type) = req.sort {
        info!("Sorting {} files by {:?}", photos.len(), sort_type);
        let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
        let result = apply_sorting_with_exif(
            photos,
            file_libs_sorted_input,
            sort_type,
            &mut exif_dao_guard,
            &span_context,
            &app_state.libraries,
            limit,
            offset,
        );
        drop(exif_dao_guard);
        result
    } else {
        // No sorting requested - apply pagination if requested
        let total = photos.len() as i64;
        let (paged_files, paged_libs): (Vec<String>, Vec<i32>) = if let Some(limit_val) = limit {
            photos
                .into_iter()
                .zip(file_libs_sorted_input)
                .skip(offset as usize)
                .take(limit_val as usize)
                .map(|(f, lib)| (f.file_name, lib))
                .unzip()
        } else {
            photos
                .into_iter()
                .zip(file_libs_sorted_input)
                .map(|(f, lib)| (f.file_name, lib))
                .unzip()
        };
        (paged_files, paged_libs, total)
    };
    info!(
        "Sorting took {:?}, returned {} files (total: {})",
        start_sort.elapsed(),
        response_files.len(),
        total_count
    );
    let returned_count = response_files.len() as i64;
    // (total_count, has_more, next_offset) — only populated when paginating.
    let pagination_metadata = if limit.is_some() {
        (
            Some(total_count),
            Some(offset + returned_count < total_count),
            if offset + returned_count < total_count {
                Some(offset + returned_count)
            } else {
                None
            },
        )
    } else {
        (None, None, None)
    };
    span_context.span().set_attribute(KeyValue::new(
        "file_count",
        response_files.len().to_string(),
    ));
    span_context
        .span()
        .set_attribute(KeyValue::new("returned_count", returned_count.to_string()));
    span_context
        .span()
        .set_attribute(KeyValue::new("total_count", total_count.to_string()));
    span_context.span().set_status(Status::Ok);
    HttpResponse::Ok().json(PhotosResponse {
        photos: response_files,
        dirs,
        photo_libraries: response_libraries,
        total_count: pagination_metadata.0,
        has_more: pagination_metadata.1,
        next_offset: pagination_metadata.2,
    })
}
/// Sort `(file, library_id)` pairs by the requested criterion and split
/// the result into parallel name/library vectors.
///
/// Date-based sorts are not supported at this level (no date is available
/// on `FileWithTagCount`); they warn and fall back to an ascending name
/// sort — callers wanting real date order use `in_memory_date_sort`.
fn sort(
    files: Vec<FileWithTagCount>,
    file_libraries: Vec<i32>,
    sort_type: SortType,
) -> (Vec<String>, Vec<i32>) {
    let mut entries: Vec<(FileWithTagCount, i32)> =
        files.into_iter().zip(file_libraries).collect();
    if matches!(
        sort_type,
        SortType::DateTakenAsc | SortType::DateTakenDesc
    ) {
        warn!("Date sorting not implemented for FileWithTagCount");
    }
    if let SortType::Shuffle = sort_type {
        entries.shuffle(&mut thread_rng());
    } else {
        entries.sort_by(|a, b| match sort_type {
            SortType::NameDesc => b.0.file_name.cmp(&a.0.file_name),
            SortType::TagCountAsc => a.0.tag_count.cmp(&b.0.tag_count),
            SortType::TagCountDesc => b.0.tag_count.cmp(&a.0.tag_count),
            // NameAsc, plus the unsupported date sorts (warned above).
            _ => a.0.file_name.cmp(&b.0.file_name),
        });
    }
    entries
        .into_iter()
        .map(|(file, lib)| (file.file_name, lib))
        .unzip()
}
/// Sort files that carry full metadata (tag count + optional date) and
/// split them into parallel `(file_name, library_id)` vectors.
///
/// Date sorts place dated photos before undated ones regardless of
/// direction; undated pairs fall back to a name comparison.
fn sort_with_metadata(
    mut files: Vec<FileWithMetadata>,
    sort_type: SortType,
) -> (Vec<String>, Vec<i32>) {
    use std::cmp::Ordering;
    match sort_type {
        SortType::Shuffle => files.shuffle(&mut thread_rng()),
        NameAsc => files.sort_by(|a, b| a.file_name.cmp(&b.file_name)),
        SortType::NameDesc => files.sort_by(|a, b| b.file_name.cmp(&a.file_name)),
        SortType::TagCountAsc => files.sort_by(|a, b| a.tag_count.cmp(&b.tag_count)),
        SortType::TagCountDesc => files.sort_by(|a, b| b.tag_count.cmp(&a.tag_count)),
        SortType::DateTakenAsc | SortType::DateTakenDesc => {
            let ascending = sort_type == SortType::DateTakenAsc;
            files.sort_by(|a, b| match (a.date_taken, b.date_taken) {
                // Both dated: direction depends on the requested sort.
                (Some(x), Some(y)) => {
                    if ascending {
                        x.cmp(&y)
                    } else {
                        y.cmp(&x)
                    }
                }
                // Dated photos always precede undated ones.
                (Some(_), None) => Ordering::Less,
                (None, Some(_)) => Ordering::Greater,
                // Neither dated: deterministic fallback on name.
                (None, None) => a.file_name.cmp(&b.file_name),
            });
        }
    }
    files
        .into_iter()
        .map(|f| (f.file_name, f.library_id))
        .unzip()
}
/// List the immediate children of `dir`, keeping media files and
/// subdirectories.
///
/// # Errors
/// Propagates the `read_dir` error for an unreadable directory. Per-entry
/// failures (an unreadable entry, or a failed file-type probe) skip that
/// entry instead of aborting the listing.
pub fn list_files(dir: &Path) -> io::Result<Vec<PathBuf>> {
    let tracer = global_tracer();
    let mut span = tracer.start("list_files");
    let dir_name_string = dir.to_str().unwrap_or_default().to_string();
    span.set_attribute(KeyValue::new("dir", dir_name_string));
    info!("Listing files in: {:?}", dir);
    let files = read_dir(dir)?
        .filter_map(|res| res.ok())
        .filter(|entry| {
            // Keep media files and directories. A failed file_type() probe
            // previously panicked via unwrap(); now it just drops the entry.
            is_image_or_video(&entry.path())
                || entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false)
        })
        .map(|entry| entry.path())
        .collect::<Vec<PathBuf>>();
    span.set_attribute(KeyValue::new("file_count", files.len().to_string()));
    span.set_status(Status::Ok);
    info!("Found {:?} files in directory: {:?}", files.len(), dir);
    Ok(files)
}
/// Returns true when `path` is a supported media file (image or video);
/// thin delegation to `file_types::is_media_file`.
pub fn is_image_or_video(path: &Path) -> bool {
    file_types::is_media_file(path)
}
/// Check if a file matches the media type filter.
///
/// `MediaType::All` accepts anything classified as an image OR a video;
/// `Photo` and `Video` accept only their respective kind.
fn matches_media_type(path: &Path, media_type: &MediaType) -> bool {
    let result = match media_type {
        MediaType::All => file_types::is_image_file(path) || file_types::is_video_file(path),
        MediaType::Photo => file_types::is_image_file(path),
        MediaType::Video => file_types::is_video_file(path),
    };
    // The lowercase extension is only needed for this log line; computing
    // it inside the macro means the allocation happens only when debug
    // logging is enabled (log macros skip argument evaluation otherwise).
    debug!(
        "Media type check: path={:?}, extension='{}', type={:?}, match={}",
        path,
        path.extension()
            .and_then(|p| p.to_str())
            .map_or(String::from(""), |p| p.to_lowercase()),
        media_type,
        result
    );
    result
}
/// Resolve `path` (absolute, or relative to `base`) and validate that it
/// stays inside `base`.
///
/// Returns the absolutized path on success. When `new_file` is true the
/// path is allowed not to exist yet. All failures are logged (missing
/// paths quietly, traversal attempts loudly) and yield `None`.
pub fn is_valid_full_path<P: AsRef<Path> + Debug + AsRef<std::ffi::OsStr>>(
    base: &P,
    path: &P,
    new_file: bool,
) -> Option<PathBuf> {
    trace!("is_valid_full_path => Base: {:?}. Path: {:?}", base, path);
    let candidate = PathBuf::from(&path);
    // Anchor relative inputs at the library base before validating.
    let mut full = if candidate.is_relative() {
        let mut anchored = PathBuf::new();
        anchored.push(base);
        anchored.push(&candidate);
        anchored
    } else {
        candidate
    };
    match is_path_above_base_dir(base, &mut full, new_file) {
        Ok(resolved) => Some(resolved),
        Err(PathValidationError::DoesNotExist(p)) => {
            // Missing paths are routine when probing multiple libraries,
            // so keep this at debug level.
            debug!("Path does not exist under base {:?}: {:?}", base, p);
            None
        }
        Err(PathValidationError::AboveBase(p)) => {
            // Escaping the base directory looks like traversal — log loudly.
            error!("Path above base directory {:?}: {:?}", base, p);
            None
        }
        Err(PathValidationError::Other(e)) => {
            error!("{}", e);
            None
        }
    }
}
// Why a requested path failed validation against a library base directory.
#[derive(Debug)]
enum PathValidationError {
    // The resolved path has nothing at it on disk.
    DoesNotExist(PathBuf),
    // The resolved path escapes the base directory (traversal attempt).
    AboveBase(PathBuf),
    // Absolutization or another unexpected failure.
    Other(anyhow::Error),
}
impl std::fmt::Display for PathValidationError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
PathValidationError::DoesNotExist(p) => write!(f, "Path does not exist: {:?}", p),
PathValidationError::AboveBase(p) => write!(f, "Path above base directory: {:?}", p),
PathValidationError::Other(e) => write!(f, "{}", e),
}
}
}
/// Absolutize `full_path` and check that it stays within `base`.
///
/// Returns the absolutized path when it is under `base` and either exists
/// on disk or is permitted not to (`new_file`). Otherwise classifies the
/// failure for the caller's logging.
fn is_path_above_base_dir<P: AsRef<Path> + Debug>(
    base: P,
    full_path: &mut PathBuf,
    new_file: bool,
) -> Result<PathBuf, PathValidationError> {
    // `absolutize` (path_absolutize crate) resolves `.`/`..` lexically and
    // does not require the path to exist.
    match full_path.absolutize() {
        Err(e) => Err(PathValidationError::Other(
            anyhow::Error::new(e)
                .context(format!("Unable to resolve absolute path: {:?}", full_path)),
        )),
        Ok(p) => {
            if p.starts_with(base) && (new_file || p.exists()) {
                Ok(p.into_owned())
            } else if !p.exists() {
                // NOTE(review): a path that is both outside `base` AND
                // missing is reported as DoesNotExist, not AboveBase —
                // confirm this precedence is intended before relying on
                // AboveBase for traversal auditing.
                Err(PathValidationError::DoesNotExist(p.into_owned()))
            } else {
                Err(PathValidationError::AboveBase(p.into_owned()))
            }
        }
    }
}
/// Handler for GPS summary endpoint
/// Returns lightweight GPS data for all photos with coordinates
///
/// Path handling: empty or "/" means "all GPS photos"; anything else must
/// pass the traversal guard against at least one configured library root
/// (existence is not required — see the comment below). Results are sorted
/// by the requested `sort` parameter, defaulting to oldest-first.
pub async fn get_gps_summary(
    _: Claims,
    request: HttpRequest,
    req: Query<FilesRequest>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
    app_state: Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
    use crate::data::{GpsPhotoSummary, GpsPhotosResponse};
    let parent_cx = extract_context_from_request(&request);
    let tracer = global_tracer();
    let mut span = tracer
        .span_builder("get_gps_summary")
        .start_with_context(&tracer, &parent_cx);
    span.set_attribute(KeyValue::new("path", req.path.clone()));
    span.set_attribute(KeyValue::new(
        "recursive",
        req.recursive.unwrap_or(false).to_string(),
    ));
    let cx = opentelemetry::Context::current_with_span(span);
    // The database stores relative paths, so we use the path as-is.
    // Normalize empty path or "/" to return all GPS photos. Validation
    // is purely a traversal guard — the path need not exist on disk
    // under any particular library, because the DAO just does a prefix
    // match against image_exif.rel_path (which is library-agnostic for
    // this summary query).
    let requested_path = if req.path.is_empty() || req.path == "/" {
        String::new()
    } else {
        let req_path = PathBuf::from(&req.path);
        // `new_file = true` so the guard only rejects traversal, not
        // non-existence.
        let validated = app_state.libraries.iter().any(|lib| {
            is_valid_full_path(&PathBuf::from(&lib.root_path), &req_path, true).is_some()
        });
        if !validated {
            warn!("Invalid path for GPS summary: {}", req.path);
            cx.span().set_status(Status::error("Invalid path"));
            return Ok(HttpResponse::BadRequest().json(serde_json::json!({
                "error": "Invalid path"
            })));
        }
        req.path.clone()
    };
    let recursive = req.recursive.unwrap_or(false);
    info!(
        "Fetching GPS photos for path='{}' recursive={}",
        requested_path, recursive
    );
    // Query database for all photos with GPS
    let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
    match exif_dao_guard.get_all_with_gps(&cx, &requested_path, recursive) {
        Ok(gps_data) => {
            let mut photos: Vec<GpsPhotoSummary> = gps_data
                .into_iter()
                .map(|(path, lat, lon, date_taken)| GpsPhotoSummary {
                    path,
                    lat,
                    lon,
                    date_taken,
                })
                .collect();
            // Sort by date_taken based on request, defaulting to ascending (oldest to newest)
            use crate::data::SortType;
            let sort_type = req.sort.unwrap_or(SortType::DateTakenAsc);
            match sort_type {
                SortType::DateTakenDesc => {
                    // Dated photos first (newest leading); undated sink to
                    // the end, ordered by path.
                    photos.sort_by(|a, b| match (a.date_taken, b.date_taken) {
                        (Some(date_a), Some(date_b)) => date_b.cmp(&date_a),
                        (Some(_), None) => std::cmp::Ordering::Less,
                        (None, Some(_)) => std::cmp::Ordering::Greater,
                        (None, None) => a.path.cmp(&b.path),
                    });
                }
                NameAsc => {
                    photos.sort_by(|a, b| a.path.cmp(&b.path));
                }
                SortType::NameDesc => {
                    photos.sort_by(|a, b| b.path.cmp(&a.path));
                }
                _ => {
                    // Default: DateTakenAsc
                    // (also covers Shuffle/TagCount*, which have no
                    // meaning for this endpoint).
                    photos.sort_by(|a, b| match (a.date_taken, b.date_taken) {
                        (Some(date_a), Some(date_b)) => date_a.cmp(&date_b),
                        (Some(_), None) => std::cmp::Ordering::Less,
                        (None, Some(_)) => std::cmp::Ordering::Greater,
                        (None, None) => a.path.cmp(&b.path),
                    });
                }
            }
            let total = photos.len();
            cx.span()
                .set_attribute(KeyValue::new("result_count", total as i64));
            cx.span().set_status(Status::Ok);
            Ok(HttpResponse::Ok().json(GpsPhotosResponse { photos, total }))
        }
        Err(e) => {
            error!("Error querying GPS data: {:?}", e);
            cx.span()
                .set_status(Status::error(format!("Database error: {:?}", e)));
            Ok(HttpResponse::InternalServerError().json(serde_json::json!({
                "error": "Failed to query GPS data"
            })))
        }
    }
}
/// Handler for the batch EXIF endpoint at `GET /photos/exif`.
///
/// Emits one `ExifSummary` per photo whose `date_taken` lies within
/// `[date_from, date_to]`, optionally restricted to a single library via
/// the `library` query param (numeric id or name). This replaces the
/// N+1 pattern of `/photos` + per-file `/image/metadata` for window-
/// scoped consumers: one DB query, one HTTP round-trip, one mutex
/// acquisition.
///
/// Photos with no `date_taken` are excluded by construction (the
/// underlying `query_by_exif` filter requires a non-null timestamp once
/// a range is supplied). Filename-extracted dates are not synthesized
/// here; callers needing that fallback should hit `/image/metadata`.
pub async fn list_exif_summary(
    _: Claims,
    request: HttpRequest,
    req: Query<ExifBatchRequest>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
    app_state: Data<AppState>,
) -> Result<HttpResponse, actix_web::Error> {
    let parent_cx = extract_context_from_request(&request);
    let tracer = global_tracer();
    let mut span = tracer
        .span_builder("list_exif_summary")
        .start_with_context(&tracer, &parent_cx);
    // Optional timestamps render as "" on the span when absent.
    let fmt_ts = |ts: Option<i64>| match ts {
        Some(v) => v.to_string(),
        None => String::new(),
    };
    span.set_attribute(KeyValue::new("date_from", fmt_ts(req.date_from)));
    span.set_attribute(KeyValue::new("date_to", fmt_ts(req.date_to)));
    span.set_attribute(KeyValue::new(
        "library",
        req.library.clone().unwrap_or_default(),
    ));
    // Resolve the library filter before taking the DAO mutex so a bad
    // id/name 400s immediately. None == union across all libraries.
    let filter_id =
        match crate::libraries::resolve_library_param(&app_state, req.library.as_deref()) {
            Ok(resolved) => resolved.map(|lib| lib.id),
            Err(msg) => {
                span.set_status(Status::error(msg.clone()));
                return Ok(HttpResponse::BadRequest().json(serde_json::json!({ "error": msg })));
            }
        };
    let cx = opentelemetry::Context::current_with_span(span);
    // id -> name lookup so each row resolves its library name without a
    // linear scan over the configured libraries.
    let mut library_names = std::collections::HashMap::new();
    for lib in app_state.libraries.iter() {
        library_names.insert(lib.id, lib.name.clone());
    }
    let mut dao = exif_dao.lock().expect("Unable to get ExifDao");
    match dao.query_by_exif(&cx, None, None, None, None, req.date_from, req.date_to) {
        Ok(rows) => {
            let mut photos: Vec<ExifSummary> = Vec::new();
            for r in rows {
                // Library filter post-query: keeps the DAO trait (and its
                // mocks) unchanged; the in-memory pass over a date-bounded
                // result set is negligible at typical library counts.
                if let Some(id) = filter_id {
                    if r.library_id != id {
                        continue;
                    }
                }
                photos.push(ExifSummary {
                    library_name: library_names.get(&r.library_id).cloned(),
                    file_path: r.file_path,
                    library_id: r.library_id,
                    camera_model: r.camera_model,
                    width: r.width,
                    height: r.height,
                    // image_exif stores GPS as f32 to keep row size small;
                    // widen for the JSON shape so clients don't need to
                    // know about the on-disk precision.
                    gps_latitude: r.gps_latitude.map(f64::from),
                    gps_longitude: r.gps_longitude.map(f64::from),
                    date_taken: r.date_taken,
                });
            }
            let total = photos.len();
            cx.span()
                .set_attribute(KeyValue::new("result_count", total as i64));
            cx.span().set_status(Status::Ok);
            Ok(HttpResponse::Ok().json(ExifBatchResponse { photos, total }))
        }
        Err(e) => {
            error!("Error querying EXIF batch: {:?}", e);
            cx.span()
                .set_status(Status::error(format!("Database error: {:?}", e)));
            Ok(HttpResponse::InternalServerError().json(serde_json::json!({
                "error": "Failed to query EXIF data"
            })))
        }
    }
}
/// Handler that moves/renames a file inside the configured base path.
///
/// Both paths are traversal-validated; an invalid path yields 400, an
/// existing destination or a failed rename yields 500. On success the
/// stream actor is told to refresh thumbnails.
pub async fn move_file<FS: FileSystemAccess>(
    _: Claims,
    file_system: Data<FS>,
    app_state: Data<AppState>,
    request: web::Json<MoveFileRequest>,
) -> HttpResponse {
    info!("Moving file: {:?}", request);
    // Validation errors are ErrorKind; the inner filesystem move result
    // is anyhow::Result — hence the nested Result in the match below.
    match is_valid_full_path(&app_state.base_path, &request.source, false)
        .ok_or(ErrorKind::InvalidData)
        .and_then(|source| {
            is_valid_full_path(&app_state.base_path, &request.destination, true)
                .ok_or(ErrorKind::InvalidData)
                .and_then(|dest| {
                    if dest.exists() {
                        error!("Destination already exists, not moving file: {:?}", source);
                        Err(ErrorKind::AlreadyExists)
                    } else {
                        Ok(dest)
                    }
                })
                .map(|dest| (source, dest))
        })
        .map(|(source, dest)| file_system.move_file(source, dest))
    {
        // BUG FIX: this arm previously matched `Ok(_)`, which also
        // matched `Ok(Err(..))` — a failed filesystem rename was
        // reported to the client as 200 OK. Only a successful rename
        // counts as success now.
        Ok(Ok(())) => {
            info!("Moved file: {} -> {}", request.source, request.destination,);
            app_state.stream_manager.do_send(RefreshThumbnailsMessage);
            HttpResponse::Ok().finish()
        }
        // Validation passed but the rename itself failed.
        Ok(Err(e)) => {
            error!(
                "Error moving file: {} to: {}. {}",
                request.source, request.destination, e
            );
            HttpResponse::InternalServerError().finish()
        }
        Err(e) => {
            error!(
                "Error moving file: {} to: {}. {}",
                request.source, request.destination, e
            );
            if e == ErrorKind::InvalidData {
                HttpResponse::BadRequest().finish()
            } else {
                HttpResponse::InternalServerError().finish()
            }
        }
    }
}
/// JSON body for the `move_file` handler. Both paths are interpreted
/// relative to the configured base path and traversal-validated before
/// any rename happens.
#[derive(Deserialize, Debug)]
pub struct MoveFileRequest {
    // Existing file to move.
    source: String,
    // Target path; the move is rejected if it already exists.
    destination: String,
}
/// Abstraction over filesystem operations used by the handlers, so
/// tests can substitute an in-memory fake (see `FakeFileSystem`).
pub trait FileSystemAccess {
    /// Lists files under `path`, resolved against the implementation's base.
    fn get_files_for_path(&self, path: &str) -> anyhow::Result<Vec<PathBuf>>;
    /// Renames `from` to `destination`.
    fn move_file<P: AsRef<Path>>(&self, from: P, destination: P) -> anyhow::Result<()>;
}
/// `FileSystemAccess` implementation backed by the real filesystem,
/// rooted at `base_path`.
pub struct RealFileSystem {
    base_path: String,
}
impl RealFileSystem {
    /// Creates a filesystem rooted at `base_path`; path lookups are
    /// validated against this root.
    #[allow(dead_code)] // Used in main.rs binary and tests
    pub(crate) fn new(base_path: String) -> RealFileSystem {
        RealFileSystem { base_path }
    }
}
impl FileSystemAccess for RealFileSystem {
    /// Lists media files under `path` after traversal-validating it
    /// against the configured base path.
    fn get_files_for_path(&self, path: &str) -> anyhow::Result<Vec<PathBuf>> {
        let base = PathBuf::from(&self.base_path);
        let candidate = PathBuf::from(path);
        is_valid_full_path(&base, &candidate, false)
            .context("Invalid path")
            .map(|full| {
                debug!("Valid path: {:?}", full);
                list_files(&full).unwrap_or_default()
            })
    }
    /// Renames `from` to `destination` via `std::fs::rename`, attaching
    /// the source file name to any error for easier diagnosis.
    fn move_file<P: AsRef<Path>>(&self, from: P, destination: P) -> anyhow::Result<()> {
        info!(
            "Moving file: '{:?}' -> '{:?}'",
            from.as_ref(),
            destination.as_ref()
        );
        // Non-UTF-8 or missing file names degrade to "" in the error text.
        let name = from
            .as_ref()
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or_default()
            .to_string();
        std::fs::rename(from, destination)
            .with_context(|| format!("Failed to move file: {:?}", name))
    }
}
/// Fire-and-forget actor message asking the stream actor to rebuild
/// thumbnails (sent after uploads and moves).
pub struct RefreshThumbnailsMessage;
impl Message for RefreshThumbnailsMessage {
    type Result = ();
}
impl Handler<RefreshThumbnailsMessage> for StreamActor {
    type Result = ();
    /// Kicks off a thumbnail refresh in response to the message.
    fn handle(&mut self, _msg: RefreshThumbnailsMessage, _ctx: &mut Self::Context) -> Self::Result {
        let tracer = global_tracer();
        // NOTE(review): the span is dropped immediately, so it records
        // only an instant, not the refresh duration — confirm intended.
        let _ = tracer.start("RefreshThumbnailsMessage");
        info!("Refreshing thumbnails after upload");
        // The stub in lib.rs is a no-op; the real generation is driven by
        // the file watcher tick in main.rs, which has access to the
        // configured libraries.
        create_thumbnails(&[])
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::database::DbError;
use ::anyhow::anyhow;
use std::collections::HashMap;
use std::env;
use std::fs::File;
/// In-memory `FileSystemAccess` fake for handler tests: `files` maps a
/// requested path to the file names it should "contain"; `err` forces
/// every call to fail.
struct FakeFileSystem {
    files: HashMap<String, Vec<String>>,
    base_path: String,
    err: bool,
}
impl FakeFileSystem {
    /// Fake that fails every call — for error-path tests.
    #[allow(dead_code)]
    fn with_error() -> FakeFileSystem {
        FakeFileSystem {
            files: HashMap::new(),
            base_path: String::new(),
            err: true,
        }
    }
    /// Fake backed by `files`, rooted at the system temp dir so returned
    /// paths look like real absolute paths.
    fn new(files: HashMap<String, Vec<String>>) -> FakeFileSystem {
        let root = env::temp_dir().to_str().unwrap().to_string();
        FakeFileSystem {
            files,
            base_path: root,
            err: false,
        }
    }
}
impl FileSystemAccess for FakeFileSystem {
fn get_files_for_path(&self, path: &str) -> anyhow::Result<Vec<PathBuf>> {
if self.err {
Err(anyhow!("Error for test"))
} else if let Some(files) = self.files.get(path) {
// Prepend base_path to all returned files
Ok(files
.iter()
.map(|f| PathBuf::from(&self.base_path).join(f))
.collect::<Vec<PathBuf>>())
} else {
Ok(Vec::new())
}
}
fn move_file<P: AsRef<Path>>(&self, _from: P, _destination: P) -> anyhow::Result<()> {
todo!()
}
}
/// Stub `ExifDao` for handler tests: write paths echo their input back
/// as a dummy row with id 1, read paths return empty/`None`, and
/// `get_all_with_gps` deliberately panics (`todo!`) so tests that hit
/// it fail loudly.
struct MockExifDao;
impl ExifDao for MockExifDao {
    fn store_exif(
        &mut self,
        _context: &opentelemetry::Context,
        data: crate::database::models::InsertImageExif,
    ) -> Result<crate::database::models::ImageExif, DbError> {
        // Return a dummy ImageExif for tests
        Ok(crate::database::models::ImageExif {
            id: 1,
            library_id: data.library_id,
            file_path: data.file_path.to_string(),
            camera_make: data.camera_make.map(|s| s.to_string()),
            camera_model: data.camera_model.map(|s| s.to_string()),
            lens_model: data.lens_model.map(|s| s.to_string()),
            width: data.width,
            height: data.height,
            orientation: data.orientation,
            gps_latitude: data.gps_latitude,
            gps_longitude: data.gps_longitude,
            gps_altitude: data.gps_altitude,
            focal_length: data.focal_length,
            aperture: data.aperture,
            shutter_speed: data.shutter_speed,
            iso: data.iso,
            date_taken: data.date_taken,
            created_time: data.created_time,
            last_modified: data.last_modified,
            content_hash: data.content_hash.clone(),
            size_bytes: data.size_bytes,
        })
    }
    fn get_exif(
        &mut self,
        _context: &opentelemetry::Context,
        _: &str,
    ) -> Result<Option<crate::database::models::ImageExif>, DbError> {
        Ok(None)
    }
    // Same echo behavior as store_exif.
    fn update_exif(
        &mut self,
        _context: &opentelemetry::Context,
        data: crate::database::models::InsertImageExif,
    ) -> Result<crate::database::models::ImageExif, DbError> {
        // Return a dummy ImageExif for tests
        Ok(crate::database::models::ImageExif {
            id: 1,
            library_id: data.library_id,
            file_path: data.file_path.to_string(),
            camera_make: data.camera_make.map(|s| s.to_string()),
            camera_model: data.camera_model.map(|s| s.to_string()),
            lens_model: data.lens_model.map(|s| s.to_string()),
            width: data.width,
            height: data.height,
            orientation: data.orientation,
            gps_latitude: data.gps_latitude,
            gps_longitude: data.gps_longitude,
            gps_altitude: data.gps_altitude,
            focal_length: data.focal_length,
            aperture: data.aperture,
            shutter_speed: data.shutter_speed,
            iso: data.iso,
            date_taken: data.date_taken,
            created_time: data.created_time,
            last_modified: data.last_modified,
            content_hash: data.content_hash.clone(),
            size_bytes: data.size_bytes,
        })
    }
    fn delete_exif(
        &mut self,
        _context: &opentelemetry::Context,
        _: &str,
    ) -> Result<(), DbError> {
        Ok(())
    }
    fn get_all_with_date_taken(
        &mut self,
        _context: &opentelemetry::Context,
        _library_id: Option<i32>,
    ) -> Result<Vec<(String, i64)>, DbError> {
        Ok(Vec::new())
    }
    fn get_exif_batch(
        &mut self,
        _context: &opentelemetry::Context,
        _: &[String],
    ) -> Result<Vec<crate::database::models::ImageExif>, DbError> {
        Ok(Vec::new())
    }
    fn query_by_exif(
        &mut self,
        _context: &opentelemetry::Context,
        _: Option<&str>,
        _: Option<&str>,
        _: Option<&str>,
        _: Option<(f64, f64, f64, f64)>,
        _: Option<i64>,
        _: Option<i64>,
    ) -> Result<Vec<crate::database::models::ImageExif>, DbError> {
        Ok(Vec::new())
    }
    fn get_camera_makes(
        &mut self,
        _context: &opentelemetry::Context,
    ) -> Result<Vec<(String, i64)>, DbError> {
        Ok(Vec::new())
    }
    fn update_file_path(
        &mut self,
        _context: &opentelemetry::Context,
        _old_path: &str,
        _new_path: &str,
    ) -> Result<(), DbError> {
        Ok(())
    }
    fn get_all_file_paths(
        &mut self,
        _context: &opentelemetry::Context,
    ) -> Result<Vec<String>, DbError> {
        Ok(Vec::new())
    }
    // Intentionally unimplemented: panics if a test reaches it.
    fn get_all_with_gps(
        &mut self,
        _context: &opentelemetry::Context,
        _base_path: &str,
        _recursive: bool,
    ) -> Result<Vec<(String, f64, f64, Option<i64>)>, DbError> {
        todo!()
    }
    fn get_rows_missing_hash(
        &mut self,
        _context: &opentelemetry::Context,
        _limit: i64,
    ) -> Result<Vec<(i32, String)>, DbError> {
        Ok(Vec::new())
    }
    fn backfill_content_hash(
        &mut self,
        _context: &opentelemetry::Context,
        _library_id: i32,
        _rel_path: &str,
        _hash: &str,
        _size_bytes: i64,
    ) -> Result<(), DbError> {
        Ok(())
    }
    fn find_by_content_hash(
        &mut self,
        _context: &opentelemetry::Context,
        _hash: &str,
    ) -> Result<Option<crate::database::models::ImageExif>, DbError> {
        Ok(None)
    }
    // Echoes the queried path back as its own (only) content-sharing peer.
    fn get_rel_paths_sharing_content(
        &mut self,
        _context: &opentelemetry::Context,
        _library_id: i32,
        rel_path: &str,
    ) -> Result<Vec<String>, DbError> {
        Ok(vec![rel_path.to_string()])
    }
    fn get_rel_paths_for_library(
        &mut self,
        _context: &opentelemetry::Context,
        _library_id: i32,
    ) -> Result<Vec<String>, DbError> {
        Ok(vec![])
    }
    fn find_content_hash_anywhere(
        &mut self,
        _context: &opentelemetry::Context,
        _rel_path: &str,
    ) -> Result<Option<String>, DbError> {
        Ok(None)
    }
    fn get_rel_paths_by_hash(
        &mut self,
        _context: &opentelemetry::Context,
        _hash: &str,
    ) -> Result<Vec<String>, DbError> {
        Ok(vec![])
    }
    fn list_rel_paths_for_libraries(
        &mut self,
        _context: &opentelemetry::Context,
        _library_ids: &[i32],
        _path_prefix: Option<&str>,
    ) -> Result<Vec<(i32, String)>, DbError> {
        Ok(vec![])
    }
    fn delete_exif_by_library(
        &mut self,
        _context: &opentelemetry::Context,
        _library_id: i32,
        _rel_path: &str,
    ) -> Result<(), DbError> {
        Ok(())
    }
}
mod api {
use super::*;
use actix_web::{HttpResponse, web::Query};
use crate::{
AppState,
data::{Claims, PhotosResponse},
testhelpers::BodyReader,
};
use crate::database::test::in_memory_db_connection;
use crate::tags::SqliteTagDao;
use actix_web::test::TestRequest;
use actix_web::web::Data;
use std::fs;
/// Initializes env_logger for test output; repeat-init errors are ignored.
fn setup() {
    let _ = env_logger::builder().is_test(true).try_init();
}
// Happy path: listing the base path returns the created photo and
// directory, and every returned photo has a recognized image extension.
#[actix_rt::test]
async fn test_list_photos() {
    setup();
    let claims = Claims {
        sub: String::from("1"),
        exp: 12345,
    };
    let request: Query<FilesRequest> = Query::from_query("path=").unwrap();
    // Create AppState with the same base_path as RealFileSystem
    let test_state = AppState::test_state();
    // Create a dedicated test directory to avoid interference from other files in system temp
    let test_base = PathBuf::from(test_state.base_path.clone());
    let mut test_dir = test_base.clone();
    test_dir.push("test-dir");
    fs::create_dir_all(&test_dir).unwrap();
    let mut photo_path = test_base.clone();
    photo_path.push("photo.jpg");
    File::create(&photo_path).unwrap();
    let response: HttpResponse = list_photos(
        claims,
        TestRequest::default().to_http_request(),
        request,
        Data::new(test_state),
        Data::new(RealFileSystem::new(test_base.to_str().unwrap().to_string())),
        Data::new(Mutex::new(SqliteTagDao::default())),
        Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
    )
    .await;
    let status = response.status();
    assert_eq!(status, 200);
    let body: PhotosResponse = serde_json::from_str(&response.read_to_str()).unwrap();
    debug!("{:?}", body);
    assert!(body.photos.contains(&String::from("photo.jpg")));
    assert!(body.dirs.contains(&String::from("test-dir")));
    // No photo with an extension other than png/jpg/jpeg may leak through.
    assert!(
        body.photos
            .iter()
            .filter(|filename| !filename.ends_with(".png")
                && !filename.ends_with(".jpg")
                && !filename.ends_with(".jpeg"))
            .collect::<Vec<&String>>()
            .is_empty()
    );
    // Cleanup
    let _ = fs::remove_dir_all(test_base);
}
// A path that escapes the base directory ("..") must be rejected with 400.
#[actix_rt::test]
async fn test_list_below_base_fails_400() {
    setup();
    let claims = Claims {
        sub: String::from("1"),
        exp: 12345,
    };
    let request: Query<FilesRequest> = Query::from_query("path=..").unwrap();
    let temp_dir = env::temp_dir();
    let response = list_photos(
        claims,
        TestRequest::default().to_http_request(),
        request,
        Data::new(AppState::test_state()),
        Data::new(RealFileSystem::new(temp_dir.to_str().unwrap().to_string())),
        Data::new(Mutex::new(SqliteTagDao::default())),
        Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
    )
    .await;
    assert_eq!(response.status(), 400);
}
// Default ("Any") tag filter: a file tagged with at least one of the
// requested tag ids is returned.
#[actix_rt::test]
async fn get_files_with_tag_any_filter() {
    setup();
    let claims = Claims {
        sub: String::from("1"),
        exp: 12345,
    };
    let request: Query<FilesRequest> =
        Query::from_query("path=&tag_ids=1,3&recursive=true").unwrap();
    let mut tag_dao =
        SqliteTagDao::new(std::sync::Arc::new(Mutex::new(in_memory_db_connection())));
    let tag1 = tag_dao
        .create_tag(&opentelemetry::Context::current(), "tag1")
        .unwrap();
    let _tag2 = tag_dao
        .create_tag(&opentelemetry::Context::current(), "tag2")
        .unwrap();
    let tag3 = tag_dao
        .create_tag(&opentelemetry::Context::current(), "tag3")
        .unwrap();
    let _ = &tag_dao
        .tag_file(&opentelemetry::Context::current(), "test.jpg", tag1.id)
        .unwrap();
    let _ = &tag_dao
        .tag_file(&opentelemetry::Context::current(), "test.jpg", tag3.id)
        .unwrap();
    // Empty fake filesystem: results come from the tag DAO alone.
    let response: HttpResponse = list_photos(
        claims,
        TestRequest::default().to_http_request(),
        request,
        Data::new(AppState::test_state()),
        Data::new(FakeFileSystem::new(HashMap::new())),
        Data::new(Mutex::new(tag_dao)),
        Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
    )
    .await;
    assert_eq!(200, response.status());
    let body: PhotosResponse = serde_json::from_str(&response.read_to_str()).unwrap();
    assert_eq!(1, body.photos.len());
    assert!(body.photos.contains(&String::from("test.jpg")));
}
// "All" tag filter mode: only files carrying every requested tag id are
// returned; a file with just one of them is filtered out.
#[actix_rt::test]
async fn get_files_with_tag_all_filter() {
    setup();
    let claims = Claims {
        sub: String::from("1"),
        exp: 12345,
    };
    let mut tag_dao =
        SqliteTagDao::new(std::sync::Arc::new(Mutex::new(in_memory_db_connection())));
    let tag1 = tag_dao
        .create_tag(&opentelemetry::Context::current(), "tag1")
        .unwrap();
    let _tag2 = tag_dao
        .create_tag(&opentelemetry::Context::current(), "tag2")
        .unwrap();
    let tag3 = tag_dao
        .create_tag(&opentelemetry::Context::current(), "tag3")
        .unwrap();
    let _ = &tag_dao
        .tag_file(&opentelemetry::Context::current(), "test.jpg", tag1.id)
        .unwrap();
    let _ = &tag_dao
        .tag_file(&opentelemetry::Context::current(), "test.jpg", tag3.id)
        .unwrap();
    // Should get filtered since it doesn't have tag3
    tag_dao
        .tag_file(
            &opentelemetry::Context::current(),
            "some-other.jpg",
            tag1.id,
        )
        .unwrap();
    let request: Query<FilesRequest> = Query::from_query(&format!(
        "path=&tag_ids={},{}&tag_filter_mode=All&recursive=true",
        tag1.id, tag3.id
    ))
    .unwrap();
    let response: HttpResponse = list_photos(
        claims,
        TestRequest::default().to_http_request(),
        request,
        Data::new(AppState::test_state()),
        Data::new(FakeFileSystem::new(HashMap::new())),
        Data::new(Mutex::new(tag_dao)),
        Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
    )
    .await;
    assert_eq!(200, response.status());
    let body: PhotosResponse = serde_json::from_str(&response.read_to_str()).unwrap();
    assert_eq!(1, body.photos.len());
    assert!(body.photos.contains(&String::from("test.jpg")));
}
}
// Every traversal-shaped path must be rejected (None) by the validator.
#[test]
fn directory_traversal_test() {
    let base = env::temp_dir();
    let attacks = [
        "../",
        "..",
        "fake/../../../",
        "../../../etc/passwd",
        "..//etc/passwd",
        "../../etc/passwd",
    ];
    for attack in attacks {
        assert_eq!(
            None,
            is_valid_full_path(&base, &PathBuf::from(attack), false)
        );
    }
}
// A base-relative path to an existing file validates successfully.
#[test]
fn build_from_path_relative_to_base_test() {
    let base = env::temp_dir();
    let mut test_file = PathBuf::from(&base);
    test_file.push("test.png");
    // The file must exist on disk for validation to succeed.
    File::create(test_file).unwrap();
    assert!(is_valid_full_path(&base, &PathBuf::from("test.png"), false).is_some());
}
// Validation fails when intermediate directories do not exist on disk.
#[test]
fn build_from_relative_returns_none_if_directory_does_not_exist_test() {
    let base = env::temp_dir();
    let path = "relative/path/test.png";
    let mut test_file = PathBuf::from(&base);
    test_file.push(path);
    // The nested directories are never created, so this must be None.
    assert_eq!(None, is_valid_full_path(&base, &test_file, false));
}
// An absolute path already inside the base is accepted and returned as-is.
#[test]
fn build_from_absolute_path_test() {
    let base = env::temp_dir();
    let mut test_file = PathBuf::from(&base);
    test_file.push("test.png");
    File::create(&test_file).unwrap();
    assert!(is_valid_full_path(&base, &test_file, false).is_some());
    assert_eq!(
        Some(test_file.clone()),
        is_valid_full_path(&base, &test_file, false)
    );
}
// Generates a #[test] named `$name` asserting that `$filename` is
// recognized by `is_image_or_video`.
macro_rules! extension_test {
    ($name:ident, $filename:literal) => {
        #[test]
        fn $name() {
            assert!(is_image_or_video(Path::new($filename)));
        }
    };
}
// One generated test per supported media extension, covering lower,
// mixed, and upper case (extension matching must be case-insensitive).
extension_test!(valid_png, "image.png");
extension_test!(valid_png_mixed_case, "image.pNg");
extension_test!(valid_png_upper_case, "image.PNG");
extension_test!(valid_jpeg, "image.jpeg");
extension_test!(valid_jpeg_upper_case, "image.JPEG");
extension_test!(valid_jpg, "image.jpg");
extension_test!(valid_jpg_upper_case, "image.JPG");
extension_test!(valid_mp4, "image.mp4");
extension_test!(valid_mp4_mixed_case, "image.mP4");
extension_test!(valid_mp4_upper_case, "image.MP4");
extension_test!(valid_mov, "image.mov");
extension_test!(valid_mov_mixed_case, "image.mOV");
extension_test!(valid_mov_upper_case, "image.MOV");
extension_test!(valid_nef, "image.nef");
extension_test!(valid_nef_mixed_case, "image.nEF");
extension_test!(valid_nef_upper_case, "image.NEF");
// Hidden metadata files such as .DS_store must not count as media.
#[test]
fn hidden_file_not_valid_test() {
    assert!(!is_image_or_video(Path::new(".DS_store")));
}
}