Implement database-level sorting with composite indexes for efficient date and tag queries. Add pagination metadata support and optimize tag count queries using batch processing.
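
The database-level date sort depends on the store being able to filter by file path and order by date in a single indexed pass. A minimal sketch of the composite index this implies, assuming the SQLite-backed `image_exif` table behind the `ImageExif` model (the index name is hypothetical, not the actual migration):

    CREATE INDEX IF NOT EXISTS idx_image_exif_file_path_date_taken
        ON image_exif (file_path, date_taken);

A similar index on the tag join table would serve the batched tag-count query the same way.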
use actix::{Handler, Message};
use anyhow::{Context, anyhow};
use std::collections::HashSet;
use std::fmt::Debug;
use std::fs::read_dir;
use std::io;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::time::SystemTime;

use crate::data::{Claims, FilesRequest, FilterMode, MediaType, PhotosResponse, SortType};
use crate::database::ExifDao;
use crate::file_types;
use crate::geo::{gps_bounding_box, haversine_distance};
use crate::memories::extract_date_from_filename;
use crate::{AppState, create_thumbnails};
use actix_web::dev::ResourcePath;
use actix_web::web::Data;
use actix_web::{
    HttpRequest, HttpResponse,
    web::{self, Query},
};
use chrono::{DateTime, Utc};
use log::{debug, error, info, trace, warn};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};

use crate::data::SortType::NameAsc;
use crate::error::IntoHttpError;
use crate::otel::{extract_context_from_request, global_tracer};
use crate::tags::{FileWithTagCount, TagDao};
use crate::video::actors::StreamActor;
use path_absolutize::*;
use rand::prelude::SliceRandom;
use rand::thread_rng;
use serde::Deserialize;

/// File metadata for sorting and filtering.
/// Includes tag count and an optional date for date-based sorting.
pub struct FileWithMetadata {
    pub file_name: String,
    pub tag_count: i64,
    pub date_taken: Option<i64>, // Unix timestamp from EXIF or filename extraction
}

/// Apply sorting to files with EXIF data support for date-based sorting.
/// Handles both date sorting (with EXIF/filename fallback) and regular sorting.
/// Returns (sorted_file_paths, total_count).
fn apply_sorting_with_exif(
    files: Vec<FileWithTagCount>,
    sort_type: SortType,
    exif_dao: &mut Box<dyn ExifDao>,
    span_context: &opentelemetry::Context,
    base_path: &Path,
    limit: Option<i64>,
    offset: i64,
) -> (Vec<String>, i64) {
    let total_count = files.len() as i64;

    match sort_type {
        SortType::DateTakenAsc | SortType::DateTakenDesc => {
            info!("Date sorting requested, using database-level sorting");

            // Collect file paths for the batch EXIF query
            let file_paths: Vec<String> = files.iter().map(|f| f.file_name.clone()).collect();

            // Try database-level sorting first (most efficient)
            let ascending = sort_type == SortType::DateTakenAsc;
            match exif_dao.get_files_sorted_by_date(
                span_context,
                &file_paths,
                ascending,
                limit,
                offset,
            ) {
                Ok((sorted_files, db_total)) => {
                    info!(
                        "Database-level date sorting succeeded, returned {} files",
                        sorted_files.len()
                    );
                    (sorted_files, db_total)
                }
                Err(e) => {
                    warn!(
                        "Database-level sorting failed: {:?}, falling back to in-memory sort",
                        e
                    );
                    // Fall back to in-memory sorting with date extraction
                    let (sorted, _) = in_memory_date_sort(
                        files,
                        sort_type,
                        exif_dao,
                        span_context,
                        base_path,
                        limit,
                        offset,
                    );
                    (sorted, total_count)
                }
            }
        }
        _ => {
            // Use regular sort for non-date sorting
            let sorted = sort(files, sort_type);
            let result = if let Some(limit_val) = limit {
                sorted
                    .into_iter()
                    .skip(offset as usize)
                    .take(limit_val as usize)
                    .collect()
            } else {
                sorted
            };
            (result, total_count)
        }
    }
}
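
// ExifDao::get_files_sorted_by_date (used above) pushes ordering, paging, and
// the total count down to the database. The exact SQL lives in the DAO
// implementation; conceptually it is something like the following, where the
// table and column names are assumptions based on the ImageExif model:
//
//   SELECT file_path FROM image_exif
//   WHERE file_path IN (...)
//   ORDER BY date_taken ASC   -- or DESC for DateTakenDesc
//   LIMIT ? OFFSET ?;
//
// plus a COUNT(*) over the same predicate for the returned total.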

/// Fallback in-memory date sorting with EXIF/filename extraction
fn in_memory_date_sort(
    files: Vec<FileWithTagCount>,
    sort_type: SortType,
    exif_dao: &mut Box<dyn ExifDao>,
    span_context: &opentelemetry::Context,
    base_path: &Path,
    limit: Option<i64>,
    offset: i64,
) -> (Vec<String>, i64) {
    let total_count = files.len() as i64;
    let file_paths: Vec<String> = files.iter().map(|f| f.file_name.clone()).collect();

    // Batch fetch EXIF data
    let exif_map: std::collections::HashMap<String, i64> = exif_dao
        .get_exif_batch(span_context, &file_paths)
        .unwrap_or_default()
        .into_iter()
        .filter_map(|exif| exif.date_taken.map(|dt| (exif.file_path, dt)))
        .collect();

    // Convert to FileWithMetadata with date fallback logic
    let files_with_metadata: Vec<FileWithMetadata> = files
        .into_iter()
        .map(|f| {
            // Try the EXIF date first
            let date_taken = exif_map
                .get(&f.file_name)
                .copied()
                .or_else(|| {
                    // Fall back to filename extraction
                    extract_date_from_filename(&f.file_name).map(|dt| dt.timestamp())
                })
                .or_else(|| {
                    // Fall back to the filesystem creation (or modification) date
                    let full_path = base_path.join(&f.file_name);
                    std::fs::metadata(full_path)
                        .and_then(|md| md.created().or(md.modified()))
                        .ok()
                        .map(|system_time| {
                            <SystemTime as Into<DateTime<Utc>>>::into(system_time).timestamp()
                        })
                });

            FileWithMetadata {
                file_name: f.file_name,
                tag_count: f.tag_count,
                date_taken,
            }
        })
        .collect();

    let sorted = sort_with_metadata(files_with_metadata, sort_type);
    let result = if let Some(limit_val) = limit {
        sorted
            .into_iter()
            .skip(offset as usize)
            .take(limit_val as usize)
            .collect()
    } else {
        sorted
    };
    (result, total_count)
}
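
// The fallback chain above resolves a date for each file in three steps:
// 1. the EXIF date_taken from the batch query, when a row exists;
// 2. otherwise a timestamp parsed out of the file name (e.g. a name like
//    "IMG_20210314_123456.jpg", assuming extract_date_from_filename
//    recognises that pattern);
// 3. otherwise the filesystem created time, or modified time on platforms
//    that do not expose creation time.
// Files that fail all three keep date_taken = None and sort after dated files.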

pub async fn list_photos<TagD: TagDao, FS: FileSystemAccess>(
    _: Claims,
    request: HttpRequest,
    req: Query<FilesRequest>,
    app_state: web::Data<AppState>,
    file_system: web::Data<FS>,
    tag_dao: web::Data<Mutex<TagD>>,
    exif_dao: web::Data<Mutex<Box<dyn ExifDao>>>,
) -> HttpResponse {
    let search_path = &req.path;

    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("list_photos", &context);
    span.set_attributes(vec![
        KeyValue::new("path", search_path.to_string()),
        KeyValue::new("recursive", req.recursive.unwrap_or(false).to_string()),
        KeyValue::new(
            "tag_ids",
            req.tag_ids.clone().unwrap_or_default().to_string(),
        ),
        KeyValue::new(
            "tag_filter_mode",
            format!("{:?}", req.tag_filter_mode.unwrap_or(FilterMode::Any)),
        ),
        KeyValue::new(
            "exclude_tag_ids",
            req.exclude_tag_ids.clone().unwrap_or_default().to_string(),
        ),
        KeyValue::new("sort", format!("{:?}", &req.sort.unwrap_or(NameAsc))),
        // EXIF search parameters
        KeyValue::new("camera_make", req.camera_make.clone().unwrap_or_default()),
        KeyValue::new("camera_model", req.camera_model.clone().unwrap_or_default()),
        KeyValue::new("lens_model", req.lens_model.clone().unwrap_or_default()),
        KeyValue::new(
            "gps_lat",
            req.gps_lat.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "gps_lon",
            req.gps_lon.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "gps_radius_km",
            req.gps_radius_km.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "date_from",
            req.date_from.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "date_to",
            req.date_to.map(|v| v.to_string()).unwrap_or_default(),
        ),
        KeyValue::new(
            "media_type",
            req.media_type
                .as_ref()
                .map(|mt| format!("{:?}", mt))
                .unwrap_or_default(),
        ),
        // Pagination parameters
        KeyValue::new("pagination.enabled", req.limit.is_some().to_string()),
        KeyValue::new(
            "pagination.limit",
            req.limit.map(|l| l.to_string()).unwrap_or_default(),
        ),
        KeyValue::new("pagination.offset", req.offset.unwrap_or(0).to_string()),
        // Optimization flags
        KeyValue::new("optimization.batch_tags", "true"),
        KeyValue::new(
            "optimization.db_sort",
            matches!(
                req.sort,
                Some(SortType::DateTakenAsc | SortType::DateTakenDesc)
            )
            .to_string(),
        ),
    ]);

    let span_context = opentelemetry::Context::current_with_span(span);

    // Check whether EXIF filtering is requested. All three GPS parameters are
    // included here so that an incomplete GPS request still reaches the
    // validation below instead of being silently ignored.
    let has_exif_filters = req.camera_make.is_some()
        || req.camera_model.is_some()
        || req.lens_model.is_some()
        || req.gps_lat.is_some()
        || req.gps_lon.is_some()
        || req.gps_radius_km.is_some()
        || req.date_from.is_some()
        || req.date_to.is_some();

    // Apply EXIF-based filtering if requested
    let exif_matched_files: Option<HashSet<String>> = if has_exif_filters {
        // Validate GPS parameters (all three must be present together)
        if (req.gps_lat.is_some() || req.gps_lon.is_some() || req.gps_radius_km.is_some())
            && !(req.gps_lat.is_some() && req.gps_lon.is_some() && req.gps_radius_km.is_some())
        {
            warn!("GPS search requires lat, lon, and radius_km to all be specified");
            span_context
                .span()
                .set_status(Status::error("Invalid GPS parameters"));
            return HttpResponse::BadRequest().body("GPS search requires lat, lon, and radius_km");
        }
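
        // The GPS filter runs in two phases: a cheap bounding-box prefilter
        // that the database can serve (gps_bounding_box widens the lat/lon
        // range enough to cover radius_km), then an exact haversine distance
        // check over the survivors. The box over-selects near its corners;
        // the precise pass below trims those out.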
        // Calculate the GPS bounding box if a GPS search is requested
        let gps_bounds = if let (Some(lat), Some(lon), Some(radius_km)) =
            (req.gps_lat, req.gps_lon, req.gps_radius_km)
        {
            let (min_lat, max_lat, min_lon, max_lon) = gps_bounding_box(lat, lon, radius_km);
            Some((min_lat, max_lat, min_lon, max_lon))
        } else {
            None
        };

        // Query the EXIF database
        let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
        let exif_results = exif_dao_guard
            .query_by_exif(
                &span_context,
                req.camera_make.as_deref(),
                req.camera_model.as_deref(),
                req.lens_model.as_deref(),
                gps_bounds,
                req.date_from,
                req.date_to,
            )
            .unwrap_or_else(|e| {
                warn!("EXIF query failed: {:?}", e);
                Vec::new()
            });

        // Apply precise GPS distance filtering if a GPS search was requested
        let filtered_results = if let (Some(lat), Some(lon), Some(radius_km)) =
            (req.gps_lat, req.gps_lon, req.gps_radius_km)
        {
            exif_results
                .into_iter()
                .filter(|exif| {
                    if let (Some(photo_lat), Some(photo_lon)) =
                        (exif.gps_latitude, exif.gps_longitude)
                    {
                        let distance =
                            haversine_distance(lat, lon, photo_lat as f64, photo_lon as f64);
                        distance <= radius_km
                    } else {
                        false
                    }
                })
                .map(|exif| exif.file_path)
                .collect::<HashSet<String>>()
        } else {
            exif_results
                .into_iter()
                .map(|exif| exif.file_path)
                .collect::<HashSet<String>>()
        };

        info!("EXIF filtering matched {} files", filtered_results.len());
        Some(filtered_results)
    } else {
        None
    };

    let search_recursively = req.recursive.unwrap_or(false);
    if let Some(tag_ids) = &req.tag_ids
        && search_recursively
    {
        let filter_mode = req.tag_filter_mode.unwrap_or(FilterMode::Any);
        info!(
            "Searching for tags: {}. With path: '{}' and filter mode: {:?}",
            tag_ids, search_path, filter_mode
        );

        let mut dao = tag_dao.lock().expect("Unable to get TagDao");
        let tag_ids = tag_ids
            .split(',')
            .filter_map(|t| t.parse().ok())
            .collect::<Vec<i32>>();

        let exclude_tag_ids = req
            .exclude_tag_ids
            .clone()
            .unwrap_or_default()
            .split(',')
            .filter_map(|t| t.parse().ok())
            .collect::<Vec<i32>>();

        return match filter_mode {
            FilterMode::Any => {
                dao.get_files_with_any_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
            }
            FilterMode::All => {
                dao.get_files_with_all_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
            }
        }
        .context(format!(
            "Failed to get files with tag_ids: {:?} with filter_mode: {:?}",
            tag_ids, filter_mode
        ))
        .inspect(|files| {
            info!(
                "Found {:?} tagged files, filtering down by search path {:?}",
                files.len(),
                search_path
            )
        })
        .map(|tagged_files| {
            tagged_files
                .into_iter()
                .filter(|f| {
                    // When searching at the root, everything matches recursively
                    if search_path.trim() == "" {
                        return true;
                    }

                    f.file_name.starts_with(&format!(
                        "{}/",
                        search_path.strip_suffix('/').unwrap_or(search_path)
                    ))
                })
                .filter(|f| {
                    // Apply EXIF filtering if present
                    if let Some(ref exif_files) = exif_matched_files {
                        exif_files.contains(&f.file_name)
                    } else {
                        true
                    }
                })
                .filter(|f| {
                    // Apply media type filtering if specified
                    if let Some(ref media_type) = req.media_type {
                        let path = PathBuf::from(&f.file_name);
                        matches_media_type(&path, media_type)
                    } else {
                        true
                    }
                })
                .collect::<Vec<FileWithTagCount>>()
        })
        .map(|files| {
            // Handle sorting via the helper that supports EXIF date sorting and pagination
            let sort_type = req.sort.unwrap_or(NameAsc);
            let limit = req.limit;
            let offset = req.offset.unwrap_or(0);
            let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
            let result = apply_sorting_with_exif(
                files,
                sort_type,
                &mut exif_dao_guard,
                &span_context,
                app_state.base_path.as_ref(),
                limit,
                offset,
            );
            drop(exif_dao_guard);
            result
        })
        .inspect(|(files, total)| debug!("Found {:?} files (total: {})", files.len(), total))
        .map(|(tagged_files, total_count)| {
            info!(
                "Found {:?} tagged files: {:?}",
                tagged_files.len(),
                tagged_files
            );

            let returned_count = tagged_files.len() as i64;
            let offset = req.offset.unwrap_or(0);
            let pagination_metadata = if req.limit.is_some() {
                (
                    Some(total_count),
                    Some(offset + returned_count < total_count),
                    if offset + returned_count < total_count {
                        Some(offset + returned_count)
                    } else {
                        None
                    },
                )
            } else {
                (None, None, None)
            };

            span_context
                .span()
                .set_attribute(KeyValue::new("file_count", tagged_files.len().to_string()));
            span_context
                .span()
                .set_attribute(KeyValue::new("total_count", total_count.to_string()));
            span_context.span().set_status(Status::Ok);

            HttpResponse::Ok().json(PhotosResponse {
                photos: tagged_files,
                dirs: vec![],
                total_count: pagination_metadata.0,
                has_more: pagination_metadata.1,
                next_offset: pagination_metadata.2,
            })
        })
        .into_http_internal_err()
        .unwrap_or_else(|e| e.error_response());
    }
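
    // Past this point there is no recursive tag search, so the listing comes
    // from the filesystem and every remaining filter (tags, EXIF, media type)
    // is applied in-process on the resulting file names.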
    // Use recursive or non-recursive file listing based on the flag
    let files_result = if search_recursively {
        // For a recursive search without tags, manually list files recursively
        is_valid_full_path(
            &PathBuf::from(&app_state.base_path),
            &PathBuf::from(search_path),
            false,
        )
        .map(|path| {
            debug!("Valid path for recursive search: {:?}", path);
            list_files_recursive(&path).unwrap_or_default()
        })
        .context("Invalid path")
    } else {
        file_system.get_files_for_path(search_path)
    };

    match files_result {
        Ok(files) => {
            info!(
                "Found {:?} files in path: {:?} (recursive: {})",
                files.len(),
                search_path,
                search_recursively
            );

            info!("Starting to filter {} files from filesystem", files.len());
            let start_filter = std::time::Instant::now();

            // Separate files and directories in a single pass to avoid redundant metadata calls
            let (file_names, dirs): (Vec<String>, Vec<String>) =
                files
                    .iter()
                    .fold((Vec::new(), Vec::new()), |(mut files, mut dirs), path| {
                        match path.metadata() {
                            Ok(md) => {
                                let relative =
                                    path.strip_prefix(&app_state.base_path).unwrap_or_else(|_| {
                                        panic!(
                                            "Unable to strip base path {} from file path {}",
                                            &app_state.base_path.path(),
                                            path.display()
                                        )
                                    });
                                let relative_str = relative.to_str().unwrap().to_string();

                                if md.is_file() {
                                    files.push(relative_str);
                                } else if md.is_dir() {
                                    dirs.push(relative_str);
                                }
                            }
                            Err(e) => {
                                error!("Failed getting file metadata: {:?}", e);
                                // Include files without metadata if they have extensions
                                if path.extension().is_some() {
                                    let relative = path
                                        .strip_prefix(&app_state.base_path)
                                        .unwrap_or_else(|_| {
                                            panic!(
                                                "Unable to strip base path {} from file path {}",
                                                &app_state.base_path.path(),
                                                path.display()
                                            )
                                        });
                                    files.push(relative.to_str().unwrap().to_string());
                                }
                            }
                        }
                        (files, dirs)
                    });

            info!(
                "File filtering took {:?}, now fetching tag counts for {} files",
                start_filter.elapsed(),
                file_names.len()
            );
            let start_tags = std::time::Instant::now();
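
            // The batch call below collapses what would otherwise be one COUNT
            // query per file into a single grouped query, conceptually (table
            // and column names are assumptions):
            //   SELECT file_name, COUNT(*) FROM file_tags
            //   WHERE file_name IN (...) GROUP BY file_name;
            // Files with no tags have no entry in the resulting map and fall
            // back to a count of 0 when the response rows are built.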
            // Batch query for tag counts to avoid N+1 queries
            let tag_counts = {
                let mut tag_dao_guard = tag_dao.lock().expect("Unable to get TagDao");
                tag_dao_guard
                    .get_tag_counts_batch(&span_context, &file_names)
                    .unwrap_or_default()
            };
            info!("Batch tag count query took {:?}", start_tags.elapsed());

            // Also get full tag lists for files that need tag filtering
            let start_tag_filter = std::time::Instant::now();
            let file_tags_map: std::collections::HashMap<String, Vec<crate::tags::Tag>> =
                if req.tag_ids.is_some() || req.exclude_tag_ids.is_some() {
                    info!(
                        "Tag filtering requested, fetching full tag lists for {} files",
                        file_names.len()
                    );
                    let mut tag_dao_guard = tag_dao.lock().expect("Unable to get TagDao");
                    file_names
                        .iter()
                        .filter_map(|file_name| {
                            tag_dao_guard
                                .get_tags_for_path(&span_context, file_name)
                                .ok()
                                .map(|tags| (file_name.clone(), tags))
                        })
                        .collect()
                } else {
                    std::collections::HashMap::new()
                };
            if req.tag_ids.is_some() || req.exclude_tag_ids.is_some() {
                info!("Full tag list fetch took {:?}", start_tag_filter.elapsed());
            }

            let photos = file_names
                .into_iter()
                .map(|file_name| {
                    let file_tags = file_tags_map.get(&file_name).cloned().unwrap_or_default();
                    (file_name, file_tags)
                })
                .filter(|(_, file_tags): &(String, Vec<crate::tags::Tag>)| {
                    if let Some(tag_ids) = &req.tag_ids {
                        let tag_ids = tag_ids
                            .split(',')
                            .filter_map(|t| t.parse().ok())
                            .collect::<Vec<i32>>();

                        let excluded_tag_ids = req
                            .exclude_tag_ids
                            .clone()
                            .unwrap_or_default()
                            .split(',')
                            .filter_map(|t| t.parse().ok())
                            .collect::<Vec<i32>>();

                        let filter_mode = req.tag_filter_mode.unwrap_or(FilterMode::Any);
                        let excluded = file_tags.iter().any(|t| excluded_tag_ids.contains(&t.id));

                        return !excluded
                            && match filter_mode {
                                FilterMode::Any => {
                                    file_tags.iter().any(|t| tag_ids.contains(&t.id))
                                }
                                FilterMode::All => tag_ids
                                    .iter()
                                    .all(|id| file_tags.iter().any(|tag| &tag.id == id)),
                            };
                    }

                    true
                })
                .filter(|(file_name, _)| {
                    // Apply EXIF filtering if present
                    if let Some(ref exif_files) = exif_matched_files {
                        exif_files.contains(file_name)
                    } else {
                        true
                    }
                })
                .filter(|(file_name, _)| {
                    // Apply media type filtering if specified
                    if let Some(ref media_type) = req.media_type {
                        let path = PathBuf::from(file_name);
                        matches_media_type(&path, media_type)
                    } else {
                        true
                    }
                })
                .map(
                    |(file_name, _tags): (String, Vec<crate::tags::Tag>)| FileWithTagCount {
                        file_name: file_name.clone(),
                        tag_count: *tag_counts.get(&file_name).unwrap_or(&0),
                    },
                )
                .collect::<Vec<FileWithTagCount>>();

            info!(
                "After all filters, {} files remain (filtering took {:?})",
                photos.len(),
                start_filter.elapsed()
            );

            // Extract pagination parameters
            let limit = req.limit;
            let offset = req.offset.unwrap_or(0);
            let start_sort = std::time::Instant::now();

            // Handle sorting via the helper that supports EXIF date sorting and pagination
            let (response_files, total_count) = if let Some(sort_type) = req.sort {
                info!("Sorting {} files by {:?}", photos.len(), sort_type);
                let mut exif_dao_guard = exif_dao.lock().expect("Unable to get ExifDao");
                let result = apply_sorting_with_exif(
                    photos,
                    sort_type,
                    &mut exif_dao_guard,
                    &span_context,
                    app_state.base_path.as_ref(),
                    limit,
                    offset,
                );
                drop(exif_dao_guard);
                result
            } else {
                // No sorting requested; apply pagination if requested
                let total = photos.len() as i64;
                let files: Vec<String> = if let Some(limit_val) = limit {
                    photos
                        .into_iter()
                        .skip(offset as usize)
                        .take(limit_val as usize)
                        .map(|f| f.file_name)
                        .collect()
                } else {
                    photos.into_iter().map(|f| f.file_name).collect()
                };
                (files, total)
            };
            info!(
                "Sorting took {:?}, returned {} files (total: {})",
                start_sort.elapsed(),
                response_files.len(),
                total_count
            );

            // Note: dirs were already collected during file filtering to avoid redundant metadata calls

            // Calculate pagination metadata
            let returned_count = response_files.len() as i64;
            let pagination_metadata = if limit.is_some() {
                (
                    Some(total_count),
                    Some(offset + returned_count < total_count),
                    if offset + returned_count < total_count {
                        Some(offset + returned_count)
                    } else {
                        None
                    },
                )
            } else {
                (None, None, None)
            };

            span_context
                .span()
                .set_attribute(KeyValue::new("file_count", files.len().to_string()));
            span_context
                .span()
                .set_attribute(KeyValue::new("returned_count", returned_count.to_string()));
            span_context
                .span()
                .set_attribute(KeyValue::new("total_count", total_count.to_string()));
            span_context.span().set_status(Status::Ok);

            HttpResponse::Ok().json(PhotosResponse {
                photos: response_files,
                dirs,
                total_count: pagination_metadata.0,
                has_more: pagination_metadata.1,
                next_offset: pagination_metadata.2,
            })
        }
        _ => {
            error!("Bad photos request: {}", req.path);
            span_context
                .span()
                .set_status(Status::error("Invalid path"));
            HttpResponse::BadRequest().finish()
        }
    }
}
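
// A minimal standalone sketch of the has_more / next_offset arithmetic used in
// list_photos above; the helper itself is hypothetical and not called anywhere.
#[allow(dead_code)]
fn pagination_sketch(total_count: i64, offset: i64, returned_count: i64) -> (bool, Option<i64>) {
    // More pages remain whenever the window just returned ends before the total
    let has_more = offset + returned_count < total_count;
    // The next page starts right after the current window, or nowhere at all
    let next_offset = if has_more { Some(offset + returned_count) } else { None };
    (has_more, next_offset)
}
// e.g. pagination_sketch(100, 40, 20) == (true, Some(60))
//      pagination_sketch(100, 80, 20) == (false, None)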

fn sort(mut files: Vec<FileWithTagCount>, sort_type: SortType) -> Vec<String> {
    match sort_type {
        SortType::Shuffle => files.shuffle(&mut thread_rng()),
        SortType::NameAsc => {
            files.sort_by(|l, r| l.file_name.cmp(&r.file_name));
        }
        SortType::NameDesc => {
            files.sort_by(|l, r| r.file_name.cmp(&l.file_name));
        }
        SortType::TagCountAsc => {
            files.sort_by(|l, r| l.tag_count.cmp(&r.tag_count));
        }
        SortType::TagCountDesc => {
            files.sort_by(|l, r| r.tag_count.cmp(&l.tag_count));
        }
        SortType::DateTakenAsc | SortType::DateTakenDesc => {
            // Date sorting is not implemented for FileWithTagCount; callers
            // should have routed date sorts through apply_sorting_with_exif.
            warn!("Date sorting not implemented for FileWithTagCount");
            files.sort_by(|l, r| l.file_name.cmp(&r.file_name));
        }
    }

    files
        .iter()
        .map(|f| f.file_name.clone())
        .collect::<Vec<String>>()
}

/// Sort files with metadata support (including date sorting)
fn sort_with_metadata(mut files: Vec<FileWithMetadata>, sort_type: SortType) -> Vec<String> {
    match sort_type {
        SortType::Shuffle => files.shuffle(&mut thread_rng()),
        SortType::NameAsc => {
            files.sort_by(|l, r| l.file_name.cmp(&r.file_name));
        }
        SortType::NameDesc => {
            files.sort_by(|l, r| r.file_name.cmp(&l.file_name));
        }
        SortType::TagCountAsc => {
            files.sort_by(|l, r| l.tag_count.cmp(&r.tag_count));
        }
        SortType::TagCountDesc => {
            files.sort_by(|l, r| r.tag_count.cmp(&l.tag_count));
        }
        SortType::DateTakenAsc | SortType::DateTakenDesc => {
            files.sort_by(|l, r| {
                match (l.date_taken, r.date_taken) {
                    (Some(a), Some(b)) => {
                        if sort_type == SortType::DateTakenAsc {
                            a.cmp(&b)
                        } else {
                            b.cmp(&a)
                        }
                    }
                    (Some(_), None) => std::cmp::Ordering::Less, // Dated photos first
                    (None, Some(_)) => std::cmp::Ordering::Greater,
                    (None, None) => l.file_name.cmp(&r.file_name), // Fall back to name
                }
            });
        }
    }

    files
        .iter()
        .map(|f| f.file_name.clone())
        .collect::<Vec<String>>()
}
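
// A minimal sketch of the ordering contract above, kept separate from the main
// `mod tests` below; the file names and timestamps are hypothetical. Dated
// files sort by timestamp, undated files sink to the end and fall back to
// name order among themselves.
#[cfg(test)]
mod sort_with_metadata_sketch {
    use super::*;

    #[test]
    fn dated_files_precede_undated_ones() {
        let files = vec![
            FileWithMetadata { file_name: "b.jpg".into(), tag_count: 0, date_taken: None },
            FileWithMetadata { file_name: "new.jpg".into(), tag_count: 0, date_taken: Some(200) },
            FileWithMetadata { file_name: "old.jpg".into(), tag_count: 0, date_taken: Some(100) },
            FileWithMetadata { file_name: "a.jpg".into(), tag_count: 0, date_taken: None },
        ];
        let sorted = sort_with_metadata(files, SortType::DateTakenAsc);
        assert_eq!(sorted, vec!["old.jpg", "new.jpg", "a.jpg", "b.jpg"]);
    }
}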

pub fn list_files(dir: &Path) -> io::Result<Vec<PathBuf>> {
    let tracer = global_tracer();
    let mut span = tracer.start("list_files");
    let dir_name_string = dir.to_str().unwrap_or_default().to_string();
    span.set_attribute(KeyValue::new("dir", dir_name_string));
    info!("Listing files in: {:?}", dir);

    let files = read_dir(dir)?
        .filter_map(|res| res.ok())
        .filter(|entry| {
            // Avoid panicking on entries whose file type cannot be read
            is_image_or_video(&entry.path())
                || entry.file_type().map(|t| t.is_dir()).unwrap_or(false)
        })
        .map(|entry| entry.path())
        .collect::<Vec<PathBuf>>();

    span.set_attribute(KeyValue::new("file_count", files.len().to_string()));
    span.set_status(Status::Ok);
    info!("Found {:?} files in directory: {:?}", files.len(), dir);
    Ok(files)
}

pub fn list_files_recursive(dir: &Path) -> io::Result<Vec<PathBuf>> {
    let tracer = global_tracer();
    let mut span = tracer.start("list_files_recursive");
    let dir_name_string = dir.to_str().unwrap_or_default().to_string();
    span.set_attribute(KeyValue::new("dir", dir_name_string));
    info!("Recursively listing files in: {:?}", dir);

    let mut result = Vec::new();

    fn visit_dirs(dir: &Path, files: &mut Vec<PathBuf>) -> io::Result<()> {
        if dir.is_dir() {
            for entry in read_dir(dir)? {
                let entry = entry?;
                let path = entry.path();

                if path.is_dir() {
                    visit_dirs(&path, files)?;
                } else if is_image_or_video(&path) {
                    files.push(path);
                }
            }
        }
        Ok(())
    }

    visit_dirs(dir, &mut result)?;

    span.set_attribute(KeyValue::new("file_count", result.len().to_string()));
    span.set_status(Status::Ok);
    info!(
        "Found {:?} files recursively in directory: {:?}",
        result.len(),
        dir
    );
    Ok(result)
}

pub fn is_image_or_video(path: &Path) -> bool {
    file_types::is_media_file(path)
}

/// Check whether a file matches the media type filter
fn matches_media_type(path: &Path, media_type: &MediaType) -> bool {
    let result = match media_type {
        MediaType::All => file_types::is_image_file(path) || file_types::is_video_file(path),
        MediaType::Photo => file_types::is_image_file(path),
        MediaType::Video => file_types::is_video_file(path),
    };

    let extension = path
        .extension()
        .and_then(|p| p.to_str())
        .map_or(String::from(""), |p| p.to_lowercase());

    debug!(
        "Media type check: path={:?}, extension='{}', type={:?}, match={}",
        path, extension, media_type, result
    );

    result
}

pub fn is_valid_full_path<P: AsRef<Path> + Debug + AsRef<std::ffi::OsStr>>(
    base: &P,
    path: &P,
    new_file: bool,
) -> Option<PathBuf> {
    trace!("is_valid_full_path => Base: {:?}. Path: {:?}", base, path);

    let path = PathBuf::from(&path);
    let mut path = if path.is_relative() {
        let mut full_path = PathBuf::new();
        full_path.push(base);
        full_path.push(&path);
        full_path
    } else {
        path
    };

    match is_path_above_base_dir(base, &mut path, new_file) {
        Ok(path) => Some(path),
        Err(e) => {
            error!("{}", e);
            None
        }
    }
}

fn is_path_above_base_dir<P: AsRef<Path> + Debug>(
    base: P,
    full_path: &mut PathBuf,
    new_file: bool,
) -> anyhow::Result<PathBuf> {
    let p = full_path
        .absolutize()
        .with_context(|| format!("Unable to resolve absolute path: {:?}", full_path))?;

    if p.starts_with(base) && (new_file || p.exists()) {
        Ok(p.into_owned())
    } else if !p.exists() {
        Err(anyhow!("Path does not exist: {:?}", p))
    } else {
        Err(anyhow!("Path above base directory"))
    }
}
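
// Example of the traversal guard above, assuming a base of "/data": a relative
// request like "albums/2021/a.jpg" absolutizes to "/data/albums/2021/a.jpg"
// and is accepted when it exists, while "fake/../../etc/passwd" absolutizes to
// "/etc/passwd", fails the starts_with(base) check, and is rejected. The
// directory_traversal_test cases below exercise exactly this behaviour.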

pub async fn move_file<FS: FileSystemAccess>(
    _: Claims,
    file_system: web::Data<FS>,
    app_state: Data<AppState>,
    request: web::Json<MoveFileRequest>,
) -> HttpResponse {
    info!("Moving file: {:?}", request);

    match is_valid_full_path(&app_state.base_path, &request.source, false)
        .ok_or(ErrorKind::InvalidData)
        .and_then(|source| {
            is_valid_full_path(&app_state.base_path, &request.destination, true)
                .ok_or(ErrorKind::InvalidData)
                .and_then(|dest| {
                    if dest.exists() {
                        error!("Destination already exists, not moving file: {:?}", source);

                        Err(ErrorKind::AlreadyExists)
                    } else {
                        Ok(dest)
                    }
                })
                .map(|dest| (source, dest))
        })
        .and_then(|(source, dest)| {
            // Surface rename failures instead of silently reporting success
            file_system.move_file(source, dest).map_err(|e| {
                error!("Rename failed: {:?}", e);
                ErrorKind::Other
            })
        }) {
        Ok(_) => {
            info!("Moved file: {} -> {}", request.source, request.destination);
            app_state.stream_manager.do_send(RefreshThumbnailsMessage);

            HttpResponse::Ok().finish()
        }
        Err(e) => {
            error!(
                "Error moving file: {} to: {}. {}",
                request.source, request.destination, e
            );

            if e == ErrorKind::InvalidData {
                HttpResponse::BadRequest().finish()
            } else {
                HttpResponse::InternalServerError().finish()
            }
        }
    }
}

#[derive(Deserialize, Debug)]
pub struct MoveFileRequest {
    source: String,
    destination: String,
}

pub trait FileSystemAccess {
    fn get_files_for_path(&self, path: &str) -> anyhow::Result<Vec<PathBuf>>;
    fn move_file<P: AsRef<Path>>(&self, from: P, destination: P) -> anyhow::Result<()>;
}

pub struct RealFileSystem {
    base_path: String,
}

impl RealFileSystem {
    #[allow(dead_code)] // Used in the main.rs binary and in tests
    pub(crate) fn new(base_path: String) -> RealFileSystem {
        RealFileSystem { base_path }
    }
}

impl FileSystemAccess for RealFileSystem {
    fn get_files_for_path(&self, path: &str) -> anyhow::Result<Vec<PathBuf>> {
        is_valid_full_path(&PathBuf::from(&self.base_path), &PathBuf::from(path), false)
            .map(|path| {
                debug!("Valid path: {:?}", path);
                list_files(&path).unwrap_or_default()
            })
            .context("Invalid path")
    }

    fn move_file<P: AsRef<Path>>(&self, from: P, destination: P) -> anyhow::Result<()> {
        info!(
            "Moving file: '{:?}' -> '{:?}'",
            from.as_ref(),
            destination.as_ref()
        );
        let name = from
            .as_ref()
            .file_name()
            .map(|n| n.to_str().unwrap_or_default().to_string())
            .unwrap_or_default();

        std::fs::rename(from, destination)
            .with_context(|| format!("Failed to move file: {:?}", name))
    }
}

pub struct RefreshThumbnailsMessage;

impl Message for RefreshThumbnailsMessage {
    type Result = ();
}

impl Handler<RefreshThumbnailsMessage> for StreamActor {
    type Result = ();

    fn handle(&mut self, _msg: RefreshThumbnailsMessage, _ctx: &mut Self::Context) -> Self::Result {
        let tracer = global_tracer();
        // Bind the span so it stays open for the duration of the handler
        // instead of being dropped (and ended) immediately.
        let _span = tracer.start("RefreshThumbnailsMessage");
        info!("Refreshing thumbnails after upload");
        create_thumbnails()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::DbError;
    use std::collections::HashMap;
    use std::env;
    use std::fs::File;

    struct FakeFileSystem {
        files: HashMap<String, Vec<String>>,
        base_path: String,
        err: bool,
    }

    impl FakeFileSystem {
        fn with_error() -> FakeFileSystem {
            FakeFileSystem {
                files: HashMap::new(),
                base_path: String::new(),
                err: true,
            }
        }

        fn new(files: HashMap<String, Vec<String>>) -> FakeFileSystem {
            // Use the temp dir as the base path for consistency
            let base_path = env::temp_dir();
            FakeFileSystem {
                files,
                base_path: base_path.to_str().unwrap().to_string(),
                err: false,
            }
        }
    }

    impl FileSystemAccess for FakeFileSystem {
        fn get_files_for_path(&self, path: &str) -> anyhow::Result<Vec<PathBuf>> {
            if self.err {
                Err(anyhow!("Error for test"))
            } else if let Some(files) = self.files.get(path) {
                // Prepend base_path to all returned files
                Ok(files
                    .iter()
                    .map(|f| PathBuf::from(&self.base_path).join(f))
                    .collect::<Vec<PathBuf>>())
            } else {
                Ok(Vec::new())
            }
        }

        fn move_file<P: AsRef<Path>>(&self, _from: P, _destination: P) -> anyhow::Result<()> {
            todo!()
        }
    }

    struct MockExifDao;

    impl ExifDao for MockExifDao {
        fn store_exif(
            &mut self,
            _context: &opentelemetry::Context,
            data: crate::database::models::InsertImageExif,
        ) -> Result<crate::database::models::ImageExif, DbError> {
            // Return a dummy ImageExif for tests
            Ok(crate::database::models::ImageExif {
                id: 1,
                file_path: data.file_path.to_string(),
                camera_make: data.camera_make.map(|s| s.to_string()),
                camera_model: data.camera_model.map(|s| s.to_string()),
                lens_model: data.lens_model.map(|s| s.to_string()),
                width: data.width,
                height: data.height,
                orientation: data.orientation,
                gps_latitude: data.gps_latitude,
                gps_longitude: data.gps_longitude,
                gps_altitude: data.gps_altitude,
                focal_length: data.focal_length,
                aperture: data.aperture,
                shutter_speed: data.shutter_speed,
                iso: data.iso,
                date_taken: data.date_taken,
                created_time: data.created_time,
                last_modified: data.last_modified,
            })
        }

        fn get_exif(
            &mut self,
            _context: &opentelemetry::Context,
            _: &str,
        ) -> Result<Option<crate::database::models::ImageExif>, crate::database::DbError> {
            Ok(None)
        }

        fn update_exif(
            &mut self,
            _context: &opentelemetry::Context,
            data: crate::database::models::InsertImageExif,
        ) -> Result<crate::database::models::ImageExif, crate::database::DbError> {
            // Return a dummy ImageExif for tests
            Ok(crate::database::models::ImageExif {
                id: 1,
                file_path: data.file_path.to_string(),
                camera_make: data.camera_make.map(|s| s.to_string()),
                camera_model: data.camera_model.map(|s| s.to_string()),
                lens_model: data.lens_model.map(|s| s.to_string()),
                width: data.width,
                height: data.height,
                orientation: data.orientation,
                gps_latitude: data.gps_latitude,
                gps_longitude: data.gps_longitude,
                gps_altitude: data.gps_altitude,
                focal_length: data.focal_length,
                aperture: data.aperture,
                shutter_speed: data.shutter_speed,
                iso: data.iso,
                date_taken: data.date_taken,
                created_time: data.created_time,
                last_modified: data.last_modified,
            })
        }

        fn delete_exif(
            &mut self,
            _context: &opentelemetry::Context,
            _: &str,
        ) -> Result<(), crate::database::DbError> {
            Ok(())
        }

        fn get_all_with_date_taken(
            &mut self,
            _context: &opentelemetry::Context,
        ) -> Result<Vec<(String, i64)>, crate::database::DbError> {
            Ok(Vec::new())
        }

        fn get_exif_batch(
            &mut self,
            _context: &opentelemetry::Context,
            _: &[String],
        ) -> Result<Vec<crate::database::models::ImageExif>, crate::database::DbError> {
            Ok(Vec::new())
        }

        fn query_by_exif(
            &mut self,
            _context: &opentelemetry::Context,
            _: Option<&str>,
            _: Option<&str>,
            _: Option<&str>,
            _: Option<(f64, f64, f64, f64)>,
            _: Option<i64>,
            _: Option<i64>,
        ) -> Result<Vec<crate::database::models::ImageExif>, crate::database::DbError> {
            Ok(Vec::new())
        }

        fn get_camera_makes(
            &mut self,
            _context: &opentelemetry::Context,
        ) -> Result<Vec<(String, i64)>, crate::database::DbError> {
            Ok(Vec::new())
        }

        fn update_file_path(
            &mut self,
            _context: &opentelemetry::Context,
            _old_path: &str,
            _new_path: &str,
        ) -> Result<(), DbError> {
            Ok(())
        }

        fn get_all_file_paths(
            &mut self,
            _context: &opentelemetry::Context,
        ) -> Result<Vec<String>, DbError> {
            Ok(Vec::new())
        }

        fn get_files_sorted_by_date(
            &mut self,
            _context: &opentelemetry::Context,
            file_paths: &[String],
            _ascending: bool,
            _limit: Option<i64>,
            _offset: i64,
        ) -> Result<(Vec<String>, i64), DbError> {
            // For tests, just return all files unsorted
            let count = file_paths.len() as i64;
            Ok((file_paths.to_vec(), count))
        }
    }

    mod api {
        use super::*;
        use actix_web::{HttpResponse, web::Query};

        use crate::{
            AppState,
            data::{Claims, PhotosResponse},
            testhelpers::BodyReader,
        };

        use crate::database::test::in_memory_db_connection;
        use crate::tags::SqliteTagDao;
        use actix_web::test::TestRequest;
        use actix_web::web::Data;
        use std::fs;

        fn setup() {
            let _ = env_logger::builder().is_test(true).try_init();
        }

        #[actix_rt::test]
        async fn test_list_photos() {
            setup();

            let claims = Claims {
                sub: String::from("1"),
                exp: 12345,
            };

            let request: Query<FilesRequest> = Query::from_query("path=").unwrap();

            // Create AppState with the same base_path as RealFileSystem
            let test_state = AppState::test_state();

            // Create a dedicated test directory to avoid interference from other files in the system temp dir
            let test_base = PathBuf::from(test_state.base_path.clone());

            let mut test_dir = test_base.clone();
            test_dir.push("test-dir");
            fs::create_dir_all(&test_dir).unwrap();

            let mut photo_path = test_base.clone();
            photo_path.push("photo.jpg");
            File::create(&photo_path).unwrap();

            let response: HttpResponse = list_photos(
                claims,
                TestRequest::default().to_http_request(),
                request,
                Data::new(test_state),
                Data::new(RealFileSystem::new(test_base.to_str().unwrap().to_string())),
                Data::new(Mutex::new(SqliteTagDao::default())),
                Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
            )
            .await;
            let status = response.status();
            assert_eq!(status, 200);

            let body: PhotosResponse = serde_json::from_str(&response.read_to_str()).unwrap();
            debug!("{:?}", body);

            assert!(body.photos.contains(&String::from("photo.jpg")));
            assert!(body.dirs.contains(&String::from("test-dir")));
            assert!(
                body.photos
                    .iter()
                    .filter(|filename| !filename.ends_with(".png")
                        && !filename.ends_with(".jpg")
                        && !filename.ends_with(".jpeg"))
                    .collect::<Vec<&String>>()
                    .is_empty()
            );

            // Cleanup
            let _ = fs::remove_dir_all(test_base);
        }

        #[actix_rt::test]
        async fn test_list_below_base_fails_400() {
            setup();

            let claims = Claims {
                sub: String::from("1"),
                exp: 12345,
            };

            let request: Query<FilesRequest> = Query::from_query("path=..").unwrap();

            let temp_dir = env::temp_dir();
            let response = list_photos(
                claims,
                TestRequest::default().to_http_request(),
                request,
                Data::new(AppState::test_state()),
                Data::new(RealFileSystem::new(temp_dir.to_str().unwrap().to_string())),
                Data::new(Mutex::new(SqliteTagDao::default())),
                Data::new(Mutex::new(
                    Box::new(MockExifDao) as Box<dyn crate::database::ExifDao>
                )),
            )
            .await;

            assert_eq!(response.status(), 400);
        }

        #[actix_rt::test]
        async fn get_files_with_tag_any_filter() {
            setup();

            let claims = Claims {
                sub: String::from("1"),
                exp: 12345,
            };

            let request: Query<FilesRequest> =
                Query::from_query("path=&tag_ids=1,3&recursive=true").unwrap();

            let mut tag_dao = SqliteTagDao::new(in_memory_db_connection());

            let tag1 = tag_dao
                .create_tag(&opentelemetry::Context::current(), "tag1")
                .unwrap();
            let _tag2 = tag_dao
                .create_tag(&opentelemetry::Context::current(), "tag2")
                .unwrap();
            let tag3 = tag_dao
                .create_tag(&opentelemetry::Context::current(), "tag3")
                .unwrap();

            tag_dao
                .tag_file(&opentelemetry::Context::current(), "test.jpg", tag1.id)
                .unwrap();
            tag_dao
                .tag_file(&opentelemetry::Context::current(), "test.jpg", tag3.id)
                .unwrap();

            let response: HttpResponse = list_photos(
                claims,
                TestRequest::default().to_http_request(),
                request,
                Data::new(AppState::test_state()),
                Data::new(FakeFileSystem::new(HashMap::new())),
                Data::new(Mutex::new(tag_dao)),
                Data::new(Mutex::new(
                    Box::new(MockExifDao) as Box<dyn crate::database::ExifDao>
                )),
            )
            .await;

            assert_eq!(200, response.status());

            let body: PhotosResponse = serde_json::from_str(&response.read_to_str()).unwrap();
            assert_eq!(1, body.photos.len());
            assert!(body.photos.contains(&String::from("test.jpg")));
        }

        #[actix_rt::test]
        async fn get_files_with_tag_all_filter() {
            setup();

            let claims = Claims {
                sub: String::from("1"),
                exp: 12345,
            };

            let mut tag_dao = SqliteTagDao::new(in_memory_db_connection());

            let tag1 = tag_dao
                .create_tag(&opentelemetry::Context::current(), "tag1")
                .unwrap();
            let _tag2 = tag_dao
                .create_tag(&opentelemetry::Context::current(), "tag2")
                .unwrap();
            let tag3 = tag_dao
                .create_tag(&opentelemetry::Context::current(), "tag3")
                .unwrap();

            tag_dao
                .tag_file(&opentelemetry::Context::current(), "test.jpg", tag1.id)
                .unwrap();
            tag_dao
                .tag_file(&opentelemetry::Context::current(), "test.jpg", tag3.id)
                .unwrap();

            // Should get filtered out since it doesn't have tag3
            tag_dao
                .tag_file(
                    &opentelemetry::Context::current(),
                    "some-other.jpg",
                    tag1.id,
                )
                .unwrap();

            let request: Query<FilesRequest> = Query::from_query(&format!(
                "path=&tag_ids={},{}&tag_filter_mode=All&recursive=true",
                tag1.id, tag3.id
            ))
            .unwrap();

            let response: HttpResponse = list_photos(
                claims,
                TestRequest::default().to_http_request(),
                request,
                Data::new(AppState::test_state()),
                Data::new(FakeFileSystem::new(HashMap::new())),
                Data::new(Mutex::new(tag_dao)),
                Data::new(Mutex::new(
                    Box::new(MockExifDao) as Box<dyn crate::database::ExifDao>
                )),
            )
            .await;

            assert_eq!(200, response.status());

            let body: PhotosResponse = serde_json::from_str(&response.read_to_str()).unwrap();
            assert_eq!(1, body.photos.len());
            assert!(body.photos.contains(&String::from("test.jpg")));
        }
    }

    #[test]
    fn directory_traversal_test() {
        let base = env::temp_dir();
        assert_eq!(
            None,
            is_valid_full_path(&base, &PathBuf::from("../"), false)
        );
        assert_eq!(None, is_valid_full_path(&base, &PathBuf::from(".."), false));
        assert_eq!(
            None,
            is_valid_full_path(&base, &PathBuf::from("fake/../../../"), false)
        );
        assert_eq!(
            None,
            is_valid_full_path(&base, &PathBuf::from("../../../etc/passwd"), false)
        );
        assert_eq!(
            None,
            is_valid_full_path(&base, &PathBuf::from("..//etc/passwd"), false)
        );
        assert_eq!(
            None,
            is_valid_full_path(&base, &PathBuf::from("../../etc/passwd"), false)
        );
    }

    #[test]
    fn build_from_path_relative_to_base_test() {
        let base = env::temp_dir();
        let mut test_file = PathBuf::from(&base);
        test_file.push("test.png");
        File::create(test_file).unwrap();

        assert!(is_valid_full_path(&base, &PathBuf::from("test.png"), false).is_some());
    }

    #[test]
    fn build_from_relative_returns_none_if_directory_does_not_exist_test() {
        let base = env::temp_dir();

        let path = "relative/path/test.png";
        let mut test_file = PathBuf::from(&base);
        test_file.push(path);

        assert_eq!(None, is_valid_full_path(&base, &test_file, false));
    }

    #[test]
    fn build_from_absolute_path_test() {
        let base = env::temp_dir();
        let mut test_file = PathBuf::from(&base);
        test_file.push("test.png");
        File::create(&test_file).unwrap();

        assert!(is_valid_full_path(&base, &test_file, false).is_some());

        assert_eq!(
            Some(test_file.clone()),
            is_valid_full_path(&base, &test_file, false)
        );
    }

    macro_rules! extension_test {
        ($name:ident, $filename:literal) => {
            #[test]
            fn $name() {
                assert!(is_image_or_video(Path::new($filename)));
            }
        };
    }

    extension_test!(valid_png, "image.png");
    extension_test!(valid_png_mixed_case, "image.pNg");
    extension_test!(valid_png_upper_case, "image.PNG");

    extension_test!(valid_jpeg, "image.jpeg");
    extension_test!(valid_jpeg_upper_case, "image.JPEG");
    extension_test!(valid_jpg, "image.jpg");
    extension_test!(valid_jpg_upper_case, "image.JPG");

    extension_test!(valid_mp4, "image.mp4");
    extension_test!(valid_mp4_mixed_case, "image.mP4");
    extension_test!(valid_mp4_upper_case, "image.MP4");

    extension_test!(valid_mov, "image.mov");
    extension_test!(valid_mov_mixed_case, "image.mOV");
    extension_test!(valid_mov_upper_case, "image.MOV");

    extension_test!(valid_nef, "image.nef");
    extension_test!(valid_nef_mixed_case, "image.nEF");
    extension_test!(valid_nef_upper_case, "image.NEF");

    #[test]
    fn hidden_file_not_valid_test() {
        assert!(!is_image_or_video(Path::new(".DS_store")));
    }
}